Refactor services alembic
@@ -1,63 +1,54 @@
# A generic, single database configuration.

# ================================================================
# services/alert-processor/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
sourceless = false

# version number format
# Uses Alembic datetime format
version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d
# version of a migration file's filename format
version_num_format = %s

# version name format
version_path_separator = /
# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = postgresql+asyncpg://suppliers_user:suppliers_pass123@suppliers-db:5432/suppliers_db
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://alert-processor_user:password@alert-processor-db-service:5432/alert-processor_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# on newly generated revision scripts.

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

@@ -90,4 +81,4 @@ formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
datefmt = %H:%M:%S
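Note: with the file_template above and timezone = Europe/Madrid (which, per the comments, needs python-dateutil via `alembic[tz]`), generated revision files get date-prefixed names. An illustrative, hypothetical example of a resulting filename:

    20240101_1200_ab12cd34ef56_add_alerts_table.py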
@@ -1,93 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version number format
# Uses Alembic datetime format
version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d

# version name format
version_path_separator = /

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://alert_processor_user:alert_processor_pass123@alert-processor-db:5432/alert_processor_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,62 +1,54 @@
"""
Alembic environment configuration for Alert Processor Service
"""
"""Alembic environment configuration for alert-processor service"""

import asyncio
from logging.config import fileConfig
import logging
import os
import sys
from pathlib import Path

from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Import models to ensure they're registered
from app.models.alerts import *  # noqa
from shared.database.base import Base
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# add your model's MetaData object here
# for 'autogenerate' support
# Set target metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with database connection"""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode"""
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
    run_migrations_online()
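For reference, the DATABASE_URL override in the env.py above can also be exercised programmatically. A minimal sketch, assuming it is run from the service directory next to the alembic.ini shown earlier; `upgrade_to_head` is an illustrative helper and not part of this commit:

import os

from alembic import command
from alembic.config import Config


def upgrade_to_head() -> None:
    # Mirror env.py: prefer DATABASE_URL from the environment over the ini value.
    cfg = Config("alembic.ini")
    database_url = os.getenv("DATABASE_URL")
    if database_url:
        cfg.set_main_option("sqlalchemy.url", database_url)
    # Invokes env.py, which runs the async "online" path via asyncio.run().
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_to_head()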
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,29 @@
"""Initial alert-processor service tables

Revision ID: 001_initial_alert_processor
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_alert_processor'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for alert_processor service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for alert_processor service
    pass
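When this placeholder is filled in, the upgrade/downgrade pair would typically wrap op.create_table / op.drop_table. A minimal sketch, illustrative only, reusing the imports already present in the placeholder file; the column set is a subset of the alerts table in the deleted migration that follows:

def upgrade() -> None:
    # Illustrative subset of the alerts table removed below.
    op.create_table(
        'alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
    )
    op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])


def downgrade() -> None:
    op.drop_index('ix_alerts_tenant_id', 'alerts')
    op.drop_table('alerts')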
@@ -1,64 +0,0 @@
"""Initial alerts table

Revision ID: 001
Revises:
Create Date: 2025-09-18 23:17:00

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create enum types
    alert_status_enum = postgresql.ENUM('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus')
    alert_severity_enum = postgresql.ENUM('low', 'medium', 'high', 'urgent', name='alertseverity')

    alert_status_enum.create(op.get_bind())
    alert_severity_enum.create(op.get_bind())

    # Create alerts table
    op.create_table('alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('item_type', sa.String(length=50), nullable=False),
        sa.Column('alert_type', sa.String(length=100), nullable=False),
        sa.Column('severity', alert_severity_enum, nullable=False),
        sa.Column('status', alert_status_enum, nullable=False),
        sa.Column('service', sa.String(length=100), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('actions', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('resolved_at', sa.DateTime(), nullable=True),
    )

    # Create indexes
    op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])
    op.create_index('ix_alerts_severity', 'alerts', ['severity'])
    op.create_index('ix_alerts_status', 'alerts', ['status'])
    op.create_index('ix_alerts_created_at', 'alerts', ['created_at'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('ix_alerts_created_at', 'alerts')
    op.drop_index('ix_alerts_status', 'alerts')
    op.drop_index('ix_alerts_severity', 'alerts')
    op.drop_index('ix_alerts_tenant_id', 'alerts')

    # Drop table
    op.drop_table('alerts')

    # Drop enum types
    op.execute('DROP TYPE alertseverity')
    op.execute('DROP TYPE alertstatus')
@@ -1,5 +1,5 @@
# ================================================================
# services/auth/migrations/alembic.ini
# services/auth/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
@@ -36,14 +36,14 @@ version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://auth_user:password@auth-db-service:5432/auth_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
@@ -81,4 +81,4 @@ formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
datefmt = %H:%M:%S
@@ -1,31 +1,50 @@
# ================================================================
# services/auth/migrations/env.py
# ================================================================
"""Alembic environment configuration"""
"""Alembic environment configuration for auth service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

from app.core.config import settings
from app.models.users import User
from app.models.tokens import RefreshToken, LoginAttempt
from shared.database.base import Base
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Set database URL
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata

def run_migrations_offline() -> None:
@@ -36,13 +55,20 @@ def run_migrations_offline() -> None:
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()

def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()
@@ -1,6 +1,3 @@
# ================================================================
# services/auth/migrations/script.py.mako
# ================================================================
"""${message}

Revision ID: ${up_revision}
@@ -8,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
@@ -24,4 +23,4 @@ def upgrade() -> None:


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
    ${downgrades if downgrades else "pass"}
services/auth/migrations/versions/001_initial_auth_tables.py (new file, 29 lines)
@@ -0,0 +1,29 @@
"""Initial auth service tables

Revision ID: 001_initial_auth
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_auth'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for the auth service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for the auth service
    pass
services/external/alembic.ini (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
# ================================================================
# services/external/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://external_user:password@external-db-service:5432/external_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
services/external/migrations/env.py (vendored, new file, 96 lines)
@@ -0,0 +1,96 @@
"""Alembic environment configuration for external service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata

def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()

def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()

async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()

def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
services/external/migrations/script.py.mako (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
services/external/migrations/versions/001_initial_external_tables.py (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
"""Initial external service tables

Revision ID: 001_initial_external
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_external'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for external service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for external service
    pass
services/forecasting/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
# ================================================================
# services/forecasting/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://forecasting_user:password@forecasting-db-service:5432/forecasting_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
services/forecasting/migrations/env.py (new file, 96 lines)
@@ -0,0 +1,96 @@
"""Alembic environment configuration for forecasting service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata

def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()

def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()

async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()

def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
services/forecasting/migrations/script.py.mako (new file, 26 lines)
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -1,98 +1,28 @@
# ================================================================
# services/forecasting/migrations/versions/001_initial_tables.py
# ================================================================
"""Initial forecasting tables
"""Initial forecasting service tables

Revision ID: 001
Revises:
Create Date: 2024-01-15 10:00:00.000000
Revision ID: 001_initial_forecasting
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
# revision identifiers, used by Alembic.
revision: str = '001_initial_forecasting'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade():
    # Create forecasts table
    op.create_table('forecasts',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('location', sa.String(length=255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('predicted_demand', sa.Float(), nullable=False),
        sa.Column('confidence_lower', sa.Float(), nullable=False),
        sa.Column('confidence_upper', sa.Float(), nullable=False),
        sa.Column('confidence_level', sa.Float(), nullable=True),
        sa.Column('model_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('model_version', sa.String(length=50), nullable=False),
        sa.Column('algorithm', sa.String(length=50), nullable=True),
        sa.Column('business_type', sa.String(length=50), nullable=True),
        sa.Column('day_of_week', sa.Integer(), nullable=False),
        sa.Column('is_holiday', sa.Boolean(), nullable=True),
        sa.Column('is_weekend', sa.Boolean(), nullable=True),
        sa.Column('weather_temperature', sa.Float(), nullable=True),
        sa.Column('weather_precipitation', sa.Float(), nullable=True),
        sa.Column('weather_description', sa.String(length=100), nullable=True),
        sa.Column('traffic_volume', sa.Integer(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer(), nullable=True),
        sa.Column('features_used', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes
    op.create_index('ix_forecasts_tenant_id', 'forecasts', ['tenant_id'])
    op.create_index('ix_forecasts_product_name', 'forecasts', ['product_name'])
    op.create_index('ix_forecasts_location', 'forecasts', ['location'])
    op.create_index('ix_forecasts_forecast_date', 'forecasts', ['forecast_date'])

    # Create prediction_batches table
    op.create_table('prediction_batches',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('batch_name', sa.String(length=255), nullable=False),
        sa.Column('requested_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('total_products', sa.Integer(), nullable=True),
        sa.Column('completed_products', sa.Integer(), nullable=True),
        sa.Column('failed_products', sa.Integer(), nullable=True),
        sa.Column('forecast_days', sa.Integer(), nullable=True),
        sa.Column('business_type', sa.String(length=50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    op.create_index('ix_prediction_batches_tenant_id', 'prediction_batches', ['tenant_id'])

    # Create forecast_alerts table
    op.create_table('forecast_alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('forecast_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('alert_type', sa.String(length=50), nullable=False),
        sa.Column('severity', sa.String(length=20), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notification_sent', sa.Boolean(), nullable=True),
        sa.Column('notification_method', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    op.create_index('ix_forecast_alerts_tenant_id', 'forecast_alerts', ['tenant_id'])

def downgrade():
    op.drop_table('forecast_alerts')
    op.drop_table('prediction_batches')
    op.drop_table('forecasts')
def upgrade() -> None:
    # TODO: Add table creation statements for forecasting service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for forecasting service
    pass
services/inventory/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
# ================================================================
# services/inventory/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://inventory_user:password@inventory-db-service:5432/inventory_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,62 +1,54 @@
"""
Alembic environment configuration for Inventory Service
"""
"""Alembic environment configuration for inventory service"""

import asyncio
from logging.config import fileConfig
import logging
import os
import sys
from pathlib import Path

from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Import models to ensure they're registered
from app.models.inventory import *  # noqa
from shared.database.base import Base
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('INVENTORY_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# add your model's MetaData object here
# for 'autogenerate' support
# Set target metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with database connection"""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode"""
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
    run_migrations_online()
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
    ${downgrades if downgrades else "pass"}
@@ -1,223 +1,28 @@
"""Initial inventory tables
"""Initial inventory service tables

Revision ID: 001
Revises:
Create Date: 2025-01-15 10:00:00.000000
Revision ID: 001_initial_inventory
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
revision: str = '001_initial_inventory'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create enum types
    op.execute("""
        CREATE TYPE unitofmeasure AS ENUM (
            'kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes'
        );
    """)

    op.execute("""
        CREATE TYPE ingredientcategory AS ENUM (
            'flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt',
            'spices', 'additives', 'packaging', 'cleaning', 'other'
        );
    """)

    op.execute("""
        CREATE TYPE stockmovementtype AS ENUM (
            'purchase', 'production_use', 'adjustment', 'waste',
            'transfer', 'return', 'initial_stock'
        );
    """)

    # Create ingredients table
    op.create_table(
        'ingredients',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('sku', sa.String(100), nullable=True),
        sa.Column('barcode', sa.String(50), nullable=True),
        sa.Column('category', sa.Enum('flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt', 'spices', 'additives', 'packaging', 'cleaning', 'other', name='ingredientcategory'), nullable=False),
        sa.Column('subcategory', sa.String(100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('brand', sa.String(100), nullable=True),
        sa.Column('unit_of_measure', sa.Enum('kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes', name='unitofmeasure'), nullable=False),
        sa.Column('package_size', sa.Float(), nullable=True),
        sa.Column('average_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('last_purchase_price', sa.Numeric(10, 2), nullable=True),
        sa.Column('standard_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('low_stock_threshold', sa.Float(), nullable=False, server_default='10.0'),
        sa.Column('reorder_point', sa.Float(), nullable=False, server_default='20.0'),
        sa.Column('reorder_quantity', sa.Float(), nullable=False, server_default='50.0'),
        sa.Column('max_stock_level', sa.Float(), nullable=True),
        sa.Column('requires_refrigeration', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('requires_freezing', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('storage_temperature_min', sa.Float(), nullable=True),
        sa.Column('storage_temperature_max', sa.Float(), nullable=True),
        sa.Column('storage_humidity_max', sa.Float(), nullable=True),
        sa.Column('shelf_life_days', sa.Integer(), nullable=True),
        sa.Column('storage_instructions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
        sa.Column('is_perishable', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock table
    op.create_table(
        'stock',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('batch_number', sa.String(100), nullable=True),
        sa.Column('lot_number', sa.String(100), nullable=True),
        sa.Column('supplier_batch_ref', sa.String(100), nullable=True),
        sa.Column('current_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('reserved_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('available_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('storage_location', sa.String(100), nullable=True),
        sa.Column('warehouse_zone', sa.String(50), nullable=True),
        sa.Column('shelf_position', sa.String(50), nullable=True),
        sa.Column('is_available', sa.Boolean(), nullable=True, server_default='true'),
        sa.Column('is_expired', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('quality_status', sa.String(20), nullable=True, server_default='good'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock_movements table
    op.create_table(
        'stock_movements',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('movement_type', sa.Enum('purchase', 'production_use', 'adjustment', 'waste', 'transfer', 'return', 'initial_stock', name='stockmovementtype'), nullable=False),
        sa.Column('quantity', sa.Float(), nullable=False),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('quantity_before', sa.Float(), nullable=True),
        sa.Column('quantity_after', sa.Float(), nullable=True),
        sa.Column('reference_number', sa.String(100), nullable=True),
        sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('reason_code', sa.String(50), nullable=True),
        sa.Column('movement_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
        sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock_alerts table
    op.create_table(
        'stock_alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('alert_type', sa.String(50), nullable=False),
|
||||
sa.Column('severity', sa.String(20), nullable=False, server_default='medium'),
|
||||
sa.Column('title', sa.String(255), nullable=False),
|
||||
sa.Column('message', sa.Text(), nullable=False),
|
||||
sa.Column('current_quantity', sa.Float(), nullable=True),
|
||||
sa.Column('threshold_value', sa.Float(), nullable=True),
|
||||
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
|
||||
sa.Column('is_acknowledged', sa.Boolean(), nullable=True, server_default='false'),
|
||||
sa.Column('acknowledged_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('is_resolved', sa.Boolean(), nullable=True, server_default='false'),
|
||||
sa.Column('resolved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('resolution_notes', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
|
||||
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for ingredients table
|
||||
op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
|
||||
op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'])
|
||||
op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'])
|
||||
op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])
|
||||
op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'])
|
||||
|
||||
# Create indexes for stock table
|
||||
op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'])
|
||||
op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'])
|
||||
op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'])
|
||||
op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'])
|
||||
op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'])
|
||||
|
||||
# Create indexes for stock_movements table
|
||||
op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'])
|
||||
op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'])
|
||||
op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'])
|
||||
op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'])
|
||||
op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'])
|
||||
|
||||
# Create indexes for stock_alerts table
|
||||
op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'])
|
||||
op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'])
|
||||
op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'])
|
||||
op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'])
|
||||
# TODO: Add table creation statements for inventory service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop indexes
|
||||
op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
|
||||
op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
|
||||
op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
|
||||
op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')
|
||||
|
||||
op.drop_index('idx_movements_supplier', table_name='stock_movements')
|
||||
op.drop_index('idx_movements_reference', table_name='stock_movements')
|
||||
op.drop_index('idx_movements_type', table_name='stock_movements')
|
||||
op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
|
||||
op.drop_index('idx_movements_tenant_date', table_name='stock_movements')
|
||||
|
||||
op.drop_index('idx_stock_quality', table_name='stock')
|
||||
op.drop_index('idx_stock_low_levels', table_name='stock')
|
||||
op.drop_index('idx_stock_batch', table_name='stock')
|
||||
op.drop_index('idx_stock_expiration', table_name='stock')
|
||||
op.drop_index('idx_stock_tenant_ingredient', table_name='stock')
|
||||
|
||||
op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_category', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_barcode', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')
|
||||
|
||||
# Drop tables
|
||||
op.drop_table('stock_alerts')
|
||||
op.drop_table('stock_movements')
|
||||
op.drop_table('stock')
|
||||
op.drop_table('ingredients')
|
||||
|
||||
# Drop enum types
|
||||
op.execute("DROP TYPE stockmovementtype;")
|
||||
op.execute("DROP TYPE ingredientcategory;")
|
||||
op.execute("DROP TYPE unitofmeasure;")
|
||||
# TODO: Add table drop statements for inventory service
|
||||
pass
|
||||
|
||||
@@ -1,95 +0,0 @@
|
||||
"""Add finished products support
|
||||
|
||||
Revision ID: 002
|
||||
Revises: 001
|
||||
Create Date: 2025-01-15 10:30:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '002'
|
||||
down_revision = '001'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create new enum types for finished products
|
||||
op.execute("""
|
||||
CREATE TYPE producttype AS ENUM (
|
||||
'ingredient', 'finished_product'
|
||||
);
|
||||
""")
|
||||
|
||||
op.execute("""
|
||||
CREATE TYPE productcategory AS ENUM (
|
||||
'bread', 'croissants', 'pastries', 'cakes', 'cookies',
|
||||
'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products'
|
||||
);
|
||||
""")
|
||||
|
||||
# Add new columns to ingredients table
|
||||
op.add_column('ingredients', sa.Column('product_type',
|
||||
sa.Enum('ingredient', 'finished_product', name='producttype'),
|
||||
nullable=False, server_default='ingredient'))
|
||||
|
||||
op.add_column('ingredients', sa.Column('product_category',
|
||||
sa.Enum('bread', 'croissants', 'pastries', 'cakes', 'cookies', 'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products', name='productcategory'),
|
||||
nullable=True))
|
||||
|
||||
# Rename existing category column to ingredient_category
|
||||
op.alter_column('ingredients', 'category', new_column_name='ingredient_category')
|
||||
|
||||
# Add finished product specific columns
|
||||
op.add_column('ingredients', sa.Column('supplier_name', sa.String(200), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('display_life_hours', sa.Integer(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('best_before_hours', sa.Integer(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('central_baker_product_code', sa.String(100), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('delivery_days', sa.String(20), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('minimum_order_quantity', sa.Float(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('pack_size', sa.Integer(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
|
||||
|
||||
# Update existing indexes and create new ones
|
||||
op.drop_index('idx_ingredients_category', table_name='ingredients')
|
||||
|
||||
# Create new indexes for enhanced functionality
|
||||
op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type', 'is_active'])
|
||||
op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category', 'is_active'])
|
||||
op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category', 'is_active'])
|
||||
op.create_index('idx_ingredients_central_baker', 'ingredients', ['tenant_id', 'supplier_name', 'product_type'])
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop new indexes
|
||||
op.drop_index('idx_ingredients_central_baker', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_product_category', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
|
||||
op.drop_index('idx_ingredients_product_type', table_name='ingredients')
|
||||
|
||||
# Remove finished product specific columns
|
||||
op.drop_column('ingredients', 'nutritional_info')
|
||||
op.drop_column('ingredients', 'pack_size')
|
||||
op.drop_column('ingredients', 'minimum_order_quantity')
|
||||
op.drop_column('ingredients', 'delivery_days')
|
||||
op.drop_column('ingredients', 'central_baker_product_code')
|
||||
op.drop_column('ingredients', 'best_before_hours')
|
||||
op.drop_column('ingredients', 'display_life_hours')
|
||||
op.drop_column('ingredients', 'supplier_name')
|
||||
|
||||
# Remove new columns
|
||||
op.drop_column('ingredients', 'product_category')
|
||||
op.drop_column('ingredients', 'product_type')
|
||||
|
||||
# Rename ingredient_category back to category
|
||||
op.alter_column('ingredients', 'ingredient_category', new_column_name='category')
|
||||
|
||||
# Recreate original category index
|
||||
op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])
|
||||
|
||||
# Drop new enum types
|
||||
op.execute("DROP TYPE productcategory;")
|
||||
op.execute("DROP TYPE producttype;")
|
||||
@@ -1,114 +0,0 @@
|
||||
"""Add production stage enum and columns
|
||||
|
||||
Revision ID: 003
|
||||
Revises: 002
|
||||
Create Date: 2025-01-17 15:30:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '003'
|
||||
down_revision = '002'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create ProductionStage enum type
|
||||
op.execute("""
|
||||
CREATE TYPE productionstage AS ENUM (
|
||||
'raw_ingredient', 'par_baked', 'fully_baked',
|
||||
'prepared_dough', 'frozen_product'
|
||||
);
|
||||
""")
|
||||
|
||||
# Add production_stage column to stock table
|
||||
op.add_column('stock', sa.Column('production_stage',
|
||||
sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'),
|
||||
nullable=False, server_default='raw_ingredient'))
|
||||
|
||||
# Add transformation_reference column to stock table
|
||||
op.add_column('stock', sa.Column('transformation_reference', sa.String(100), nullable=True))
|
||||
|
||||
# Add stage-specific expiration tracking columns
|
||||
op.add_column('stock', sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True))
|
||||
op.add_column('stock', sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True))
|
||||
op.add_column('stock', sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True))
|
||||
|
||||
# Create product_transformations table
|
||||
op.create_table(
|
||||
'product_transformations',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('transformation_reference', sa.String(100), nullable=False),
|
||||
sa.Column('source_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('target_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
|
||||
sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
|
||||
sa.Column('source_quantity', sa.Float(), nullable=False),
|
||||
sa.Column('target_quantity', sa.Float(), nullable=False),
|
||||
sa.Column('conversion_ratio', sa.Float(), nullable=False, server_default='1.0'),
|
||||
sa.Column('expiration_calculation_method', sa.String(50), nullable=False, server_default='days_from_transformation'),
|
||||
sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
|
||||
sa.Column('transformation_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('process_notes', sa.Text(), nullable=True),
|
||||
sa.Column('performed_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('source_batch_numbers', sa.Text(), nullable=True),
|
||||
sa.Column('target_batch_number', sa.String(100), nullable=True),
|
||||
sa.Column('is_completed', sa.Boolean(), nullable=True, server_default='true'),
|
||||
sa.Column('is_reversed', sa.Boolean(), nullable=True, server_default='false'),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
|
||||
sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Add new indexes for enhanced functionality
|
||||
op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'])
|
||||
op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'])
|
||||
op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'])
|
||||
|
||||
# Create indexes for product_transformations table
|
||||
op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'])
|
||||
op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'])
|
||||
op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'])
|
||||
op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'])
|
||||
op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'])
|
||||
|
||||
# Update existing stockmovementtype enum to include TRANSFORMATION
|
||||
op.execute("ALTER TYPE stockmovementtype ADD VALUE 'transformation';")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop indexes for product_transformations
|
||||
op.drop_index('idx_transformations_stages', table_name='product_transformations')
|
||||
op.drop_index('idx_transformations_target', table_name='product_transformations')
|
||||
op.drop_index('idx_transformations_source', table_name='product_transformations')
|
||||
op.drop_index('idx_transformations_reference', table_name='product_transformations')
|
||||
op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
|
||||
|
||||
# Drop new stock indexes
|
||||
op.drop_index('idx_stock_final_expiration', table_name='stock')
|
||||
op.drop_index('idx_stock_transformation', table_name='stock')
|
||||
op.drop_index('idx_stock_production_stage', table_name='stock')
|
||||
|
||||
# Drop product_transformations table
|
||||
op.drop_table('product_transformations')
|
||||
|
||||
# Remove new columns from stock table
|
||||
op.drop_column('stock', 'final_expiration_date')
|
||||
op.drop_column('stock', 'transformation_date')
|
||||
op.drop_column('stock', 'original_expiration_date')
|
||||
op.drop_column('stock', 'transformation_reference')
|
||||
op.drop_column('stock', 'production_stage')
|
||||
|
||||
# Drop ProductionStage enum type
|
||||
op.execute("DROP TYPE productionstage;")
|
||||
|
||||
# Note: Cannot easily remove 'transformation' from existing enum in PostgreSQL
|
||||
# This would require recreating the enum and updating all references
|
||||
# For now, we leave the enum value as it won't cause issues
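# A minimal sketch of a full removal, should it ever be needed (assumes no rows
# still reference the 'transformation' value):
#   op.execute("ALTER TYPE stockmovementtype RENAME TO stockmovementtype_old;")
#   op.execute(
#       "CREATE TYPE stockmovementtype AS ENUM ('purchase', 'production_use', "
#       "'adjustment', 'waste', 'transfer', 'return', 'initial_stock');"
#   )
#   op.execute(
#       "ALTER TABLE stock_movements ALTER COLUMN movement_type "
#       "TYPE stockmovementtype USING movement_type::text::stockmovementtype;"
#   )
#   op.execute("DROP TYPE stockmovementtype_old;")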
|
||||
@@ -1,104 +0,0 @@
|
||||
"""Move storage configuration from ingredient to batch level
|
||||
|
||||
Revision ID: 004_move_storage_config_to_batch
|
||||
Revises: 003
|
||||
Create Date: 2025-01-17 10:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '004_move_storage_config_to_batch'
|
||||
down_revision = '003'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Move storage configuration from ingredients to stock batches"""
|
||||
|
||||
# Add batch-specific storage columns to stock table
|
||||
op.add_column('stock', sa.Column('requires_refrigeration', sa.Boolean(), default=False))
|
||||
op.add_column('stock', sa.Column('requires_freezing', sa.Boolean(), default=False))
|
||||
op.add_column('stock', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
|
||||
op.add_column('stock', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
|
||||
op.add_column('stock', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
|
||||
op.add_column('stock', sa.Column('shelf_life_days', sa.Integer(), nullable=True))
|
||||
op.add_column('stock', sa.Column('storage_instructions', sa.Text(), nullable=True))
|
||||
|
||||
# Migrate existing data from ingredients to stock batches
|
||||
# This will copy the ingredient-level storage config to all existing stock batches
|
||||
op.execute("""
|
||||
UPDATE stock
|
||||
SET
|
||||
requires_refrigeration = i.requires_refrigeration,
|
||||
requires_freezing = i.requires_freezing,
|
||||
storage_temperature_min = i.storage_temperature_min,
|
||||
storage_temperature_max = i.storage_temperature_max,
|
||||
storage_humidity_max = i.storage_humidity_max,
|
||||
shelf_life_days = i.shelf_life_days,
|
||||
storage_instructions = i.storage_instructions
|
||||
FROM ingredients i
|
||||
WHERE stock.ingredient_id = i.id
|
||||
""")
|
||||
|
||||
# Remove storage configuration columns from ingredients table
|
||||
# Keep only shelf_life_days as default value
|
||||
op.drop_column('ingredients', 'requires_refrigeration')
|
||||
op.drop_column('ingredients', 'requires_freezing')
|
||||
op.drop_column('ingredients', 'storage_temperature_min')
|
||||
op.drop_column('ingredients', 'storage_temperature_max')
|
||||
op.drop_column('ingredients', 'storage_humidity_max')
|
||||
op.drop_column('ingredients', 'storage_instructions')
|
||||
|
||||
|
||||
def downgrade():
|
||||
"""Revert storage configuration back to ingredient level"""
|
||||
|
||||
# Add storage configuration columns back to ingredients table
|
||||
op.add_column('ingredients', sa.Column('requires_refrigeration', sa.Boolean(), default=False))
|
||||
op.add_column('ingredients', sa.Column('requires_freezing', sa.Boolean(), default=False))
|
||||
op.add_column('ingredients', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
|
||||
op.add_column('ingredients', sa.Column('storage_instructions', sa.Text(), nullable=True))
|
||||
|
||||
# Migrate data back from stock to ingredients (use most common values per ingredient)
|
||||
op.execute("""
|
||||
UPDATE ingredients
|
||||
SET
|
||||
requires_refrigeration = COALESCE(
|
||||
(SELECT bool_or(s.requires_refrigeration) FROM stock s WHERE s.ingredient_id = ingredients.id),
|
||||
false
|
||||
),
|
||||
requires_freezing = COALESCE(
|
||||
(SELECT bool_or(s.requires_freezing) FROM stock s WHERE s.ingredient_id = ingredients.id),
|
||||
false
|
||||
),
|
||||
storage_temperature_min = (
|
||||
SELECT MIN(s.storage_temperature_min) FROM stock s WHERE s.ingredient_id = ingredients.id
|
||||
),
|
||||
storage_temperature_max = (
|
||||
SELECT MAX(s.storage_temperature_max) FROM stock s WHERE s.ingredient_id = ingredients.id
|
||||
),
|
||||
storage_humidity_max = (
|
||||
SELECT MAX(s.storage_humidity_max) FROM stock s WHERE s.ingredient_id = ingredients.id
|
||||
),
|
||||
storage_instructions = (
|
||||
SELECT s.storage_instructions FROM stock s
|
||||
WHERE s.ingredient_id = ingredients.id
|
||||
AND s.storage_instructions IS NOT NULL
|
||||
LIMIT 1
|
||||
)
|
||||
""")
|
||||
|
||||
# Remove batch-specific storage columns from stock table
|
||||
op.drop_column('stock', 'requires_refrigeration')
|
||||
op.drop_column('stock', 'requires_freezing')
|
||||
op.drop_column('stock', 'storage_temperature_min')
|
||||
op.drop_column('stock', 'storage_temperature_max')
|
||||
op.drop_column('stock', 'storage_humidity_max')
|
||||
op.drop_column('stock', 'shelf_life_days')
|
||||
op.drop_column('stock', 'storage_instructions')
|
||||
services/notification/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/notification/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
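# e.g. a revision generated with this template is named like
# 20250117_1530_<rev>_<slug>.py (illustrative values)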
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://notification_user:password@notification-db-service:5432/notification_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
services/notification/migrations/env.py (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
"""Alembic environment configuration for notification service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
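# Note: the URL must use an async driver (e.g. postgresql+asyncpg://...) because
# migrations run through async_engine_from_config below.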
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
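# Typical invocation from the service directory (assumes DATABASE_URL is exported
# or the sqlalchemy.url in alembic.ini is reachable):
#   alembic upgrade head
#   alembic revision --autogenerate -m "describe change"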
|
||||
services/notification/migrations/script.py.mako (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Initial notification service tables
|
||||
|
||||
Revision ID: 001_initial_notification
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_notification'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# TODO: Add table creation statements for notification service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# TODO: Add table drop statements for notification service
|
||||
pass
|
||||
services/orders/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/orders/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://orders_user:password@orders-db-service:5432/orders_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
services/orders/migrations/env.py (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
"""Alembic environment configuration for orders service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
services/orders/migrations/script.py.mako (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Initial orders service tables
|
||||
|
||||
Revision ID: 001_initial_orders
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_orders'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# TODO: Add table creation statements for orders service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# TODO: Add table drop statements for orders service
|
||||
pass
|
||||
services/pos/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/pos/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://pos_user:password@pos-db-service:5432/pos_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,45 +0,0 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,97 +1,96 @@
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
"""Alembic environment configuration for pos service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the app directory to the path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
# Import all models to ensure they're registered
|
||||
from app.models import pos_config, pos_transaction, pos_webhook, pos_sync
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def get_database_url():
|
||||
"""Get database URL from settings"""
|
||||
return settings.DATABASE_URL
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = get_database_url()
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode.
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
# Override the ini file database URL with our settings
|
||||
configuration = config.get_section(config.config_ini_section)
|
||||
configuration["sqlalchemy.url"] = get_database_url()
|
||||
|
||||
connectable = engine_from_config(
|
||||
configuration,
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection, target_metadata=target_metadata
|
||||
)
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
run_migrations_online()
|
||||
|
||||
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
@@ -21,4 +23,4 @@ def upgrade() -> None:
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
${downgrades if downgrades else "pass"}
|
||||
|
||||
@@ -1,394 +1,28 @@
|
||||
"""Initial POS Integration tables
|
||||
"""Initial POS service tables
|
||||
|
||||
Revision ID: 001
|
||||
Revises:
|
||||
Create Date: 2024-01-01 00:00:00.000000
|
||||
Revision ID: 001_initial_pos
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '001'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
revision: str = '001_initial_pos'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create pos_configurations table
|
||||
op.create_table('pos_configurations',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('pos_system', sa.String(length=50), nullable=False),
|
||||
sa.Column('provider_name', sa.String(length=100), nullable=False),
|
||||
sa.Column('is_active', sa.Boolean(), nullable=False),
|
||||
sa.Column('is_connected', sa.Boolean(), nullable=False),
|
||||
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
|
||||
sa.Column('webhook_url', sa.String(length=500), nullable=True),
|
||||
sa.Column('webhook_secret', sa.String(length=255), nullable=True),
|
||||
sa.Column('environment', sa.String(length=20), nullable=False),
|
||||
sa.Column('location_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('merchant_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('sync_enabled', sa.Boolean(), nullable=False),
|
||||
sa.Column('sync_interval_minutes', sa.String(length=10), nullable=False),
|
||||
sa.Column('auto_sync_products', sa.Boolean(), nullable=False),
|
||||
sa.Column('auto_sync_transactions', sa.Boolean(), nullable=False),
|
||||
sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('last_successful_sync_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('last_sync_status', sa.String(length=50), nullable=True),
|
||||
sa.Column('last_sync_message', sa.Text(), nullable=True),
|
||||
sa.Column('provider_settings', sa.JSON(), nullable=True),
|
||||
sa.Column('last_health_check_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('health_status', sa.String(length=50), nullable=False),
|
||||
sa.Column('health_message', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('idx_pos_config_active', 'pos_configurations', ['is_active'], unique=False)
|
||||
op.create_index('idx_pos_config_connected', 'pos_configurations', ['is_connected'], unique=False)
|
||||
op.create_index('idx_pos_config_created_at', 'pos_configurations', ['created_at'], unique=False)
|
||||
op.create_index('idx_pos_config_health_status', 'pos_configurations', ['health_status'], unique=False)
|
||||
op.create_index('idx_pos_config_sync_enabled', 'pos_configurations', ['sync_enabled'], unique=False)
|
||||
op.create_index('idx_pos_config_tenant_pos_system', 'pos_configurations', ['tenant_id', 'pos_system'], unique=False)
|
||||
op.create_index(op.f('ix_pos_configurations_id'), 'pos_configurations', ['id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_configurations_tenant_id'), 'pos_configurations', ['tenant_id'], unique=False)
|
||||
|
||||
# Create pos_transactions table
|
||||
op.create_table('pos_transactions',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('pos_config_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('pos_system', sa.String(length=50), nullable=False),
|
||||
sa.Column('external_transaction_id', sa.String(length=255), nullable=False),
|
||||
sa.Column('external_order_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('transaction_type', sa.String(length=50), nullable=False),
|
||||
sa.Column('status', sa.String(length=50), nullable=False),
|
||||
sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('tip_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('currency', sa.String(length=3), nullable=False),
|
||||
sa.Column('payment_method', sa.String(length=50), nullable=True),
|
||||
sa.Column('payment_status', sa.String(length=50), nullable=True),
|
||||
sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('pos_created_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('pos_updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('location_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('location_name', sa.String(length=255), nullable=True),
|
||||
sa.Column('staff_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('staff_name', sa.String(length=255), nullable=True),
|
||||
sa.Column('customer_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('customer_email', sa.String(length=255), nullable=True),
|
||||
sa.Column('customer_phone', sa.String(length=50), nullable=True),
|
||||
sa.Column('order_type', sa.String(length=50), nullable=True),
|
||||
sa.Column('table_number', sa.String(length=20), nullable=True),
|
||||
sa.Column('receipt_number', sa.String(length=100), nullable=True),
|
||||
sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
|
||||
sa.Column('sales_record_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('sync_attempted_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('sync_completed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('sync_error', sa.Text(), nullable=True),
|
||||
sa.Column('sync_retry_count', sa.Integer(), nullable=False),
|
||||
sa.Column('raw_data', sa.JSON(), nullable=True),
|
||||
sa.Column('is_processed', sa.Boolean(), nullable=False),
|
||||
sa.Column('processing_error', sa.Text(), nullable=True),
|
||||
sa.Column('is_duplicate', sa.Boolean(), nullable=False),
|
||||
sa.Column('duplicate_of', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('idx_pos_transaction_customer', 'pos_transactions', ['customer_id'], unique=False)
|
||||
op.create_index('idx_pos_transaction_duplicate', 'pos_transactions', ['is_duplicate'], unique=False)
|
||||
op.create_index('idx_pos_transaction_external_id', 'pos_transactions', ['pos_system', 'external_transaction_id'], unique=False)
|
||||
op.create_index('idx_pos_transaction_location', 'pos_transactions', ['location_id'], unique=False)
|
||||
op.create_index('idx_pos_transaction_processed', 'pos_transactions', ['is_processed'], unique=False)
|
||||
op.create_index('idx_pos_transaction_status', 'pos_transactions', ['status'], unique=False)
|
||||
op.create_index('idx_pos_transaction_sync_status', 'pos_transactions', ['is_synced_to_sales'], unique=False)
|
||||
op.create_index('idx_pos_transaction_tenant_date', 'pos_transactions', ['tenant_id', 'transaction_date'], unique=False)
|
||||
op.create_index('idx_pos_transaction_type', 'pos_transactions', ['transaction_type'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_external_order_id'), 'pos_transactions', ['external_order_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_external_transaction_id'), 'pos_transactions', ['external_transaction_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_id'), 'pos_transactions', ['id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_pos_config_id'), 'pos_transactions', ['pos_config_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_pos_system'), 'pos_transactions', ['pos_system'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_sales_record_id'), 'pos_transactions', ['sales_record_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_tenant_id'), 'pos_transactions', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transactions_transaction_date'), 'pos_transactions', ['transaction_date'], unique=False)
|
||||
|
||||
# Create pos_transaction_items table
|
||||
op.create_table('pos_transaction_items',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('transaction_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('external_item_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('sku', sa.String(length=100), nullable=True),
|
||||
sa.Column('product_name', sa.String(length=255), nullable=False),
|
||||
sa.Column('product_category', sa.String(length=100), nullable=True),
|
||||
sa.Column('product_subcategory', sa.String(length=100), nullable=True),
|
||||
sa.Column('quantity', sa.Numeric(precision=10, scale=3), nullable=False),
|
||||
sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('total_price', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
|
||||
sa.Column('modifiers', sa.JSON(), nullable=True),
|
||||
sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('is_mapped_to_inventory', sa.Boolean(), nullable=False),
|
||||
sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
|
||||
sa.Column('sync_error', sa.Text(), nullable=True),
|
||||
sa.Column('raw_data', sa.JSON(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['transaction_id'], ['pos_transactions.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('idx_pos_item_category', 'pos_transaction_items', ['product_category'], unique=False)
|
||||
op.create_index('idx_pos_item_inventory', 'pos_transaction_items', ['inventory_product_id'], unique=False)
|
||||
op.create_index('idx_pos_item_mapped', 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)
|
||||
op.create_index('idx_pos_item_product', 'pos_transaction_items', ['product_name'], unique=False)
|
||||
op.create_index('idx_pos_item_sku', 'pos_transaction_items', ['sku'], unique=False)
|
||||
op.create_index('idx_pos_item_sync', 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
|
||||
op.create_index('idx_pos_item_transaction', 'pos_transaction_items', ['transaction_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_id'), 'pos_transaction_items', ['id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_inventory_product_id'), 'pos_transaction_items', ['inventory_product_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_product_category'), 'pos_transaction_items', ['product_category'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_sku'), 'pos_transaction_items', ['sku'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_tenant_id'), 'pos_transaction_items', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_transaction_items_transaction_id'), 'pos_transaction_items', ['transaction_id'], unique=False)
|
||||
|
||||
# Create pos_webhook_logs table
|
||||
op.create_table('pos_webhook_logs',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('pos_system', sa.String(length=50), nullable=False),
|
||||
sa.Column('webhook_type', sa.String(length=100), nullable=False),
|
||||
sa.Column('method', sa.String(length=10), nullable=False),
|
||||
sa.Column('url_path', sa.String(length=500), nullable=False),
|
||||
sa.Column('query_params', sa.JSON(), nullable=True),
|
||||
sa.Column('headers', sa.JSON(), nullable=True),
|
||||
sa.Column('raw_payload', sa.Text(), nullable=False),
|
||||
sa.Column('payload_size', sa.Integer(), nullable=False),
|
||||
sa.Column('content_type', sa.String(length=100), nullable=True),
|
||||
sa.Column('signature', sa.String(length=500), nullable=True),
|
||||
sa.Column('is_signature_valid', sa.Boolean(), nullable=True),
|
||||
sa.Column('source_ip', sa.String(length=45), nullable=True),
|
||||
sa.Column('status', sa.String(length=50), nullable=False),
|
||||
sa.Column('processing_started_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('processing_completed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('processing_duration_ms', sa.Integer(), nullable=True),
|
||||
sa.Column('error_message', sa.Text(), nullable=True),
|
||||
sa.Column('error_code', sa.String(length=50), nullable=True),
|
||||
sa.Column('retry_count', sa.Integer(), nullable=False),
|
||||
sa.Column('max_retries', sa.Integer(), nullable=False),
|
||||
sa.Column('response_status_code', sa.Integer(), nullable=True),
|
||||
sa.Column('response_body', sa.Text(), nullable=True),
|
||||
sa.Column('response_sent_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('event_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('event_timestamp', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('sequence_number', sa.Integer(), nullable=True),
|
||||
sa.Column('transaction_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('order_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('customer_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('created_transaction_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('updated_transaction_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('is_duplicate', sa.Boolean(), nullable=False),
|
||||
sa.Column('duplicate_of', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('priority', sa.String(length=20), nullable=False),
|
||||
sa.Column('user_agent', sa.String(length=500), nullable=True),
|
||||
sa.Column('forwarded_for', sa.String(length=200), nullable=True),
|
||||
sa.Column('request_id', sa.String(length=100), nullable=True),
|
||||
sa.Column('received_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('idx_webhook_duplicate', 'pos_webhook_logs', ['is_duplicate'], unique=False)
|
||||
op.create_index('idx_webhook_event_id', 'pos_webhook_logs', ['event_id'], unique=False)
|
||||
op.create_index('idx_webhook_order_id', 'pos_webhook_logs', ['order_id'], unique=False)
|
||||
op.create_index('idx_webhook_pos_system_type', 'pos_webhook_logs', ['pos_system', 'webhook_type'], unique=False)
|
||||
op.create_index('idx_webhook_priority', 'pos_webhook_logs', ['priority'], unique=False)
|
||||
op.create_index('idx_webhook_received_at', 'pos_webhook_logs', ['received_at'], unique=False)
|
||||
op.create_index('idx_webhook_retry', 'pos_webhook_logs', ['retry_count'], unique=False)
|
||||
op.create_index('idx_webhook_signature_valid', 'pos_webhook_logs', ['is_signature_valid'], unique=False)
|
||||
op.create_index('idx_webhook_status', 'pos_webhook_logs', ['status'], unique=False)
|
||||
op.create_index('idx_webhook_tenant_received', 'pos_webhook_logs', ['tenant_id', 'received_at'], unique=False)
|
||||
op.create_index('idx_webhook_transaction_id', 'pos_webhook_logs', ['transaction_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_event_id'), 'pos_webhook_logs', ['event_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_id'), 'pos_webhook_logs', ['id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_pos_system'), 'pos_webhook_logs', ['pos_system'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_received_at'), 'pos_webhook_logs', ['received_at'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_tenant_id'), 'pos_webhook_logs', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_transaction_id'), 'pos_webhook_logs', ['transaction_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_webhook_logs_webhook_type'), 'pos_webhook_logs', ['webhook_type'], unique=False)
|
||||
|
||||
# Create pos_sync_logs table
|
||||
op.create_table('pos_sync_logs',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('pos_config_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('sync_type', sa.String(length=50), nullable=False),
|
||||
sa.Column('sync_direction', sa.String(length=20), nullable=False),
|
||||
sa.Column('data_type', sa.String(length=50), nullable=False),
|
||||
sa.Column('pos_system', sa.String(length=50), nullable=False),
|
||||
sa.Column('status', sa.String(length=50), nullable=False),
|
||||
sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('duration_seconds', sa.Numeric(precision=10, scale=3), nullable=True),
|
||||
sa.Column('sync_from_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('sync_to_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('records_requested', sa.Integer(), nullable=False),
|
||||
sa.Column('records_processed', sa.Integer(), nullable=False),
|
||||
sa.Column('records_created', sa.Integer(), nullable=False),
|
||||
sa.Column('records_updated', sa.Integer(), nullable=False),
|
||||
sa.Column('records_skipped', sa.Integer(), nullable=False),
|
||||
sa.Column('records_failed', sa.Integer(), nullable=False),
|
||||
sa.Column('api_calls_made', sa.Integer(), nullable=False),
|
||||
sa.Column('api_rate_limit_hits', sa.Integer(), nullable=False),
|
||||
sa.Column('total_api_time_ms', sa.Integer(), nullable=False),
|
||||
sa.Column('error_message', sa.Text(), nullable=True),
|
||||
sa.Column('error_code', sa.String(length=100), nullable=True),
|
||||
sa.Column('error_details', sa.JSON(), nullable=True),
|
||||
sa.Column('retry_attempt', sa.Integer(), nullable=False),
|
||||
sa.Column('max_retries', sa.Integer(), nullable=False),
|
||||
sa.Column('parent_sync_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('sync_configuration', sa.JSON(), nullable=True),
|
||||
sa.Column('current_page', sa.Integer(), nullable=True),
|
||||
sa.Column('total_pages', sa.Integer(), nullable=True),
|
||||
sa.Column('current_batch', sa.Integer(), nullable=True),
|
||||
sa.Column('total_batches', sa.Integer(), nullable=True),
|
||||
sa.Column('progress_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('validation_errors', sa.JSON(), nullable=True),
|
||||
sa.Column('data_quality_score', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('memory_usage_mb', sa.Numeric(precision=10, scale=2), nullable=True),
|
||||
sa.Column('cpu_usage_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('network_bytes_received', sa.Integer(), nullable=True),
|
||||
sa.Column('network_bytes_sent', sa.Integer(), nullable=True),
|
||||
sa.Column('revenue_synced', sa.Numeric(precision=12, scale=2), nullable=True),
|
||||
sa.Column('transactions_synced', sa.Integer(), nullable=False),
|
||||
sa.Column('triggered_by', sa.String(length=50), nullable=True),
|
||||
sa.Column('triggered_by_user_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('trigger_details', sa.JSON(), nullable=True),
|
||||
sa.Column('external_batch_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('webhook_log_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('tags', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('idx_sync_log_completed', 'pos_sync_logs', ['completed_at'], unique=False)
|
||||
op.create_index('idx_sync_log_data_type', 'pos_sync_logs', ['data_type'], unique=False)
|
||||
op.create_index('idx_sync_log_duration', 'pos_sync_logs', ['duration_seconds'], unique=False)
|
||||
op.create_index('idx_sync_log_external_batch', 'pos_sync_logs', ['external_batch_id'], unique=False)
|
||||
op.create_index('idx_sync_log_parent', 'pos_sync_logs', ['parent_sync_id'], unique=False)
|
||||
op.create_index('idx_sync_log_pos_system_type', 'pos_sync_logs', ['pos_system', 'sync_type'], unique=False)
|
||||
op.create_index('idx_sync_log_retry', 'pos_sync_logs', ['retry_attempt'], unique=False)
|
||||
op.create_index('idx_sync_log_status', 'pos_sync_logs', ['status'], unique=False)
|
||||
op.create_index('idx_sync_log_tenant_started', 'pos_sync_logs', ['tenant_id', 'started_at'], unique=False)
|
||||
op.create_index('idx_sync_log_trigger', 'pos_sync_logs', ['triggered_by'], unique=False)
|
||||
op.create_index('idx_sync_log_webhook', 'pos_sync_logs', ['webhook_log_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_data_type'), 'pos_sync_logs', ['data_type'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_id'), 'pos_sync_logs', ['id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_pos_config_id'), 'pos_sync_logs', ['pos_config_id'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_pos_system'), 'pos_sync_logs', ['pos_system'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_started_at'), 'pos_sync_logs', ['started_at'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_sync_type'), 'pos_sync_logs', ['sync_type'], unique=False)
|
||||
op.create_index(op.f('ix_pos_sync_logs_tenant_id'), 'pos_sync_logs', ['tenant_id'], unique=False)
|
||||
# TODO: Add table creation statements for POS service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop pos_sync_logs table
|
||||
op.drop_index(op.f('ix_pos_sync_logs_tenant_id'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_sync_type'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_started_at'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_pos_system'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_pos_config_id'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_id'), table_name='pos_sync_logs')
|
||||
op.drop_index(op.f('ix_pos_sync_logs_data_type'), table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_webhook', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_trigger', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_tenant_started', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_status', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_retry', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_pos_system_type', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_parent', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_external_batch', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_duration', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_data_type', table_name='pos_sync_logs')
|
||||
op.drop_index('idx_sync_log_completed', table_name='pos_sync_logs')
|
||||
op.drop_table('pos_sync_logs')
|
||||
|
||||
# Drop pos_webhook_logs table
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_webhook_type'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_transaction_id'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_tenant_id'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_received_at'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_pos_system'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_id'), table_name='pos_webhook_logs')
|
||||
op.drop_index(op.f('ix_pos_webhook_logs_event_id'), table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_transaction_id', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_tenant_received', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_status', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_signature_valid', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_retry', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_received_at', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_priority', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_pos_system_type', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_order_id', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_event_id', table_name='pos_webhook_logs')
|
||||
op.drop_index('idx_webhook_duplicate', table_name='pos_webhook_logs')
|
||||
op.drop_table('pos_webhook_logs')
|
||||
|
||||
# Drop pos_transaction_items table
|
||||
op.drop_index(op.f('ix_pos_transaction_items_transaction_id'), table_name='pos_transaction_items')
|
||||
op.drop_index(op.f('ix_pos_transaction_items_tenant_id'), table_name='pos_transaction_items')
|
||||
op.drop_index(op.f('ix_pos_transaction_items_sku'), table_name='pos_transaction_items')
|
||||
op.drop_index(op.f('ix_pos_transaction_items_product_category'), table_name='pos_transaction_items')
|
||||
op.drop_index(op.f('ix_pos_transaction_items_inventory_product_id'), table_name='pos_transaction_items')
|
||||
op.drop_index(op.f('ix_pos_transaction_items_id'), table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_transaction', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_sync', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_sku', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_product', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_mapped', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_inventory', table_name='pos_transaction_items')
|
||||
op.drop_index('idx_pos_item_category', table_name='pos_transaction_items')
|
||||
op.drop_table('pos_transaction_items')
|
||||
|
||||
# Drop pos_transactions table
|
||||
op.drop_index(op.f('ix_pos_transactions_transaction_date'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_tenant_id'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_sales_record_id'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_pos_system'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_pos_config_id'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_id'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_external_transaction_id'), table_name='pos_transactions')
|
||||
op.drop_index(op.f('ix_pos_transactions_external_order_id'), table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_type', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_tenant_date', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_sync_status', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_status', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_processed', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_location', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_external_id', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_duplicate', table_name='pos_transactions')
|
||||
op.drop_index('idx_pos_transaction_customer', table_name='pos_transactions')
|
||||
op.drop_table('pos_transactions')
|
||||
|
||||
# Drop pos_configurations table
|
||||
op.drop_index(op.f('ix_pos_configurations_tenant_id'), table_name='pos_configurations')
|
||||
op.drop_index(op.f('ix_pos_configurations_id'), table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_tenant_pos_system', table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_sync_enabled', table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_health_status', table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_created_at', table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_connected', table_name='pos_configurations')
|
||||
op.drop_index('idx_pos_config_active', table_name='pos_configurations')
|
||||
op.drop_table('pos_configurations')
|
||||
# TODO: Add table drop statements for POS service
|
||||
pass
|
||||
|
||||
84
services/production/alembic.ini
Normal file
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/production/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
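# Illustrative example (assumed, not part of the template itself): with this
# pattern and the Europe/Madrid timezone below, a revision generated on
# 2024-01-01 at 12:00 would be written as 20240101_1200_<revision-id>_<slug>.py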
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://production_user:password@production-db-service:5432/production_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
96
services/production/migrations/env.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Alembic environment configuration for production service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
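As a usage sketch (not part of this commit), the same alembic.ini can also drive migrations programmatically, for example from a service startup script. The helper below is an assumption-laden illustration: it expects to run from the service directory so that alembic.ini and the migrations/ folder resolve correctly, and it uses only the documented Alembic command API.

# Hypothetical helper, not included in this commit: applies all pending
# migrations using the Alembic command API and the service's alembic.ini.
from alembic import command
from alembic.config import Config


def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    cfg = Config(ini_path)          # reads script_location, sqlalchemy.url, etc.
    command.upgrade(cfg, "head")    # same effect as running `alembic upgrade head`


if __name__ == "__main__":
    upgrade_to_head()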
|
||||
26
services/production/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Initial production service tables
|
||||
|
||||
Revision ID: 001_initial_production
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_production'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# TODO: Add table creation statements for production service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# TODO: Add table drop statements for production service
|
||||
pass
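As a rough sketch of what might eventually replace the placeholder above (the table, column, and index names here are purely illustrative and not defined anywhere in this commit), an upgrade/downgrade pair would follow the same op.create_table / op.drop_table pattern used by the other services, reusing the op, sa, and postgresql imports already present in the file.

# Purely illustrative replacement for the placeholder migration; the real
# production-service tables are not part of this commit.
def upgrade() -> None:
    op.create_table(
        'production_batches',  # hypothetical table name
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_production_batch_tenant', 'production_batches', ['tenant_id'], unique=False)


def downgrade() -> None:
    op.drop_index('idx_production_batch_tenant', table_name='production_batches')
    op.drop_table('production_batches')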
|
||||
@@ -1,183 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to add sample recipes for testing
|
||||
"""
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
# Add the app directory to Python path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), 'app'))
|
||||
|
||||
from core.database import get_db_session
|
||||
from repositories.recipe_repository import RecipeRepository
|
||||
from schemas.recipes import RecipeCreate, RecipeIngredientCreate
|
||||
|
||||
# Sample tenant ID - you should replace this with a real tenant ID from your system
|
||||
SAMPLE_TENANT_ID = "946206b3-7446-436b-b29d-f265b28d9ff5"
|
||||
|
||||
# Sample finished product IDs - you should replace these with real product IDs from your system
|
||||
SAMPLE_PRODUCT_IDS = [
|
||||
"550e8400-e29b-41d4-a716-446655440001", # Pan Integral
|
||||
"550e8400-e29b-41d4-a716-446655440002", # Croissant
|
||||
"550e8400-e29b-41d4-a716-446655440003", # Tarta de Manzana
|
||||
"550e8400-e29b-41d4-a716-446655440004", # Magdalenas
|
||||
]
|
||||
|
||||
# Sample ingredient IDs - you should replace these with real ingredient IDs from your system
|
||||
SAMPLE_INGREDIENT_IDS = [
|
||||
"660e8400-e29b-41d4-a716-446655440001", # Harina integral
|
||||
"660e8400-e29b-41d4-a716-446655440002", # Agua
|
||||
"660e8400-e29b-41d4-a716-446655440003", # Levadura
|
||||
"660e8400-e29b-41d4-a716-446655440004", # Sal
|
||||
"660e8400-e29b-41d4-a716-446655440005", # Harina de fuerza
|
||||
"660e8400-e29b-41d4-a716-446655440006", # Mantequilla
|
||||
"660e8400-e29b-41d4-a716-446655440007", # Leche
|
||||
"660e8400-e29b-41d4-a716-446655440008", # Azúcar
|
||||
"660e8400-e29b-41d4-a716-446655440009", # Manzanas
|
||||
"660e8400-e29b-41d4-a716-446655440010", # Huevos
|
||||
"660e8400-e29b-41d4-a716-446655440011", # Limón
|
||||
"660e8400-e29b-41d4-a716-446655440012", # Canela
|
||||
]
|
||||
|
||||
async def add_sample_recipes():
|
||||
"""Add sample recipes to the database"""
|
||||
async with get_db_session() as session:
|
||||
recipe_repo = RecipeRepository(session)
|
||||
|
||||
sample_recipes = [
|
||||
{
|
||||
"name": "Pan de Molde Integral",
|
||||
"recipe_code": "PAN001",
|
||||
"finished_product_id": SAMPLE_PRODUCT_IDS[0],
|
||||
"description": "Pan integral artesanal con semillas, perfecto para desayunos saludables.",
|
||||
"category": "bread",
|
||||
"difficulty_level": 2,
|
||||
"yield_quantity": 1,
|
||||
"yield_unit": "units",
|
||||
"prep_time_minutes": 120,
|
||||
"cook_time_minutes": 35,
|
||||
"total_time_minutes": 155,
|
||||
"is_signature_item": False,
|
||||
"target_margin_percentage": Decimal("40.0"),
|
||||
"ingredients": [
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 500, "unit": "g", "is_optional": False, "ingredient_order": 1},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[1], "quantity": 300, "unit": "ml", "is_optional": False, "ingredient_order": 2},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[2], "quantity": 10, "unit": "g", "is_optional": False, "ingredient_order": 3},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[3], "quantity": 8, "unit": "g", "is_optional": False, "ingredient_order": 4},
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Croissants de Mantequilla",
|
||||
"recipe_code": "CRO001",
|
||||
"finished_product_id": SAMPLE_PRODUCT_IDS[1],
|
||||
"description": "Croissants franceses tradicionales con laminado de mantequilla.",
|
||||
"category": "pastry",
|
||||
"difficulty_level": 3,
|
||||
"yield_quantity": 12,
|
||||
"yield_unit": "units",
|
||||
"prep_time_minutes": 480,
|
||||
"cook_time_minutes": 20,
|
||||
"total_time_minutes": 500,
|
||||
"is_signature_item": True,
|
||||
"target_margin_percentage": Decimal("52.8"),
|
||||
"ingredients": [
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[4], "quantity": 500, "unit": "g", "is_optional": False, "ingredient_order": 1},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[5], "quantity": 250, "unit": "g", "is_optional": False, "ingredient_order": 2},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[6], "quantity": 150, "unit": "ml", "is_optional": False, "ingredient_order": 3},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 50, "unit": "g", "is_optional": False, "ingredient_order": 4},
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Tarta de Manzana",
|
||||
"recipe_code": "TAR001",
|
||||
"finished_product_id": SAMPLE_PRODUCT_IDS[2],
|
||||
"description": "Tarta casera de manzana con canela y masa quebrada.",
|
||||
"category": "cake",
|
||||
"difficulty_level": 1,
|
||||
"yield_quantity": 8,
|
||||
"yield_unit": "portions",
|
||||
"prep_time_minutes": 45,
|
||||
"cook_time_minutes": 40,
|
||||
"total_time_minutes": 85,
|
||||
"is_signature_item": False,
|
||||
"target_margin_percentage": Decimal("65.0"),
|
||||
"ingredients": [
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[8], "quantity": 1000, "unit": "g", "is_optional": False, "ingredient_order": 1},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 250, "unit": "g", "is_optional": False, "ingredient_order": 2},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[5], "quantity": 125, "unit": "g", "is_optional": False, "ingredient_order": 3},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 100, "unit": "g", "is_optional": False, "ingredient_order": 4},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[11], "quantity": 5, "unit": "g", "is_optional": True, "ingredient_order": 5},
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Magdalenas de Limón",
|
||||
"recipe_code": "MAG001",
|
||||
"finished_product_id": SAMPLE_PRODUCT_IDS[3],
|
||||
"description": "Magdalenas suaves y esponjosas con ralladura de limón.",
|
||||
"category": "pastry",
|
||||
"difficulty_level": 1,
|
||||
"yield_quantity": 12,
|
||||
"yield_unit": "units",
|
||||
"prep_time_minutes": 20,
|
||||
"cook_time_minutes": 25,
|
||||
"total_time_minutes": 45,
|
||||
"is_signature_item": False,
|
||||
"target_margin_percentage": Decimal("57.8"),
|
||||
"ingredients": [
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 200, "unit": "g", "is_optional": False, "ingredient_order": 1},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[9], "quantity": 3, "unit": "units", "is_optional": False, "ingredient_order": 2},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 150, "unit": "g", "is_optional": False, "ingredient_order": 3},
|
||||
{"ingredient_id": SAMPLE_INGREDIENT_IDS[10], "quantity": 2, "unit": "units", "is_optional": False, "ingredient_order": 4},
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
for recipe_data in sample_recipes:
|
||||
try:
|
||||
# Prepare ingredients
|
||||
ingredients = [
|
||||
RecipeIngredientCreate(**ing_data)
|
||||
for ing_data in recipe_data.pop("ingredients")
|
||||
]
|
||||
|
||||
# Create recipe
|
||||
recipe_create = RecipeCreate(
|
||||
**recipe_data,
|
||||
ingredients=ingredients
|
||||
)
|
||||
|
||||
# Check if recipe already exists
|
||||
existing_recipes = await recipe_repo.search_recipes(
|
||||
tenant_id=SAMPLE_TENANT_ID,
|
||||
search_term=recipe_data["name"]
|
||||
)
|
||||
|
||||
recipe_exists = any(
|
||||
recipe.name == recipe_data["name"]
|
||||
for recipe in existing_recipes
|
||||
)
|
||||
|
||||
if not recipe_exists:
|
||||
recipe = await recipe_repo.create_recipe(
|
||||
tenant_id=SAMPLE_TENANT_ID,
|
||||
recipe_data=recipe_create
|
||||
)
|
||||
print(f"✅ Created recipe: {recipe.name}")
|
||||
else:
|
||||
print(f"⏭️ Recipe already exists: {recipe_data['name']}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error creating recipe {recipe_data['name']}: {e}")
|
||||
|
||||
await session.commit()
|
||||
print(f"\n🎉 Sample recipes setup completed!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("🧁 Adding sample recipes to database...")
|
||||
print(f"📍 Tenant ID: {SAMPLE_TENANT_ID}")
|
||||
print("=" * 50)
|
||||
|
||||
asyncio.run(add_sample_recipes())
|
||||
84
services/recipes/alembic.ini
Normal file
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/recipes/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://recipes_user:password@recipes-db-service:5432/recipes_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,41 +0,0 @@
|
||||
"""Add quality check configuration to recipes
|
||||
|
||||
Revision ID: 004
|
||||
Revises: 003
|
||||
Create Date: 2024-01-15 10:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '004'
|
||||
down_revision = '003'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Upgrade database schema to add quality check configuration"""
|
||||
|
||||
# Add quality_check_configuration column to recipes table
|
||||
op.add_column('recipes', sa.Column('quality_check_configuration', postgresql.JSONB, nullable=True))
|
||||
|
||||
# Create index for better performance on quality configuration queries
|
||||
op.create_index(
|
||||
'ix_recipes_quality_check_configuration',
|
||||
'recipes',
|
||||
['quality_check_configuration'],
|
||||
postgresql_using='gin'
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
"""Downgrade database schema"""
|
||||
|
||||
# Drop index
|
||||
op.drop_index('ix_recipes_quality_check_configuration')
|
||||
|
||||
# Remove quality_check_configuration column
|
||||
op.drop_column('recipes', 'quality_check_configuration')
|
||||
@@ -1,94 +0,0 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = .
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version number format string
|
||||
# version_num_format = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d
|
||||
|
||||
# version number path regex
|
||||
# version_path_separator = :
|
||||
# version_path_separator = os # Use os.pathsep. Default configuration used when version_path_separator is not provided
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = postgresql://recipes_user:recipes_pass@localhost:5432/recipes_db
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,71 +1,96 @@
|
||||
"""Alembic environment configuration for recipes service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the parent directory to the path so we can import our modules
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Import the Base class and all models
|
||||
from shared.database.base import Base
|
||||
from app.models.recipes import *
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set the target metadata
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
def get_database_url():
|
||||
"""Get database URL from environment or config"""
|
||||
return os.getenv('RECIPES_DATABASE_URL', config.get_main_option("sqlalchemy.url"))
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = get_database_url()
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
configuration = config.get_section(config.config_ini_section)
|
||||
configuration["sqlalchemy.url"] = get_database_url()
|
||||
|
||||
connectable = engine_from_config(
|
||||
configuration,
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata
|
||||
)
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
run_migrations_online()
|
||||
|
||||
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
@@ -21,4 +23,4 @@ def upgrade() -> None:
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
${downgrades if downgrades else "pass"}
|
||||
|
||||
@@ -1,240 +1,29 @@
|
||||
"""Initial recipe management tables
|
||||
"""Initial ecipes service tables
|
||||
|
||||
Revision ID: 001_initial_recipe_tables
|
||||
Revises:
|
||||
Create Date: 2024-01-15 10:00:00.000000
|
||||
Revision ID: 001_initial_recipes
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '001_initial_recipe_tables'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
revision: str = '001_initial_recipes'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create enum types
|
||||
op.execute("CREATE TYPE recipestatus AS ENUM ('draft', 'active', 'testing', 'archived', 'discontinued')")
|
||||
op.execute("CREATE TYPE productionstatus AS ENUM ('planned', 'in_progress', 'completed', 'failed', 'cancelled')")
|
||||
op.execute("CREATE TYPE measurementunit AS ENUM ('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%')")
|
||||
op.execute("CREATE TYPE productionpriority AS ENUM ('low', 'normal', 'high', 'urgent')")
|
||||
|
||||
# Create recipes table
|
||||
op.create_table(
|
||||
'recipes',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('name', sa.String(255), nullable=False, index=True),
|
||||
sa.Column('recipe_code', sa.String(100), nullable=True, index=True),
|
||||
sa.Column('version', sa.String(20), nullable=False, default='1.0'),
|
||||
sa.Column('finished_product_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('description', sa.Text, nullable=True),
|
||||
sa.Column('category', sa.String(100), nullable=True, index=True),
|
||||
sa.Column('cuisine_type', sa.String(100), nullable=True),
|
||||
sa.Column('difficulty_level', sa.Integer, nullable=False, default=1),
|
||||
sa.Column('yield_quantity', sa.Float, nullable=False),
|
||||
sa.Column('yield_unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
|
||||
sa.Column('prep_time_minutes', sa.Integer, nullable=True),
|
||||
sa.Column('cook_time_minutes', sa.Integer, nullable=True),
|
||||
sa.Column('total_time_minutes', sa.Integer, nullable=True),
|
||||
sa.Column('rest_time_minutes', sa.Integer, nullable=True),
|
||||
sa.Column('estimated_cost_per_unit', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('last_calculated_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('cost_calculation_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('target_margin_percentage', sa.Float, nullable=True),
|
||||
sa.Column('suggested_selling_price', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('instructions', postgresql.JSONB, nullable=True),
|
||||
sa.Column('preparation_notes', sa.Text, nullable=True),
|
||||
sa.Column('storage_instructions', sa.Text, nullable=True),
|
||||
sa.Column('quality_standards', sa.Text, nullable=True),
|
||||
sa.Column('serves_count', sa.Integer, nullable=True),
|
||||
sa.Column('nutritional_info', postgresql.JSONB, nullable=True),
|
||||
sa.Column('allergen_info', postgresql.JSONB, nullable=True),
|
||||
sa.Column('dietary_tags', postgresql.JSONB, nullable=True),
|
||||
sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
|
||||
sa.Column('minimum_batch_size', sa.Float, nullable=True),
|
||||
sa.Column('maximum_batch_size', sa.Float, nullable=True),
|
||||
sa.Column('optimal_production_temperature', sa.Float, nullable=True),
|
||||
sa.Column('optimal_humidity', sa.Float, nullable=True),
|
||||
sa.Column('quality_check_points', postgresql.JSONB, nullable=True),
|
||||
sa.Column('common_issues', postgresql.JSONB, nullable=True),
|
||||
sa.Column('status', sa.Enum('draft', 'active', 'testing', 'archived', 'discontinued', name='recipestatus'), nullable=False, default='draft', index=True),
|
||||
sa.Column('is_seasonal', sa.Boolean, default=False),
|
||||
sa.Column('season_start_month', sa.Integer, nullable=True),
|
||||
sa.Column('season_end_month', sa.Integer, nullable=True),
|
||||
sa.Column('is_signature_item', sa.Boolean, default=False),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('updated_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
)
|
||||
|
||||
# Create recipe ingredients table
|
||||
op.create_table(
|
||||
'recipe_ingredients',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id', ondelete='CASCADE'), nullable=False, index=True),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('quantity', sa.Float, nullable=False),
|
||||
sa.Column('unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
|
||||
sa.Column('quantity_in_base_unit', sa.Float, nullable=True),
|
||||
sa.Column('alternative_quantity', sa.Float, nullable=True),
|
||||
sa.Column('alternative_unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=True),
|
||||
sa.Column('preparation_method', sa.String(255), nullable=True),
|
||||
sa.Column('ingredient_notes', sa.Text, nullable=True),
|
||||
sa.Column('is_optional', sa.Boolean, default=False),
|
||||
sa.Column('ingredient_order', sa.Integer, nullable=False, default=1),
|
||||
sa.Column('ingredient_group', sa.String(100), nullable=True),
|
||||
sa.Column('substitution_options', postgresql.JSONB, nullable=True),
|
||||
sa.Column('substitution_ratio', sa.Float, nullable=True),
|
||||
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('cost_updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
)
|
||||
|
||||
# Create production batches table
|
||||
op.create_table(
|
||||
'production_batches',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id'), nullable=False, index=True),
|
||||
sa.Column('batch_number', sa.String(100), nullable=False, index=True),
|
||||
sa.Column('production_date', sa.DateTime(timezone=True), nullable=False, index=True),
|
||||
sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('planned_quantity', sa.Float, nullable=False),
|
||||
sa.Column('actual_quantity', sa.Float, nullable=True),
|
||||
sa.Column('yield_percentage', sa.Float, nullable=True),
|
||||
sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
|
||||
sa.Column('status', sa.Enum('planned', 'in_progress', 'completed', 'failed', 'cancelled', name='productionstatus'), nullable=False, default='planned', index=True),
|
||||
sa.Column('priority', sa.Enum('low', 'normal', 'high', 'urgent', name='productionpriority'), nullable=False, default='normal'),
|
||||
sa.Column('assigned_staff', postgresql.JSONB, nullable=True),
|
||||
sa.Column('production_notes', sa.Text, nullable=True),
|
||||
sa.Column('quality_score', sa.Float, nullable=True),
|
||||
sa.Column('quality_notes', sa.Text, nullable=True),
|
||||
sa.Column('defect_rate', sa.Float, nullable=True),
|
||||
sa.Column('rework_required', sa.Boolean, default=False),
|
||||
sa.Column('planned_material_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('actual_material_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('labor_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('overhead_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('total_production_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('cost_per_unit', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('production_temperature', sa.Float, nullable=True),
|
||||
sa.Column('production_humidity', sa.Float, nullable=True),
|
||||
sa.Column('oven_temperature', sa.Float, nullable=True),
|
||||
sa.Column('baking_time_minutes', sa.Integer, nullable=True),
|
||||
sa.Column('waste_quantity', sa.Float, nullable=False, default=0.0),
|
||||
sa.Column('waste_reason', sa.String(255), nullable=True),
|
||||
sa.Column('efficiency_percentage', sa.Float, nullable=True),
|
||||
sa.Column('customer_order_reference', sa.String(100), nullable=True),
|
||||
sa.Column('pre_order_quantity', sa.Float, nullable=True),
|
||||
sa.Column('shelf_quantity', sa.Float, nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('completed_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
)
|
||||
|
||||
# Create production ingredient consumption table
|
||||
op.create_table(
|
||||
'production_ingredient_consumption',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('production_batch_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('production_batches.id', ondelete='CASCADE'), nullable=False, index=True),
|
||||
sa.Column('recipe_ingredient_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipe_ingredients.id'), nullable=False, index=True),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True, index=True),
|
||||
sa.Column('planned_quantity', sa.Float, nullable=False),
|
||||
sa.Column('actual_quantity', sa.Float, nullable=False),
|
||||
sa.Column('unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
|
||||
sa.Column('variance_quantity', sa.Float, nullable=True),
|
||||
sa.Column('variance_percentage', sa.Float, nullable=True),
|
||||
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('consumption_time', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
|
||||
sa.Column('consumption_notes', sa.Text, nullable=True),
|
||||
sa.Column('staff_member', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('ingredient_condition', sa.String(50), nullable=True),
|
||||
sa.Column('quality_impact', sa.String(255), nullable=True),
|
||||
sa.Column('substitution_used', sa.Boolean, default=False),
|
||||
sa.Column('substitution_details', sa.Text, nullable=True),
|
||||
)
|
||||
|
||||
# Create production schedules table
|
||||
op.create_table(
|
||||
'production_schedules',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
|
||||
sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False, index=True),
|
||||
sa.Column('schedule_name', sa.String(255), nullable=True),
|
||||
sa.Column('total_planned_batches', sa.Integer, nullable=False, default=0),
|
||||
sa.Column('total_planned_items', sa.Float, nullable=False, default=0.0),
|
||||
sa.Column('estimated_production_hours', sa.Float, nullable=True),
|
||||
sa.Column('estimated_material_cost', sa.Numeric(10, 2), nullable=True),
|
||||
sa.Column('is_published', sa.Boolean, default=False),
|
||||
sa.Column('is_completed', sa.Boolean, default=False),
|
||||
sa.Column('completion_percentage', sa.Float, nullable=True),
|
||||
sa.Column('available_staff_hours', sa.Float, nullable=True),
|
||||
sa.Column('oven_capacity_hours', sa.Float, nullable=True),
|
||||
sa.Column('production_capacity_limit', sa.Float, nullable=True),
|
||||
sa.Column('schedule_notes', sa.Text, nullable=True),
|
||||
sa.Column('preparation_instructions', sa.Text, nullable=True),
|
||||
sa.Column('special_requirements', postgresql.JSONB, nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('published_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('published_at', sa.DateTime(timezone=True), nullable=True),
|
||||
)
|
||||
|
||||
# Create indexes
|
||||
op.create_index('idx_recipes_tenant_name', 'recipes', ['tenant_id', 'name'])
|
||||
op.create_index('idx_recipes_tenant_product', 'recipes', ['tenant_id', 'finished_product_id'])
|
||||
op.create_index('idx_recipes_status', 'recipes', ['tenant_id', 'status'])
|
||||
op.create_index('idx_recipes_category', 'recipes', ['tenant_id', 'category', 'status'])
|
||||
op.create_index('idx_recipes_seasonal', 'recipes', ['tenant_id', 'is_seasonal', 'season_start_month', 'season_end_month'])
|
||||
op.create_index('idx_recipes_signature', 'recipes', ['tenant_id', 'is_signature_item', 'status'])
|
||||
|
||||
op.create_index('idx_recipe_ingredients_recipe', 'recipe_ingredients', ['recipe_id', 'ingredient_order'])
|
||||
op.create_index('idx_recipe_ingredients_ingredient', 'recipe_ingredients', ['ingredient_id'])
|
||||
op.create_index('idx_recipe_ingredients_tenant', 'recipe_ingredients', ['tenant_id', 'recipe_id'])
|
||||
op.create_index('idx_recipe_ingredients_group', 'recipe_ingredients', ['recipe_id', 'ingredient_group', 'ingredient_order'])
|
||||
|
||||
op.create_index('idx_production_batches_tenant_date', 'production_batches', ['tenant_id', 'production_date'])
|
||||
op.create_index('idx_production_batches_recipe', 'production_batches', ['recipe_id', 'production_date'])
|
||||
op.create_index('idx_production_batches_status', 'production_batches', ['tenant_id', 'status', 'production_date'])
|
||||
op.create_index('idx_production_batches_batch_number', 'production_batches', ['tenant_id', 'batch_number'])
|
||||
op.create_index('idx_production_batches_priority', 'production_batches', ['tenant_id', 'priority', 'planned_start_time'])
|
||||
|
||||
op.create_index('idx_consumption_batch', 'production_ingredient_consumption', ['production_batch_id'])
|
||||
op.create_index('idx_consumption_ingredient', 'production_ingredient_consumption', ['ingredient_id', 'consumption_time'])
|
||||
op.create_index('idx_consumption_tenant', 'production_ingredient_consumption', ['tenant_id', 'consumption_time'])
|
||||
op.create_index('idx_consumption_recipe_ingredient', 'production_ingredient_consumption', ['recipe_ingredient_id'])
|
||||
op.create_index('idx_consumption_stock', 'production_ingredient_consumption', ['stock_id'])
|
||||
|
||||
op.create_index('idx_production_schedules_tenant_date', 'production_schedules', ['tenant_id', 'schedule_date'])
|
||||
op.create_index('idx_production_schedules_published', 'production_schedules', ['tenant_id', 'is_published', 'schedule_date'])
|
||||
op.create_index('idx_production_schedules_completed', 'production_schedules', ['tenant_id', 'is_completed', 'schedule_date'])
|
||||
# TODO: Add table creation statements for recipes service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop all tables
|
||||
op.drop_table('production_schedules')
|
||||
op.drop_table('production_ingredient_consumption')
|
||||
op.drop_table('production_batches')
|
||||
op.drop_table('recipe_ingredients')
|
||||
op.drop_table('recipes')
|
||||
|
||||
# Drop enum types
|
||||
op.execute("DROP TYPE IF EXISTS productionpriority")
|
||||
op.execute("DROP TYPE IF EXISTS measurementunit")
|
||||
op.execute("DROP TYPE IF EXISTS productionstatus")
|
||||
op.execute("DROP TYPE IF EXISTS recipestatus")
|
||||
# TODO: Add table drop statements for recipes service
|
||||
pass
|
||||
|
||||
84 services/sales/alembic.ini Normal file
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/sales/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://sales_user:password@sales-db-service:5432/sales_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
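As a quick illustration of the file_template and truncate_slug_length settings above, the sketch below expands the same placeholders the way Alembic does when naming a new revision file; the revision id and slug are made-up values, not anything generated by this commit.

# Illustrative only: how the file_template above turns into a revision filename.
# The revision id and slug are invented; Alembic fills in real values at revision time.
fields = {
    "year": 2024, "month": 1, "day": 1, "hour": 12, "minute": 0,
    "rev": "a1b2c3d4e5f6",
    "slug": "add_sales_tables",  # already shortened to truncate_slug_length
}
template = "%(year)d%(month).2d%(day).2d_%(hour).2d%(minute).2d_%(rev)s_%(slug)s"
print(template % fields + ".py")  # -> 20240101_1200_a1b2c3d4e5f6_add_sales_tables.py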
96 services/sales/migrations/env.py Normal file
@@ -0,0 +1,96 @@
|
||||
"""Alembic environment configuration for sales service"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
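With this env.py in place, migrations are normally driven by the alembic CLI, but the same configuration can be invoked from Python. A minimal sketch, assuming it runs from services/sales/ where alembic.ini lives and that alembic is installed; the DATABASE_URL handling mirrors what env.py does above.

# Minimal sketch: apply the sales-service migrations programmatically.
import os

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")

# Same precedence env.py applies: an explicit DATABASE_URL wins over the ini default.
database_url = os.getenv("DATABASE_URL")
if database_url:
    cfg.set_main_option("sqlalchemy.url", database_url)

command.upgrade(cfg, "head")  # apply all pending revisions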
26 services/sales/migrations/script.py.mako Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Initial sales service tables
|
||||
|
||||
Revision ID: 001_initial_sales
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_sales'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# TODO: Add table creation statements for sales service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# TODO: Add table drop statements for sales service
|
||||
pass
|
||||
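For reference, replacing the TODO above would follow the same op.create_table pattern used elsewhere in this commit. A hedged sketch with a hypothetical sales_orders table; the table and column names are illustrative only, not the service's actual schema.

# Hypothetical sketch of what the placeholder upgrade()/downgrade() could become.
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql


def upgrade() -> None:
    op.create_table(
        'sales_orders',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('order_number', sa.String(100), nullable=False, index=True),
        sa.Column('total_amount', sa.Numeric(10, 2), nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
    )
    op.create_index('idx_sales_orders_tenant_number', 'sales_orders', ['tenant_id', 'order_number'])


def downgrade() -> None:
    op.drop_index('idx_sales_orders_tenant_number')
    op.drop_table('sales_orders')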
84 services/suppliers/alembic.ini Normal file
@@ -0,0 +1,84 @@
|
||||
# ================================================================
|
||||
# services/suppliers/alembic.ini - Alembic Configuration
|
||||
# ================================================================
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration file names
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
timezone = Europe/Madrid
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
sourceless = false
|
||||
|
||||
# version of a migration file's filename format
|
||||
version_num_format = %s
|
||||
|
||||
# version path separator
|
||||
version_path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
output_encoding = utf-8
|
||||
|
||||
# Database URL - will be overridden by environment variable or settings
|
||||
sqlalchemy.url = postgresql+asyncpg://suppliers_user:password@suppliers-db-service:5432/suppliers_db
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -1,62 +1,54 @@
|
||||
"""
|
||||
Alembic environment configuration for Suppliers Service
|
||||
"""
|
||||
"""Alembic environment configuration for suppliers service"""
|
||||
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
from alembic import context
|
||||
|
||||
# Add the app directory to the path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
# Add the service directory to the Python path
|
||||
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
if service_path not in sys.path:
|
||||
sys.path.insert(0, service_path)
|
||||
|
||||
# Import models to ensure they're registered
|
||||
from app.models.suppliers import * # noqa
|
||||
from shared.database.base import Base
|
||||
# Add shared modules to path
|
||||
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # Import all models
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
# Set database URL from settings if not already set
|
||||
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set the SQLAlchemy URL from environment variable if available
|
||||
database_url = os.getenv('SUPPLIERS_DATABASE_URL')
|
||||
if database_url:
|
||||
config.set_main_option('sqlalchemy.url', database_url)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
"""Run migrations with database connection"""
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""Run migrations in async mode"""
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
run_migrations_online()
|
||||
|
||||
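Because this env.py keeps both branches, the offline path can also be used to emit SQL without touching the database. A small sketch, assuming the Alembic Python API is pointed at this service's alembic.ini; it exercises run_migrations_offline() above, so no DBAPI connection is needed.

# Sketch: generate the migration SQL in offline ("--sql") mode instead of executing it.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head", sql=True)  # prints SQL to stdout for review or DBA hand-off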
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
@@ -21,4 +23,4 @@ def upgrade() -> None:
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
${downgrades if downgrades else "pass"}
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Initial supplioers service tables
|
||||
|
||||
Revision ID: 001_initial_suppliers
|
||||
Revises:
|
||||
Create Date: 2024-01-01 12:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_suppliers'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# TODO: Add table creation statements for suppliers service
|
||||
# This is a placeholder migration - replace with actual table definitions
|
||||
pass
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# TODO: Add table drop statements for suppliers service
|
||||
pass
|
||||
@@ -1,151 +0,0 @@
|
||||
"""Standardize product references to inventory_product_id
|
||||
|
||||
Revision ID: 001_standardize_product_references
|
||||
Revises:
|
||||
Create Date: 2025-01-15 12:00:00.000000
|
||||
|
||||
This migration standardizes product references across the suppliers service by:
|
||||
1. Renaming ingredient_id columns to inventory_product_id
|
||||
2. Removing redundant product_name columns where UUID references exist
|
||||
3. Updating indexes to match new column names
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers
|
||||
revision = '001_standardize_product_references'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Apply the changes to standardize product references"""
|
||||
|
||||
# 1. Update supplier_price_lists table
|
||||
print("Updating supplier_price_lists table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('supplier_price_lists', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('supplier_price_lists', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_price_lists_ingredient')
|
||||
op.create_index('ix_price_lists_inventory_product', 'supplier_price_lists',
|
||||
['inventory_product_id'])
|
||||
|
||||
|
||||
# 2. Update purchase_order_items table
|
||||
print("Updating purchase_order_items table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('purchase_order_items', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('purchase_order_items', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_po_items_ingredient')
|
||||
op.create_index('ix_po_items_inventory_product', 'purchase_order_items',
|
||||
['inventory_product_id'])
|
||||
|
||||
|
||||
# 3. Update delivery_items table
|
||||
print("Updating delivery_items table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('delivery_items', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('delivery_items', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_delivery_items_ingredient')
|
||||
op.create_index('ix_delivery_items_inventory_product', 'delivery_items',
|
||||
['inventory_product_id'])
|
||||
|
||||
print("Migration completed successfully!")
|
||||
|
||||
|
||||
def downgrade():
|
||||
"""Revert the changes (for rollback purposes)"""
|
||||
|
||||
print("Rolling back product reference standardization...")
|
||||
|
||||
# 1. Revert delivery_items table
|
||||
print("Reverting delivery_items table...")
|
||||
|
||||
# Revert index name
|
||||
op.drop_index('ix_delivery_items_inventory_product')
|
||||
op.create_index('ix_delivery_items_ingredient', 'delivery_items',
|
||||
['inventory_product_id']) # Will rename back to ingredient_id below
|
||||
|
||||
# Add back product_name column (will be empty initially)
|
||||
op.add_column('delivery_items',
|
||||
sa.Column('product_name', sa.String(255), nullable=False,
|
||||
server_default='Unknown Product'))
|
||||
|
||||
# Rename inventory_product_id back to ingredient_id
|
||||
op.alter_column('delivery_items', 'inventory_product_id',
|
||||
new_column_name='ingredient_id')
|
||||
|
||||
# Update index to use ingredient_id
|
||||
op.drop_index('ix_delivery_items_ingredient')
|
||||
op.create_index('ix_delivery_items_ingredient', 'delivery_items',
|
||||
['ingredient_id'])
|
||||
|
||||
|
||||
# 2. Revert purchase_order_items table
|
||||
print("Reverting purchase_order_items table...")
|
||||
|
||||
# Revert index name
|
||||
op.drop_index('ix_po_items_inventory_product')
|
||||
op.create_index('ix_po_items_ingredient', 'purchase_order_items',
|
||||
['inventory_product_id']) # Will rename back to ingredient_id below
|
||||
|
||||
# Add back product_name column (will be empty initially)
|
||||
op.add_column('purchase_order_items',
|
||||
sa.Column('product_name', sa.String(255), nullable=False,
|
||||
server_default='Unknown Product'))
|
||||
|
||||
# Rename inventory_product_id back to ingredient_id
|
||||
op.alter_column('purchase_order_items', 'inventory_product_id',
|
||||
new_column_name='ingredient_id')
|
||||
|
||||
# Update index to use ingredient_id
|
||||
op.drop_index('ix_po_items_ingredient')
|
||||
op.create_index('ix_po_items_ingredient', 'purchase_order_items',
|
||||
['ingredient_id'])
|
||||
|
||||
|
||||
# 3. Revert supplier_price_lists table
|
||||
print("Reverting supplier_price_lists table...")
|
||||
|
||||
# Revert index name
|
||||
op.drop_index('ix_price_lists_inventory_product')
|
||||
op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
|
||||
['inventory_product_id']) # Will rename back to ingredient_id below
|
||||
|
||||
# Add back product_name column (will be empty initially)
|
||||
op.add_column('supplier_price_lists',
|
||||
sa.Column('product_name', sa.String(255), nullable=False,
|
||||
server_default='Unknown Product'))
|
||||
|
||||
# Rename inventory_product_id back to ingredient_id
|
||||
op.alter_column('supplier_price_lists', 'inventory_product_id',
|
||||
new_column_name='ingredient_id')
|
||||
|
||||
# Update index to use ingredient_id
|
||||
op.drop_index('ix_price_lists_ingredient')
|
||||
op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
|
||||
['ingredient_id'])
|
||||
|
||||
print("Rollback completed successfully!")
|
||||
@@ -1,285 +0,0 @@
|
||||
"""add performance tracking tables
|
||||
|
||||
Revision ID: 002
|
||||
Revises: 001
|
||||
Create Date: 2024-12-19 12:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '002'
|
||||
down_revision = '001'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# Create performance metric type enum
|
||||
performance_metric_type = postgresql.ENUM(
|
||||
'DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS',
|
||||
'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME',
|
||||
'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY',
|
||||
name='performancemetrictype'
|
||||
)
|
||||
performance_metric_type.create(op.get_bind())
|
||||
|
||||
# Create performance period enum
|
||||
performance_period = postgresql.ENUM(
|
||||
'DAILY', 'WEEKLY', 'MONTHLY', 'QUARTERLY', 'YEARLY',
|
||||
name='performanceperiod'
|
||||
)
|
||||
performance_period.create(op.get_bind())
|
||||
|
||||
# Create alert severity enum
|
||||
alert_severity = postgresql.ENUM(
|
||||
'CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO',
|
||||
name='alertseverity'
|
||||
)
|
||||
alert_severity.create(op.get_bind())
|
||||
|
||||
# Create alert type enum
|
||||
alert_type = postgresql.ENUM(
|
||||
'POOR_QUALITY', 'LATE_DELIVERY', 'PRICE_INCREASE', 'LOW_PERFORMANCE',
|
||||
'CONTRACT_EXPIRY', 'COMPLIANCE_ISSUE', 'FINANCIAL_RISK',
|
||||
'COMMUNICATION_ISSUE', 'CAPACITY_CONSTRAINT', 'CERTIFICATION_EXPIRY',
|
||||
name='alerttype'
|
||||
)
|
||||
alert_type.create(op.get_bind())
|
||||
|
||||
# Create alert status enum
|
||||
alert_status = postgresql.ENUM(
|
||||
'ACTIVE', 'ACKNOWLEDGED', 'IN_PROGRESS', 'RESOLVED', 'DISMISSED',
|
||||
name='alertstatus'
|
||||
)
|
||||
alert_status.create(op.get_bind())
|
||||
|
||||
# Create supplier performance metrics table
|
||||
op.create_table('supplier_performance_metrics',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('metric_type', performance_metric_type, nullable=False),
|
||||
sa.Column('period', performance_period, nullable=False),
|
||||
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('metric_value', sa.Float(), nullable=False),
|
||||
sa.Column('target_value', sa.Float(), nullable=True),
|
||||
sa.Column('previous_value', sa.Float(), nullable=True),
|
||||
sa.Column('total_orders', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('total_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('on_time_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('late_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('quality_issues', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False, default=0.0),
|
||||
sa.Column('metrics_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('trend_direction', sa.String(length=20), nullable=True),
|
||||
sa.Column('trend_percentage', sa.Float(), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('external_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('calculated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('calculated_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for performance metrics
|
||||
op.create_index('ix_performance_metrics_tenant_supplier', 'supplier_performance_metrics', ['tenant_id', 'supplier_id'])
|
||||
op.create_index('ix_performance_metrics_type_period', 'supplier_performance_metrics', ['metric_type', 'period'])
|
||||
op.create_index('ix_performance_metrics_period_dates', 'supplier_performance_metrics', ['period_start', 'period_end'])
|
||||
op.create_index('ix_performance_metrics_value', 'supplier_performance_metrics', ['metric_value'])
|
||||
|
||||
# Create supplier alerts table
|
||||
op.create_table('supplier_alerts',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('alert_type', alert_type, nullable=False),
|
||||
sa.Column('severity', alert_severity, nullable=False),
|
||||
sa.Column('status', alert_status, nullable=False, default='ACTIVE'),
|
||||
sa.Column('title', sa.String(length=255), nullable=False),
|
||||
sa.Column('message', sa.Text(), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('trigger_value', sa.Float(), nullable=True),
|
||||
sa.Column('threshold_value', sa.Float(), nullable=True),
|
||||
sa.Column('metric_type', performance_metric_type, nullable=True),
|
||||
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('delivery_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('performance_metric_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('triggered_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('acknowledged_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('resolved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('resolution_notes', sa.Text(), nullable=True),
|
||||
sa.Column('auto_resolve', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('auto_resolve_condition', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('escalated', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('escalated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('escalated_to', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('notification_sent', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('priority_score', sa.Integer(), nullable=False, default=50),
|
||||
sa.Column('business_impact', sa.String(length=50), nullable=True),
|
||||
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(['performance_metric_id'], ['supplier_performance_metrics.id'], ),
|
||||
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for alerts
|
||||
op.create_index('ix_supplier_alerts_tenant_supplier', 'supplier_alerts', ['tenant_id', 'supplier_id'])
|
||||
op.create_index('ix_supplier_alerts_type_severity', 'supplier_alerts', ['alert_type', 'severity'])
|
||||
op.create_index('ix_supplier_alerts_status_triggered', 'supplier_alerts', ['status', 'triggered_at'])
|
||||
op.create_index('ix_supplier_alerts_metric_type', 'supplier_alerts', ['metric_type'])
|
||||
op.create_index('ix_supplier_alerts_priority', 'supplier_alerts', ['priority_score'])
|
||||
|
||||
# Create supplier scorecards table
|
||||
op.create_table('supplier_scorecards',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('scorecard_name', sa.String(length=255), nullable=False),
|
||||
sa.Column('period', performance_period, nullable=False),
|
||||
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('overall_score', sa.Float(), nullable=False),
|
||||
sa.Column('quality_score', sa.Float(), nullable=False),
|
||||
sa.Column('delivery_score', sa.Float(), nullable=False),
|
||||
sa.Column('cost_score', sa.Float(), nullable=False),
|
||||
sa.Column('service_score', sa.Float(), nullable=False),
|
||||
sa.Column('overall_rank', sa.Integer(), nullable=True),
|
||||
sa.Column('category_rank', sa.Integer(), nullable=True),
|
||||
sa.Column('total_suppliers_evaluated', sa.Integer(), nullable=True),
|
||||
sa.Column('on_time_delivery_rate', sa.Float(), nullable=False),
|
||||
sa.Column('quality_rejection_rate', sa.Float(), nullable=False),
|
||||
sa.Column('order_accuracy_rate', sa.Float(), nullable=False),
|
||||
sa.Column('response_time_hours', sa.Float(), nullable=False),
|
||||
sa.Column('cost_variance_percentage', sa.Float(), nullable=False),
|
||||
sa.Column('total_orders_processed', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('total_amount_processed', sa.Numeric(precision=12, scale=2), nullable=False, default=0.0),
|
||||
sa.Column('average_order_value', sa.Numeric(precision=10, scale=2), nullable=False, default=0.0),
|
||||
sa.Column('cost_savings_achieved', sa.Numeric(precision=10, scale=2), nullable=False, default=0.0),
|
||||
sa.Column('score_trend', sa.String(length=20), nullable=True),
|
||||
sa.Column('score_change_percentage', sa.Float(), nullable=True),
|
||||
sa.Column('strengths', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('improvement_areas', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('is_final', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('attachments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('generated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('generated_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for scorecards
|
||||
op.create_index('ix_scorecards_tenant_supplier', 'supplier_scorecards', ['tenant_id', 'supplier_id'])
|
||||
op.create_index('ix_scorecards_period_dates', 'supplier_scorecards', ['period_start', 'period_end'])
|
||||
op.create_index('ix_scorecards_overall_score', 'supplier_scorecards', ['overall_score'])
|
||||
op.create_index('ix_scorecards_period', 'supplier_scorecards', ['period'])
|
||||
op.create_index('ix_scorecards_final', 'supplier_scorecards', ['is_final'])
|
||||
|
||||
# Create supplier benchmarks table
|
||||
op.create_table('supplier_benchmarks',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('benchmark_name', sa.String(length=255), nullable=False),
|
||||
sa.Column('benchmark_type', sa.String(length=50), nullable=False),
|
||||
sa.Column('supplier_category', sa.String(length=100), nullable=True),
|
||||
sa.Column('metric_type', performance_metric_type, nullable=False),
|
||||
sa.Column('excellent_threshold', sa.Float(), nullable=False),
|
||||
sa.Column('good_threshold', sa.Float(), nullable=False),
|
||||
sa.Column('acceptable_threshold', sa.Float(), nullable=False),
|
||||
sa.Column('poor_threshold', sa.Float(), nullable=False),
|
||||
sa.Column('data_source', sa.String(length=255), nullable=True),
|
||||
sa.Column('sample_size', sa.Integer(), nullable=True),
|
||||
sa.Column('confidence_level', sa.Float(), nullable=True),
|
||||
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('methodology', sa.Text(), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for benchmarks
|
||||
op.create_index('ix_benchmarks_tenant_type', 'supplier_benchmarks', ['tenant_id', 'benchmark_type'])
|
||||
op.create_index('ix_benchmarks_metric_type', 'supplier_benchmarks', ['metric_type'])
|
||||
op.create_index('ix_benchmarks_category', 'supplier_benchmarks', ['supplier_category'])
|
||||
op.create_index('ix_benchmarks_active', 'supplier_benchmarks', ['is_active'])
|
||||
|
||||
# Create alert rules table
|
||||
op.create_table('alert_rules',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('rule_name', sa.String(length=255), nullable=False),
|
||||
sa.Column('rule_description', sa.Text(), nullable=True),
|
||||
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
|
||||
sa.Column('alert_type', alert_type, nullable=False),
|
||||
sa.Column('severity', alert_severity, nullable=False),
|
||||
sa.Column('metric_type', performance_metric_type, nullable=True),
|
||||
sa.Column('trigger_condition', sa.String(length=50), nullable=False),
|
||||
sa.Column('threshold_value', sa.Float(), nullable=False),
|
||||
sa.Column('consecutive_violations', sa.Integer(), nullable=False, default=1),
|
||||
sa.Column('supplier_categories', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('supplier_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('exclude_suppliers', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('evaluation_period', performance_period, nullable=False),
|
||||
sa.Column('time_window_hours', sa.Integer(), nullable=True),
|
||||
sa.Column('business_hours_only', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('auto_resolve', sa.Boolean(), nullable=False, default=False),
|
||||
sa.Column('auto_resolve_threshold', sa.Float(), nullable=True),
|
||||
sa.Column('auto_resolve_duration_hours', sa.Integer(), nullable=True),
|
||||
sa.Column('notification_enabled', sa.Boolean(), nullable=False, default=True),
|
||||
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('escalation_minutes', sa.Integer(), nullable=True),
|
||||
sa.Column('escalation_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('auto_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('priority', sa.Integer(), nullable=False, default=50),
|
||||
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('last_triggered', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('trigger_count', sa.Integer(), nullable=False, default=0),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for alert rules
|
||||
op.create_index('ix_alert_rules_tenant_active', 'alert_rules', ['tenant_id', 'is_active'])
|
||||
op.create_index('ix_alert_rules_type_severity', 'alert_rules', ['alert_type', 'severity'])
|
||||
op.create_index('ix_alert_rules_metric_type', 'alert_rules', ['metric_type'])
|
||||
op.create_index('ix_alert_rules_priority', 'alert_rules', ['priority'])
|
||||
|
||||
|
||||
def downgrade():
|
||||
# Drop all tables and indexes
|
||||
op.drop_table('alert_rules')
|
||||
op.drop_table('supplier_benchmarks')
|
||||
op.drop_table('supplier_scorecards')
|
||||
op.drop_table('supplier_alerts')
|
||||
op.drop_table('supplier_performance_metrics')
|
||||
|
||||
# Drop enums
|
||||
op.execute('DROP TYPE IF EXISTS alertstatus')
|
||||
op.execute('DROP TYPE IF EXISTS alerttype')
|
||||
op.execute('DROP TYPE IF EXISTS alertseverity')
|
||||
op.execute('DROP TYPE IF EXISTS performanceperiod')
|
||||
op.execute('DROP TYPE IF EXISTS performancemetrictype')
|
||||
84 services/tenant/alembic.ini Normal file
@@ -0,0 +1,84 @@
# ================================================================
# services/tenant/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://tenant_user:password@tenant-db-service:5432/tenant_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
96
services/tenant/migrations/env.py
Normal file
@@ -0,0 +1,96 @@
"""Alembic environment configuration for tenant service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
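Because env.py resolves DATABASE_URL from the environment before falling back to settings, these migrations can be driven either from the Alembic CLI or from Python. A minimal sketch, assuming a locally reachable database and that it is run from services/tenant/ so the relative script_location resolves (both assumptions, not part of this commit):

# Sketch only - the URL is illustrative; run from services/tenant/.
import os

from alembic import command
from alembic.config import Config

os.environ["DATABASE_URL"] = (
    "postgresql+asyncpg://tenant_user:password@localhost:5432/tenant_db"
)
cfg = Config("alembic.ini")      # picks up script_location = migrations
command.upgrade(cfg, "head")     # runs env.py's async online path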
26
services/tenant/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,29 @@
"""Initial tenant service tables

Revision ID: 001_initial_tenant
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_tenant'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for tenant service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for tenant service
    pass
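Once the tenant models are settled, the placeholder bodies above get replaced with real DDL. Purely as an illustration (the `tenants` table and every column in it are assumptions, not part of this commit), the pair could look roughly like:

# Hypothetical example - table name and columns are assumed, not from this commit.
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def upgrade() -> None:
    op.create_table(
        'tenants',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default=sa.text('true')),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
    )
    op.create_index('ix_tenants_name', 'tenants', ['name'], unique=True)


def downgrade() -> None:
    op.drop_index('ix_tenants_name', table_name='tenants')
    op.drop_table('tenants')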
84
services/training/alembic.ini
Normal file
@@ -0,0 +1,84 @@
# ================================================================
# services/training/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://training_user:password@training-db-service:5432/training_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
96
services/training/migrations/env.py
Normal file
@@ -0,0 +1,96 @@
"""Alembic environment configuration for training service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
26
services/training/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,29 @@
"""Initial training service tables

Revision ID: 001_initial_training
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_training'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for training service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for training service
    pass
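Since env.py imports every model into Base.metadata and configures compare_type and compare_server_default, the real training tables can be autogenerated rather than hand-written once the models land. A sketch of that workflow, assuming it is run from services/training/ with the database reachable (both assumptions, not part of this commit):

# Sketch only - run from services/training/; requires a live database connection.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
# Diff Base.metadata against the database and write a new revision under migrations/versions/
command.revision(cfg, message="create training tables", autogenerate=True)
command.upgrade(cfg, "head")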
@@ -1,26 +0,0 @@
"""Add normalization_params to models table

Revision ID: add_normalization_params
Revises: 001_initial_tables
Create Date: 2025-08-12 14:20:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'add_normalization_params'
down_revision = '001_initial_tables'
branch_labels = None
depends_on = None


def upgrade():
    """Add normalization_params column to models table"""
    op.add_column('models', sa.Column('normalization_params', sa.JSON(), nullable=True))


def downgrade():
    """Remove normalization_params column from models table"""
    op.drop_column('models', 'normalization_params')