Refactor services alembic

Urtzi Alfaro
2025-09-29 19:16:34 +02:00
parent befcc126b0
commit 2712a60a2a
68 changed files with 2659 additions and 2511 deletions

services/pos/alembic.ini Normal file

@@ -0,0 +1,84 @@
# ================================================================
# services/pos/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid
# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false
# version of a migration file's filename format
version_num_format = %s
# version path separator
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8
# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://pos_user:password@pos-db-service:5432/pos_db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
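
As a quick illustration, the file_template above expands with Alembic's %-style keys into timestamped filenames. A minimal sanity check of the pattern (revision id and slug below are made-up placeholder values):

# Illustrative only: rev and slug are placeholders, not real revisions.
template = "%(year)d%(month).2d%(day).2d_%(hour).2d%(minute).2d_%(rev)s_%(slug)s"
name = template % {
    "year": 2025, "month": 9, "day": 29, "hour": 19, "minute": 16,
    "rev": "abc123def456", "slug": "initial_pos",
}
print(name + ".py")  # -> 20250929_1916_abc123def456_initial_pos.py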


@@ -1,45 +0,0 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S


@@ -1,97 +1,96 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
"""Alembic environment configuration for pos service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add the app directory to the path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
sys.path.insert(0, service_path)
from app.core.config import settings
from shared.database.base import Base
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
sys.path.insert(0, shared_path)
# Import all models to ensure they're registered
from app.models import pos_config, pos_transaction, pos_webhook, pos_sync
try:
from app.core.config import settings
from shared.database.base import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
print(f"Current Python path: {sys.path}")
raise
# this is the Alembic Config object
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# Set target metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_database_url():
"""Get database URL from settings"""
return settings.DATABASE_URL
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = get_database_url()
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
with context.begin_transaction():
context.run_migrations()
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# Override the ini file database URL with our settings
configuration = config.get_section(config.config_ini_section)
configuration["sqlalchemy.url"] = get_database_url()
connectable = engine_from_config(
configuration,
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
with context.begin_transaction():
context.run_migrations()
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
run_migrations_online()
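
Note (not part of this commit): because the new env.py calls asyncio.run() itself, migrations should be triggered from a synchronous context rather than from inside a running event loop. A minimal sketch of driving them programmatically, assuming it runs from the services/pos/ directory:

# Hypothetical startup hook, shown for illustration only.
import os
from alembic import command
from alembic.config import Config

def upgrade_to_head() -> None:
    cfg = Config("alembic.ini")  # the configuration file added above
    # Mirror env.py: an explicit DATABASE_URL overrides the ini placeholder URL.
    url = os.getenv("DATABASE_URL")
    if url:
        cfg.set_main_option("sqlalchemy.url", url)
    command.upgrade(cfg, "head")  # invokes env.py, which drives the async engine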


@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:
def downgrade() -> None:
${downgrades if downgrades else "pass"}
${downgrades if downgrades else "pass"}
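
Rendered, the updated template produces revision stubs of roughly this shape (message, revision identifiers, and date below are placeholders):

"""add example table

Revision ID: abc123def456
Revises: 001_initial_pos
Create Date: 2025-09-29 19:16:34.000000
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = 'abc123def456'
down_revision: Union[str, None] = '001_initial_pos'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    pass

def downgrade() -> None:
    pass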


@@ -1,394 +1,28 @@
"""Initial POS Integration tables
"""Initial POS service tables
Revision ID: 001
Revises:
Create Date: 2024-01-01 00:00:00.000000
Revision ID: 001_initial_pos
Create Date: 2024-01-01 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
revision: str = '001_initial_pos'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create pos_configurations table
op.create_table('pos_configurations',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('pos_system', sa.String(length=50), nullable=False),
sa.Column('provider_name', sa.String(length=100), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_connected', sa.Boolean(), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('webhook_url', sa.String(length=500), nullable=True),
sa.Column('webhook_secret', sa.String(length=255), nullable=True),
sa.Column('environment', sa.String(length=20), nullable=False),
sa.Column('location_id', sa.String(length=100), nullable=True),
sa.Column('merchant_id', sa.String(length=100), nullable=True),
sa.Column('sync_enabled', sa.Boolean(), nullable=False),
sa.Column('sync_interval_minutes', sa.String(length=10), nullable=False),
sa.Column('auto_sync_products', sa.Boolean(), nullable=False),
sa.Column('auto_sync_transactions', sa.Boolean(), nullable=False),
sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_successful_sync_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_sync_status', sa.String(length=50), nullable=True),
sa.Column('last_sync_message', sa.Text(), nullable=True),
sa.Column('provider_settings', sa.JSON(), nullable=True),
sa.Column('last_health_check_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('health_status', sa.String(length=50), nullable=False),
sa.Column('health_message', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_pos_config_active', 'pos_configurations', ['is_active'], unique=False)
op.create_index('idx_pos_config_connected', 'pos_configurations', ['is_connected'], unique=False)
op.create_index('idx_pos_config_created_at', 'pos_configurations', ['created_at'], unique=False)
op.create_index('idx_pos_config_health_status', 'pos_configurations', ['health_status'], unique=False)
op.create_index('idx_pos_config_sync_enabled', 'pos_configurations', ['sync_enabled'], unique=False)
op.create_index('idx_pos_config_tenant_pos_system', 'pos_configurations', ['tenant_id', 'pos_system'], unique=False)
op.create_index(op.f('ix_pos_configurations_id'), 'pos_configurations', ['id'], unique=False)
op.create_index(op.f('ix_pos_configurations_tenant_id'), 'pos_configurations', ['tenant_id'], unique=False)
# Create pos_transactions table
op.create_table('pos_transactions',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('pos_config_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('pos_system', sa.String(length=50), nullable=False),
sa.Column('external_transaction_id', sa.String(length=255), nullable=False),
sa.Column('external_order_id', sa.String(length=255), nullable=True),
sa.Column('transaction_type', sa.String(length=50), nullable=False),
sa.Column('status', sa.String(length=50), nullable=False),
sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('tip_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('currency', sa.String(length=3), nullable=False),
sa.Column('payment_method', sa.String(length=50), nullable=True),
sa.Column('payment_status', sa.String(length=50), nullable=True),
sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('pos_created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('pos_updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('location_id', sa.String(length=100), nullable=True),
sa.Column('location_name', sa.String(length=255), nullable=True),
sa.Column('staff_id', sa.String(length=100), nullable=True),
sa.Column('staff_name', sa.String(length=255), nullable=True),
sa.Column('customer_id', sa.String(length=100), nullable=True),
sa.Column('customer_email', sa.String(length=255), nullable=True),
sa.Column('customer_phone', sa.String(length=50), nullable=True),
sa.Column('order_type', sa.String(length=50), nullable=True),
sa.Column('table_number', sa.String(length=20), nullable=True),
sa.Column('receipt_number', sa.String(length=100), nullable=True),
sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
sa.Column('sales_record_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('sync_attempted_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('sync_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('sync_error', sa.Text(), nullable=True),
sa.Column('sync_retry_count', sa.Integer(), nullable=False),
sa.Column('raw_data', sa.JSON(), nullable=True),
sa.Column('is_processed', sa.Boolean(), nullable=False),
sa.Column('processing_error', sa.Text(), nullable=True),
sa.Column('is_duplicate', sa.Boolean(), nullable=False),
sa.Column('duplicate_of', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_pos_transaction_customer', 'pos_transactions', ['customer_id'], unique=False)
op.create_index('idx_pos_transaction_duplicate', 'pos_transactions', ['is_duplicate'], unique=False)
op.create_index('idx_pos_transaction_external_id', 'pos_transactions', ['pos_system', 'external_transaction_id'], unique=False)
op.create_index('idx_pos_transaction_location', 'pos_transactions', ['location_id'], unique=False)
op.create_index('idx_pos_transaction_processed', 'pos_transactions', ['is_processed'], unique=False)
op.create_index('idx_pos_transaction_status', 'pos_transactions', ['status'], unique=False)
op.create_index('idx_pos_transaction_sync_status', 'pos_transactions', ['is_synced_to_sales'], unique=False)
op.create_index('idx_pos_transaction_tenant_date', 'pos_transactions', ['tenant_id', 'transaction_date'], unique=False)
op.create_index('idx_pos_transaction_type', 'pos_transactions', ['transaction_type'], unique=False)
op.create_index(op.f('ix_pos_transactions_external_order_id'), 'pos_transactions', ['external_order_id'], unique=False)
op.create_index(op.f('ix_pos_transactions_external_transaction_id'), 'pos_transactions', ['external_transaction_id'], unique=False)
op.create_index(op.f('ix_pos_transactions_id'), 'pos_transactions', ['id'], unique=False)
op.create_index(op.f('ix_pos_transactions_pos_config_id'), 'pos_transactions', ['pos_config_id'], unique=False)
op.create_index(op.f('ix_pos_transactions_pos_system'), 'pos_transactions', ['pos_system'], unique=False)
op.create_index(op.f('ix_pos_transactions_sales_record_id'), 'pos_transactions', ['sales_record_id'], unique=False)
op.create_index(op.f('ix_pos_transactions_tenant_id'), 'pos_transactions', ['tenant_id'], unique=False)
op.create_index(op.f('ix_pos_transactions_transaction_date'), 'pos_transactions', ['transaction_date'], unique=False)
# Create pos_transaction_items table
op.create_table('pos_transaction_items',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('transaction_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('external_item_id', sa.String(length=255), nullable=True),
sa.Column('sku', sa.String(length=100), nullable=True),
sa.Column('product_name', sa.String(length=255), nullable=False),
sa.Column('product_category', sa.String(length=100), nullable=True),
sa.Column('product_subcategory', sa.String(length=100), nullable=True),
sa.Column('quantity', sa.Numeric(precision=10, scale=3), nullable=False),
sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('total_price', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('modifiers', sa.JSON(), nullable=True),
sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('is_mapped_to_inventory', sa.Boolean(), nullable=False),
sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
sa.Column('sync_error', sa.Text(), nullable=True),
sa.Column('raw_data', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['transaction_id'], ['pos_transactions.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_pos_item_category', 'pos_transaction_items', ['product_category'], unique=False)
op.create_index('idx_pos_item_inventory', 'pos_transaction_items', ['inventory_product_id'], unique=False)
op.create_index('idx_pos_item_mapped', 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)
op.create_index('idx_pos_item_product', 'pos_transaction_items', ['product_name'], unique=False)
op.create_index('idx_pos_item_sku', 'pos_transaction_items', ['sku'], unique=False)
op.create_index('idx_pos_item_sync', 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
op.create_index('idx_pos_item_transaction', 'pos_transaction_items', ['transaction_id'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_id'), 'pos_transaction_items', ['id'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_inventory_product_id'), 'pos_transaction_items', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_product_category'), 'pos_transaction_items', ['product_category'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_sku'), 'pos_transaction_items', ['sku'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_tenant_id'), 'pos_transaction_items', ['tenant_id'], unique=False)
op.create_index(op.f('ix_pos_transaction_items_transaction_id'), 'pos_transaction_items', ['transaction_id'], unique=False)
# Create pos_webhook_logs table
op.create_table('pos_webhook_logs',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('pos_system', sa.String(length=50), nullable=False),
sa.Column('webhook_type', sa.String(length=100), nullable=False),
sa.Column('method', sa.String(length=10), nullable=False),
sa.Column('url_path', sa.String(length=500), nullable=False),
sa.Column('query_params', sa.JSON(), nullable=True),
sa.Column('headers', sa.JSON(), nullable=True),
sa.Column('raw_payload', sa.Text(), nullable=False),
sa.Column('payload_size', sa.Integer(), nullable=False),
sa.Column('content_type', sa.String(length=100), nullable=True),
sa.Column('signature', sa.String(length=500), nullable=True),
sa.Column('is_signature_valid', sa.Boolean(), nullable=True),
sa.Column('source_ip', sa.String(length=45), nullable=True),
sa.Column('status', sa.String(length=50), nullable=False),
sa.Column('processing_started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('processing_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('processing_duration_ms', sa.Integer(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('error_code', sa.String(length=50), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('max_retries', sa.Integer(), nullable=False),
sa.Column('response_status_code', sa.Integer(), nullable=True),
sa.Column('response_body', sa.Text(), nullable=True),
sa.Column('response_sent_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('event_id', sa.String(length=255), nullable=True),
sa.Column('event_timestamp', sa.DateTime(timezone=True), nullable=True),
sa.Column('sequence_number', sa.Integer(), nullable=True),
sa.Column('transaction_id', sa.String(length=255), nullable=True),
sa.Column('order_id', sa.String(length=255), nullable=True),
sa.Column('customer_id', sa.String(length=255), nullable=True),
sa.Column('created_transaction_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('updated_transaction_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('is_duplicate', sa.Boolean(), nullable=False),
sa.Column('duplicate_of', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('priority', sa.String(length=20), nullable=False),
sa.Column('user_agent', sa.String(length=500), nullable=True),
sa.Column('forwarded_for', sa.String(length=200), nullable=True),
sa.Column('request_id', sa.String(length=100), nullable=True),
sa.Column('received_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_webhook_duplicate', 'pos_webhook_logs', ['is_duplicate'], unique=False)
op.create_index('idx_webhook_event_id', 'pos_webhook_logs', ['event_id'], unique=False)
op.create_index('idx_webhook_order_id', 'pos_webhook_logs', ['order_id'], unique=False)
op.create_index('idx_webhook_pos_system_type', 'pos_webhook_logs', ['pos_system', 'webhook_type'], unique=False)
op.create_index('idx_webhook_priority', 'pos_webhook_logs', ['priority'], unique=False)
op.create_index('idx_webhook_received_at', 'pos_webhook_logs', ['received_at'], unique=False)
op.create_index('idx_webhook_retry', 'pos_webhook_logs', ['retry_count'], unique=False)
op.create_index('idx_webhook_signature_valid', 'pos_webhook_logs', ['is_signature_valid'], unique=False)
op.create_index('idx_webhook_status', 'pos_webhook_logs', ['status'], unique=False)
op.create_index('idx_webhook_tenant_received', 'pos_webhook_logs', ['tenant_id', 'received_at'], unique=False)
op.create_index('idx_webhook_transaction_id', 'pos_webhook_logs', ['transaction_id'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_event_id'), 'pos_webhook_logs', ['event_id'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_id'), 'pos_webhook_logs', ['id'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_pos_system'), 'pos_webhook_logs', ['pos_system'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_received_at'), 'pos_webhook_logs', ['received_at'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_tenant_id'), 'pos_webhook_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_transaction_id'), 'pos_webhook_logs', ['transaction_id'], unique=False)
op.create_index(op.f('ix_pos_webhook_logs_webhook_type'), 'pos_webhook_logs', ['webhook_type'], unique=False)
# Create pos_sync_logs table
op.create_table('pos_sync_logs',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('pos_config_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('sync_type', sa.String(length=50), nullable=False),
sa.Column('sync_direction', sa.String(length=20), nullable=False),
sa.Column('data_type', sa.String(length=50), nullable=False),
sa.Column('pos_system', sa.String(length=50), nullable=False),
sa.Column('status', sa.String(length=50), nullable=False),
sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('duration_seconds', sa.Numeric(precision=10, scale=3), nullable=True),
sa.Column('sync_from_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('sync_to_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('records_requested', sa.Integer(), nullable=False),
sa.Column('records_processed', sa.Integer(), nullable=False),
sa.Column('records_created', sa.Integer(), nullable=False),
sa.Column('records_updated', sa.Integer(), nullable=False),
sa.Column('records_skipped', sa.Integer(), nullable=False),
sa.Column('records_failed', sa.Integer(), nullable=False),
sa.Column('api_calls_made', sa.Integer(), nullable=False),
sa.Column('api_rate_limit_hits', sa.Integer(), nullable=False),
sa.Column('total_api_time_ms', sa.Integer(), nullable=False),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('error_code', sa.String(length=100), nullable=True),
sa.Column('error_details', sa.JSON(), nullable=True),
sa.Column('retry_attempt', sa.Integer(), nullable=False),
sa.Column('max_retries', sa.Integer(), nullable=False),
sa.Column('parent_sync_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('sync_configuration', sa.JSON(), nullable=True),
sa.Column('current_page', sa.Integer(), nullable=True),
sa.Column('total_pages', sa.Integer(), nullable=True),
sa.Column('current_batch', sa.Integer(), nullable=True),
sa.Column('total_batches', sa.Integer(), nullable=True),
sa.Column('progress_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
sa.Column('validation_errors', sa.JSON(), nullable=True),
sa.Column('data_quality_score', sa.Numeric(precision=5, scale=2), nullable=True),
sa.Column('memory_usage_mb', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('cpu_usage_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
sa.Column('network_bytes_received', sa.Integer(), nullable=True),
sa.Column('network_bytes_sent', sa.Integer(), nullable=True),
sa.Column('revenue_synced', sa.Numeric(precision=12, scale=2), nullable=True),
sa.Column('transactions_synced', sa.Integer(), nullable=False),
sa.Column('triggered_by', sa.String(length=50), nullable=True),
sa.Column('triggered_by_user_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('trigger_details', sa.JSON(), nullable=True),
sa.Column('external_batch_id', sa.String(length=255), nullable=True),
sa.Column('webhook_log_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('tags', sa.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_sync_log_completed', 'pos_sync_logs', ['completed_at'], unique=False)
op.create_index('idx_sync_log_data_type', 'pos_sync_logs', ['data_type'], unique=False)
op.create_index('idx_sync_log_duration', 'pos_sync_logs', ['duration_seconds'], unique=False)
op.create_index('idx_sync_log_external_batch', 'pos_sync_logs', ['external_batch_id'], unique=False)
op.create_index('idx_sync_log_parent', 'pos_sync_logs', ['parent_sync_id'], unique=False)
op.create_index('idx_sync_log_pos_system_type', 'pos_sync_logs', ['pos_system', 'sync_type'], unique=False)
op.create_index('idx_sync_log_retry', 'pos_sync_logs', ['retry_attempt'], unique=False)
op.create_index('idx_sync_log_status', 'pos_sync_logs', ['status'], unique=False)
op.create_index('idx_sync_log_tenant_started', 'pos_sync_logs', ['tenant_id', 'started_at'], unique=False)
op.create_index('idx_sync_log_trigger', 'pos_sync_logs', ['triggered_by'], unique=False)
op.create_index('idx_sync_log_webhook', 'pos_sync_logs', ['webhook_log_id'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_data_type'), 'pos_sync_logs', ['data_type'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_id'), 'pos_sync_logs', ['id'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_pos_config_id'), 'pos_sync_logs', ['pos_config_id'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_pos_system'), 'pos_sync_logs', ['pos_system'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_started_at'), 'pos_sync_logs', ['started_at'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_sync_type'), 'pos_sync_logs', ['sync_type'], unique=False)
op.create_index(op.f('ix_pos_sync_logs_tenant_id'), 'pos_sync_logs', ['tenant_id'], unique=False)
# TODO: Add table creation statements for POS service
# This is a placeholder migration - replace with actual table definitions
pass
def downgrade() -> None:
# Drop pos_sync_logs table
op.drop_index(op.f('ix_pos_sync_logs_tenant_id'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_sync_type'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_started_at'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_pos_system'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_pos_config_id'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_id'), table_name='pos_sync_logs')
op.drop_index(op.f('ix_pos_sync_logs_data_type'), table_name='pos_sync_logs')
op.drop_index('idx_sync_log_webhook', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_trigger', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_tenant_started', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_status', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_retry', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_pos_system_type', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_parent', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_external_batch', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_duration', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_data_type', table_name='pos_sync_logs')
op.drop_index('idx_sync_log_completed', table_name='pos_sync_logs')
op.drop_table('pos_sync_logs')
# Drop pos_webhook_logs table
op.drop_index(op.f('ix_pos_webhook_logs_webhook_type'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_transaction_id'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_tenant_id'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_received_at'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_pos_system'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_id'), table_name='pos_webhook_logs')
op.drop_index(op.f('ix_pos_webhook_logs_event_id'), table_name='pos_webhook_logs')
op.drop_index('idx_webhook_transaction_id', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_tenant_received', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_status', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_signature_valid', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_retry', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_received_at', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_priority', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_pos_system_type', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_order_id', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_event_id', table_name='pos_webhook_logs')
op.drop_index('idx_webhook_duplicate', table_name='pos_webhook_logs')
op.drop_table('pos_webhook_logs')
# Drop pos_transaction_items table
op.drop_index(op.f('ix_pos_transaction_items_transaction_id'), table_name='pos_transaction_items')
op.drop_index(op.f('ix_pos_transaction_items_tenant_id'), table_name='pos_transaction_items')
op.drop_index(op.f('ix_pos_transaction_items_sku'), table_name='pos_transaction_items')
op.drop_index(op.f('ix_pos_transaction_items_product_category'), table_name='pos_transaction_items')
op.drop_index(op.f('ix_pos_transaction_items_inventory_product_id'), table_name='pos_transaction_items')
op.drop_index(op.f('ix_pos_transaction_items_id'), table_name='pos_transaction_items')
op.drop_index('idx_pos_item_transaction', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_sync', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_sku', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_product', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_mapped', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_inventory', table_name='pos_transaction_items')
op.drop_index('idx_pos_item_category', table_name='pos_transaction_items')
op.drop_table('pos_transaction_items')
# Drop pos_transactions table
op.drop_index(op.f('ix_pos_transactions_transaction_date'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_tenant_id'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_sales_record_id'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_pos_system'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_pos_config_id'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_id'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_external_transaction_id'), table_name='pos_transactions')
op.drop_index(op.f('ix_pos_transactions_external_order_id'), table_name='pos_transactions')
op.drop_index('idx_pos_transaction_type', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_tenant_date', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_sync_status', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_status', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_processed', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_location', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_external_id', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_duplicate', table_name='pos_transactions')
op.drop_index('idx_pos_transaction_customer', table_name='pos_transactions')
op.drop_table('pos_transactions')
# Drop pos_configurations table
op.drop_index(op.f('ix_pos_configurations_tenant_id'), table_name='pos_configurations')
op.drop_index(op.f('ix_pos_configurations_id'), table_name='pos_configurations')
op.drop_index('idx_pos_config_tenant_pos_system', table_name='pos_configurations')
op.drop_index('idx_pos_config_sync_enabled', table_name='pos_configurations')
op.drop_index('idx_pos_config_health_status', table_name='pos_configurations')
op.drop_index('idx_pos_config_created_at', table_name='pos_configurations')
op.drop_index('idx_pos_config_connected', table_name='pos_configurations')
op.drop_index('idx_pos_config_active', table_name='pos_configurations')
op.drop_table('pos_configurations')
# TODO: Add table drop statements for POS service
pass
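
For reference, the schema removed above shows the pattern the TODO placeholders are meant to be filled with; an abbreviated sketch using a few columns from pos_configurations (not the full definition deleted in this commit):

# Abbreviated illustration of the create/drop pattern.
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

def upgrade() -> None:
    op.create_table(
        'pos_configurations',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('pos_system', sa.String(length=50), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('idx_pos_config_tenant_pos_system', 'pos_configurations',
                    ['tenant_id', 'pos_system'], unique=False)

def downgrade() -> None:
    # Reverse order: drop indexes and child tables before their parents.
    op.drop_index('idx_pos_config_tenant_pos_system', table_name='pos_configurations')
    op.drop_table('pos_configurations')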