Initial commit - production deployment
141
services/tenant/migrations/env.py
Normal file
@@ -0,0 +1,141 @@
"""Alembic environment configuration for tenant service"""

import asyncio
import os
import sys
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # noqa: F401, F403
except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')

# Set database URL from environment variables with multiple fallback strategies
database_url = (
    os.getenv(f'{service_name_upper}_DATABASE_URL') or  # Service-specific
    os.getenv('DATABASE_URL')  # Generic fallback
)

# If DATABASE_URL is not set, construct from individual components
if not database_url:
    # Try generic PostgreSQL environment variables first
    postgres_host = os.getenv('POSTGRES_HOST')
    postgres_port = os.getenv('POSTGRES_PORT', '5432')
    postgres_db = os.getenv('POSTGRES_DB')
    postgres_user = os.getenv('POSTGRES_USER')
    postgres_password = os.getenv('POSTGRES_PASSWORD')

    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Try service-specific environment variables
        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')

        if db_password:
            database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
        else:
            # Final fallback: try to get from settings object
            try:
                database_url = getattr(settings, 'DATABASE_URL', None)
            except Exception:
                pass

if not database_url:
    error_msg = f"ERROR: No database URL configured for {service_name} service"
    print(error_msg)
    raise Exception(error_msg)

config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async support."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
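Note on the URL resolution above: a service-specific TENANT_DATABASE_URL wins, then a generic DATABASE_URL, then generic POSTGRES_* components, then TENANT_DB_* components, then the settings object. A minimal, self-contained sketch of the service-specific branch (the values here are made up; only the variable names mirror what env.py reads):

import os

# Hypothetical value for illustration only.
os.environ["TENANT_DB_PASSWORD"] = "example-password"

service_name = "tenant"
prefix = service_name.upper()  # TENANT

database_url = os.getenv(f"{prefix}_DATABASE_URL") or os.getenv("DATABASE_URL")
if not database_url:
    # Same defaults env.py falls back to when only a password is provided.
    host = os.getenv(f"{prefix}_DB_HOST", f"{service_name}-db-service")
    port = os.getenv(f"{prefix}_DB_PORT", "5432")
    name = os.getenv(f"{prefix}_DB_NAME", f"{service_name}_db")
    user = os.getenv(f"{prefix}_DB_USER", f"{service_name}_user")
    password = os.getenv(f"{prefix}_DB_PASSWORD")
    if password:
        database_url = f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

print(database_url)
# postgresql+asyncpg://tenant_user:example-password@tenant-db-service:5432/tenant_db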
26
services/tenant/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
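Alembic renders this template whenever a new revision file is generated. A sketch of triggering that from Python rather than the CLI (the alembic.ini path is an assumption, not part of this commit):

from alembic import command
from alembic.config import Config

# Assumed location of the service's alembic.ini; adjust to the real layout.
cfg = Config("services/tenant/alembic.ini")
command.revision(cfg, message="describe your change", autogenerate=True)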
@@ -0,0 +1,541 @@
"""Comprehensive unified initial schema with all tenant service tables and columns

Revision ID: 001_unified_initial_schema
Revises:
Create Date: 2025-11-27 12:00:00.000000+00:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import UUID
import uuid


def _index_exists(connection, index_name: str) -> bool:
    """Check if an index exists in the database."""
    result = connection.execute(
        sa.text("""
            SELECT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE indexname = :index_name
            )
        """),
        {"index_name": index_name}
    )
    return result.scalar()


# revision identifiers, used by Alembic.
revision: str = '001_unified_initial_schema'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create audit_logs table
    op.create_table('audit_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('action', sa.String(length=100), nullable=False),
        sa.Column('resource_type', sa.String(length=100), nullable=False),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('severity', sa.String(length=20), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('endpoint', sa.String(length=255), nullable=True),
        sa.Column('method', sa.String(length=10), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)

    # Create tenants table with all columns including hierarchy fields
    op.create_table('tenants',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('subdomain', sa.String(length=100), nullable=True),
        sa.Column('business_type', sa.String(length=100), nullable=True),
        sa.Column('business_model', sa.String(length=100), nullable=True),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(length=100), nullable=True),
        sa.Column('postal_code', sa.String(length=10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        # Regional/localization configuration
        sa.Column('timezone', sa.String(length=50), nullable=False, server_default='Europe/Madrid'),
        sa.Column('currency', sa.String(length=3), nullable=False, server_default='EUR'),  # Currency code: EUR, USD, GBP
        sa.Column('language', sa.String(length=5), nullable=False, server_default='es'),  # Language code: es, en, eu
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_demo', sa.Boolean(), nullable=True),
        sa.Column('is_demo_template', sa.Boolean(), nullable=True),
        sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True),
        sa.Column('demo_session_id', sa.String(length=100), nullable=True),
        sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
        sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('metadata_', sa.JSON(), nullable=True),
        sa.Column('owner_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        # 3D Secure (3DS) tracking
        sa.Column('threeds_authentication_required', sa.Boolean(), nullable=True, server_default='FALSE'),
        sa.Column('threeds_authentication_required_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('threeds_authentication_completed', sa.Boolean(), nullable=True, server_default='FALSE'),
        sa.Column('threeds_authentication_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_threeds_setup_intent_id', sa.String(length=255), nullable=True),
        sa.Column('threeds_action_type', sa.String(length=100), nullable=True),
        # Enterprise tier hierarchy fields
        sa.Column('parent_tenant_id', sa.UUID(), nullable=True),
        sa.Column('tenant_type', sa.String(length=50), nullable=False, server_default='standalone'),
        sa.Column('hierarchy_path', sa.String(length=500), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)
    # Hierarchy indexes
    op.create_index('ix_tenants_parent_tenant_id', 'tenants', ['parent_tenant_id'])
    op.create_index('ix_tenants_tenant_type', 'tenants', ['tenant_type'])
    op.create_index('ix_tenants_hierarchy_path', 'tenants', ['hierarchy_path'])
    # Add foreign key constraint for hierarchy
    op.create_foreign_key(
        'fk_tenants_parent_tenant',
        'tenants',
        'tenants',
        ['parent_tenant_id'],
        ['id'],
        ondelete='RESTRICT'
    )
    # Add check constraint to prevent a tenant being its own parent
    # (direct self-reference only; deeper cycles must be caught in application code)
    op.create_check_constraint(
        'check_parent_not_self',
        'tenants',
        'id != parent_tenant_id'
    )

    # Create tenant_members table
    op.create_table('tenant_members',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('role', sa.String(length=50), nullable=True),
        sa.Column('permissions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('invited_by', sa.UUID(), nullable=True),
        sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)

    # Create tenant_settings table with all settings including notification settings
    op.create_table('tenant_settings',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('production_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('order_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('replenishment_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('safety_stock_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('moq_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_selection_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('ml_insights_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('notification_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False,
                  server_default=sa.text("""'{
                      "whatsapp_enabled": false,
                      "whatsapp_phone_number_id": "",
                      "whatsapp_access_token": "",
                      "whatsapp_business_account_id": "",
                      "whatsapp_api_version": "v18.0",
                      "whatsapp_default_language": "es",
                      "email_enabled": true,
                      "email_from_address": "",
                      "email_from_name": "",
                      "email_reply_to": "",
                      "enable_po_notifications": true,
                      "enable_inventory_alerts": true,
                      "enable_production_alerts": true,
                      "enable_forecast_alerts": true,
                      "po_notification_channels": ["email"],
                      "inventory_alert_channels": ["email"],
                      "production_alert_channels": ["email"],
                      "forecast_alert_channels": ["email"]
                  }'::jsonb""")),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('tenant_id')
    )

    # Create subscriptions table with all quota columns
    op.create_table('subscriptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan', sa.String(length=50), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('monthly_price', sa.Float(), nullable=True),
        sa.Column('billing_cycle', sa.String(length=20), nullable=True),
        sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancellation_effective_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('subscription_id', sa.String(255), nullable=True),
        sa.Column('customer_id', sa.String(255), nullable=True),
        # Basic resource limits
        sa.Column('max_users', sa.Integer(), nullable=True),
        sa.Column('max_locations', sa.Integer(), nullable=True),
        sa.Column('max_products', sa.Integer(), nullable=True),
        sa.Column('max_recipes', sa.Integer(), nullable=True),
        sa.Column('max_suppliers', sa.Integer(), nullable=True),
        # Daily/hourly quota limits
        sa.Column('training_jobs_per_day', sa.Integer(), nullable=True),
        sa.Column('forecast_generation_per_day', sa.Integer(), nullable=True),
        sa.Column('api_calls_per_hour', sa.Integer(), nullable=True),
        # Storage limits
        sa.Column('file_storage_gb', sa.Integer(), nullable=True),
        # Data access limits
        sa.Column('dataset_size_rows', sa.Integer(), nullable=True),
        sa.Column('forecast_horizon_days', sa.Integer(), nullable=True),
        sa.Column('historical_data_access_days', sa.Integer(), nullable=True),
        # Bulk operation limits
        sa.Column('bulk_import_rows', sa.Integer(), nullable=True),
        sa.Column('bulk_export_rows', sa.Integer(), nullable=True),
        # Integration limits
        sa.Column('webhook_endpoints', sa.Integer(), nullable=True),
        sa.Column('pos_sync_interval_minutes', sa.Integer(), nullable=True),
        # Reporting limits
        sa.Column('report_retention_days', sa.Integer(), nullable=True),
        # Enterprise-specific limits
        sa.Column('max_child_tenants', sa.Integer(), nullable=True),
        # Tenant linking support
        sa.Column('user_id', sa.String(length=36), nullable=True),
        sa.Column('is_tenant_linked', sa.Boolean(), nullable=False, server_default='FALSE'),
        sa.Column('tenant_linking_status', sa.String(length=50), nullable=True),
        sa.Column('linked_at', sa.DateTime(), nullable=True),
        # 3D Secure (3DS) tracking
        sa.Column('threeds_authentication_required', sa.Boolean(), nullable=True, server_default='FALSE'),
        sa.Column('threeds_authentication_required_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('threeds_authentication_completed', sa.Boolean(), nullable=True, server_default='FALSE'),
        sa.Column('threeds_authentication_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_threeds_setup_intent_id', sa.String(length=255), nullable=True),
        sa.Column('threeds_action_type', sa.String(length=100), nullable=True),
        # Features and metadata
        sa.Column('features', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )

    # Add performance indexes for the subscriptions table.
    # Get a connection to check for existing indexes first.
    connection = op.get_bind()

    # Index 1: Fast lookup by tenant_id with status filter
    if not _index_exists(connection, 'idx_subscriptions_tenant_status'):
        op.create_index(
            'idx_subscriptions_tenant_status',
            'subscriptions',
            ['tenant_id', 'status'],
            unique=False,
            postgresql_where=sa.text("status IN ('active', 'trial', 'trialing')")
        )

    # Index 2: Covering index to avoid table lookups (most efficient)
    if not _index_exists(connection, 'idx_subscriptions_tenant_covering'):
        op.execute("""
            CREATE INDEX idx_subscriptions_tenant_covering
            ON subscriptions (tenant_id, plan, status, next_billing_date, monthly_price, max_users, max_locations, max_products)
        """)

    # Index 3: Status and validity checks for batch operations
    if not _index_exists(connection, 'idx_subscriptions_status_billing'):
        op.create_index(
            'idx_subscriptions_status_billing',
            'subscriptions',
            ['status', 'next_billing_date'],
            unique=False,
            postgresql_where=sa.text("status IN ('active', 'trial', 'trialing')")
        )

    # Index 4: Quick lookup for a tenant's active subscription (specialized)
    if not _index_exists(connection, 'idx_subscriptions_active_tenant'):
        op.execute("""
            CREATE INDEX idx_subscriptions_active_tenant
            ON subscriptions (tenant_id, status, plan, next_billing_date, max_users, max_locations, max_products)
            WHERE status = 'active'
        """)

    # Index 5: Subscription ID lookup (for webhook processing)
    if not _index_exists(connection, 'idx_subscriptions_subscription_id'):
        op.create_index(
            'idx_subscriptions_subscription_id',
            'subscriptions',
            ['subscription_id'],
            unique=False,
            postgresql_where=sa.text("subscription_id IS NOT NULL")
        )

    # Index 6: Customer ID lookup (for customer-related operations)
    if not _index_exists(connection, 'idx_subscriptions_customer_id'):
        op.create_index(
            'idx_subscriptions_customer_id',
            'subscriptions',
            ['customer_id'],
            unique=False,
            postgresql_where=sa.text("customer_id IS NOT NULL")
        )

    # Index 7: User ID for tenant linking
    if not _index_exists(connection, 'idx_subscriptions_user_id'):
        op.create_index(
            'idx_subscriptions_user_id',
            'subscriptions',
            ['user_id'],
            unique=False
        )

    # Index 8: Tenant linking status
    if not _index_exists(connection, 'idx_subscriptions_linking_status'):
        op.create_index(
            'idx_subscriptions_linking_status',
            'subscriptions',
            ['tenant_linking_status'],
            unique=False
        )
    # Create coupons table with tenant_id nullable to support system-wide coupons
    op.create_table('coupons',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),  # Nullable to support system-wide coupons
        sa.Column('code', sa.String(length=50), nullable=False),
        sa.Column('discount_type', sa.String(length=20), nullable=False),
        sa.Column('discount_value', sa.Integer(), nullable=False),
        sa.Column('max_redemptions', sa.Integer(), nullable=True),
        sa.Column('current_redemptions', sa.Integer(), nullable=False, default=0),
        sa.Column('valid_from', sa.DateTime(timezone=True), nullable=False),
        sa.Column('valid_until', sa.DateTime(timezone=True), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=False, default=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('code')  # Mirrors unique=True on the code column in the model, so codes are globally unique
    )
    op.create_index('idx_coupon_code_active', 'coupons', ['code', 'active'], unique=False)
    op.create_index('idx_coupon_valid_dates', 'coupons', ['valid_from', 'valid_until'], unique=False)
    # Index for tenant_id queries
    op.create_index('idx_coupon_tenant_id', 'coupons', ['tenant_id'], unique=False)

    # Create coupon_redemptions table with current model structure
    op.create_table('coupon_redemptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=False),
        sa.Column('coupon_code', sa.String(length=50), nullable=False),
        sa.Column('redeemed_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('discount_applied', sa.JSON(), nullable=False),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['coupon_code'], ['coupons.code']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_redemption_tenant', 'coupon_redemptions', ['tenant_id'], unique=False)
    op.create_index('idx_redemption_coupon', 'coupon_redemptions', ['coupon_code'], unique=False)
    op.create_index('idx_redemption_tenant_coupon', 'coupon_redemptions', ['tenant_id', 'coupon_code'], unique=False)
    # Create events table with current model structure
    op.create_table(
        'events',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('event_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False, index=True),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('event_date', sa.Date, nullable=False, index=True),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_all_day', sa.Boolean, default=True),
        sa.Column('expected_impact', sa.String(50), nullable=True),
        sa.Column('impact_multiplier', sa.Float, nullable=True),
        sa.Column('affected_product_categories', sa.String(500), nullable=True),
        sa.Column('location', sa.String(500), nullable=True),
        sa.Column('is_local', sa.Boolean, default=True),
        sa.Column('is_confirmed', sa.Boolean, default=False),
        sa.Column('is_recurring', sa.Boolean, default=False),
        sa.Column('recurrence_pattern', sa.String(200), nullable=True),
        sa.Column('actual_impact_multiplier', sa.Float, nullable=True),
        sa.Column('actual_sales_increase_percent', sa.Float, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('created_by', sa.String(255), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
    )

    # Create event_templates table with current model structure
    op.create_table(
        'event_templates',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('template_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('default_impact', sa.String(50), nullable=True),
        sa.Column('default_impact_multiplier', sa.Float, nullable=True),
        sa.Column('default_affected_categories', sa.String(500), nullable=True),
        sa.Column('recurrence_pattern', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean, default=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
    )

    # Create indexes for better query performance on events
    op.create_index('ix_events_tenant_date', 'events', ['tenant_id', 'event_date'])
    op.create_index('ix_events_type_date', 'events', ['event_type', 'event_date'])
    op.create_index('ix_event_templates_tenant_active', 'event_templates', ['tenant_id', 'is_active'])

    # Create tenant_locations table (from 004 migration)
    op.create_table('tenant_locations',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('location_type', sa.String(length=50), nullable=False),  # central_production, retail_outlet
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(length=100), nullable=False, server_default='Madrid'),
        sa.Column('postal_code', sa.String(length=10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('delivery_windows', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('capacity', sa.Integer(), nullable=True),
        sa.Column('max_delivery_radius_km', sa.Float(), nullable=True, default=50.0),
        sa.Column('operational_hours', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
        sa.Column('contact_person', sa.String(length=200), nullable=True),
        sa.Column('contact_phone', sa.String(length=20), nullable=True),
        sa.Column('contact_email', sa.String(length=255), nullable=True),
        sa.Column('delivery_schedule_config', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('metadata_', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_tenant_locations_tenant_id', 'tenant_locations', ['tenant_id'])
    op.create_index('ix_tenant_locations_location_type', 'tenant_locations', ['location_type'])
    op.create_index('ix_tenant_locations_coordinates', 'tenant_locations', ['latitude', 'longitude'])

    # Add constraint to ensure data consistency for tenant linking
    op.create_check_constraint(
        'chk_tenant_linking',
        'subscriptions',
        "((is_tenant_linked = FALSE AND tenant_id IS NULL) OR (is_tenant_linked = TRUE AND tenant_id IS NOT NULL))"
    )


def downgrade() -> None:
    # Drop tenant_locations table
    op.drop_index('ix_tenant_locations_coordinates')
    op.drop_index('ix_tenant_locations_location_type')
    op.drop_index('ix_tenant_locations_tenant_id')
    op.drop_table('tenant_locations')

    # Drop indexes for events
    op.drop_index('ix_event_templates_tenant_active', table_name='event_templates')
    op.drop_index('ix_events_type_date', table_name='events')
    op.drop_index('ix_events_tenant_date', table_name='events')

    # Drop event-related tables
    op.drop_table('event_templates')
    op.drop_table('events')

    # Drop coupon-related tables
    op.drop_index('idx_redemption_tenant_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_tenant', table_name='coupon_redemptions')
    op.drop_table('coupon_redemptions')

    op.drop_index('idx_coupon_tenant_id', table_name='coupons')
    op.drop_index('idx_coupon_valid_dates', table_name='coupons')
    op.drop_index('idx_coupon_code_active', table_name='coupons')
    op.drop_table('coupons')

    # Drop check constraint for tenant linking
    op.drop_constraint('chk_tenant_linking', 'subscriptions', type_='check')

    # Drop subscriptions table indexes first
    op.drop_index('idx_subscriptions_linking_status', table_name='subscriptions')
    op.drop_index('idx_subscriptions_user_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_customer_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_subscription_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_active_tenant', table_name='subscriptions')
    op.drop_index('idx_subscriptions_status_billing', table_name='subscriptions')
    op.drop_index('idx_subscriptions_tenant_covering', table_name='subscriptions')
    op.drop_index('idx_subscriptions_tenant_status', table_name='subscriptions')

    # Drop subscriptions table
    op.drop_table('subscriptions')

    # Drop tenant_settings table
    op.drop_table('tenant_settings')

    # Drop other tables in reverse order
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    op.drop_table('tenant_members')

    # Drop tenant hierarchy constraints and indexes
    op.drop_constraint('check_parent_not_self', 'tenants', type_='check')
    op.drop_constraint('fk_tenants_parent_tenant', 'tenants', type_='foreignkey')
    op.drop_index('ix_tenants_hierarchy_path', table_name='tenants')
    op.drop_index('ix_tenants_tenant_type', table_name='tenants')
    op.drop_index('ix_tenants_parent_tenant_id', table_name='tenants')
    op.drop_index(op.f('ix_tenants_owner_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo_template'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_demo_session_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_base_demo_tenant_id'), table_name='tenants')
    op.drop_table('tenants')

    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
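After running this revision (e.g. alembic upgrade head), the resulting schema can be sanity-checked with SQLAlchemy's inspector. A small sketch, assuming a sync-driver URL for the same database (credentials are placeholders, not part of this commit):

import sqlalchemy as sa

# Placeholder credentials; use the real tenant-service database URL.
engine = sa.create_engine("postgresql://tenant_user:password@localhost:5432/tenant_db")
inspector = sa.inspect(engine)

expected = {
    "audit_logs", "tenants", "tenant_members", "tenant_settings",
    "subscriptions", "coupons", "coupon_redemptions",
    "events", "event_templates", "tenant_locations",
}
missing = expected - set(inspector.get_table_names())
print("missing tables:", missing or "none")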
@@ -0,0 +1,51 @@
"""Fix tenant_id nullable constraint for tenant-independent subscriptions

This migration fixes the database schema issue where tenant_id was incorrectly
defined as NOT NULL, preventing tenant-independent subscription creation during
registration.

Revision ID: 002_fix_tenant_id_nullable
Revises: 001_unified_initial_schema
Create Date: 2026-01-15 19:00:00.000000+00:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '002_fix_tenant_id_nullable'
down_revision: Union[str, None] = '001_unified_initial_schema'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # First, drop the foreign key constraint temporarily
    op.drop_constraint('subscriptions_tenant_id_fkey', 'subscriptions', type_='foreignkey')

    # Alter the tenant_id column to be nullable
    op.alter_column('subscriptions', 'tenant_id',
                    existing_type=sa.UUID(),
                    nullable=True)

    # Recreate the foreign key constraint
    op.create_foreign_key('subscriptions_tenant_id_fkey', 'subscriptions', 'tenants',
                          ['tenant_id'], ['id'], ondelete='CASCADE')


def downgrade() -> None:
    # Drop the foreign key constraint temporarily
    op.drop_constraint('subscriptions_tenant_id_fkey', 'subscriptions', type_='foreignkey')

    # Revert the tenant_id column to be non-nullable.
    # Note: this will fail if any records have tenant_id = NULL.
    op.alter_column('subscriptions', 'tenant_id',
                    existing_type=sa.UUID(),
                    nullable=False)

    # Recreate the foreign key constraint
    op.create_foreign_key('subscriptions_tenant_id_fkey', 'subscriptions', 'tenants',
                          ['tenant_id'], ['id'], ondelete='CASCADE')
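A quick way to confirm this revision took effect is to inspect the column metadata. A sketch under the same placeholder-URL assumption as above:

import sqlalchemy as sa

# Placeholder credentials; use the real tenant-service database URL.
engine = sa.create_engine("postgresql://tenant_user:password@localhost:5432/tenant_db")
columns = {c["name"]: c for c in sa.inspect(engine).get_columns("subscriptions")}
# After 002, tenant_id must be nullable so a subscription can exist before a tenant.
assert columns["tenant_id"]["nullable"] is True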