Improve AI logic

This commit is contained in:
Urtzi Alfaro
2025-11-05 13:34:56 +01:00
parent 5c87fbcf48
commit 394ad3aea4
218 changed files with 30627 additions and 7658 deletions

View File

@@ -0,0 +1,295 @@
"""Comprehensive initial schema with all tenant service tables and columns
Revision ID: 001_initial_schema
Revises:
Create Date: 2025-11-05 13:30:00.000000+00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import UUID
import uuid
# revision identifiers, used by Alembic.
revision: str = '001_initial_schema'  # NOTE(review): module docstring says 'initial_schema_comprehensive' — the two ids should agree
down_revision: Union[str, None] = None  # base revision: no parent migration
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the complete tenant-service schema.

    Tables: audit_logs, tenants, tenant_members, tenant_settings,
    subscriptions, coupons, coupon_redemptions, events, event_templates,
    plus all supporting indexes.

    Fix: defaults are declared with ``server_default`` so they are rendered
    into the emitted DDL.  The previous version used ``default=`` and
    ``onupdate=``, which are Python-side hooks on the ORM Table object and
    are silently ignored by migration DDL — the columns ended up with no
    database default at all.  ``onupdate`` has no DDL equivalent, so keeping
    ``updated_at`` current is left to the application (or a DB trigger).
    The inert ``default=uuid.uuid4`` on the events/event_templates primary
    keys was dropped for the same reason; ids are supplied by the app models.
    """
    # --- audit_logs: append-only audit trail, written by every service ---
    op.create_table('audit_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('action', sa.String(length=100), nullable=False),
        sa.Column('resource_type', sa.String(length=100), nullable=False),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('severity', sa.String(length=20), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),  # 45 chars fits IPv6
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('endpoint', sa.String(length=255), nullable=True),
        sa.Column('method', sa.String(length=10), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes for the common audit queries, then single-column ones.
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)

    # --- tenants: one row per business; root of all FK cascades ---
    op.create_table('tenants',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('subdomain', sa.String(length=100), nullable=True),
        sa.Column('business_type', sa.String(length=100), nullable=True),
        sa.Column('business_model', sa.String(length=100), nullable=True),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(length=100), nullable=True),
        sa.Column('postal_code', sa.String(length=10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('timezone', sa.String(length=50), nullable=False),
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_demo', sa.Boolean(), nullable=True),
        sa.Column('is_demo_template', sa.Boolean(), nullable=True),
        sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True),
        sa.Column('demo_session_id', sa.String(length=100), nullable=True),
        sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
        sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('metadata_', sa.JSON(), nullable=True),
        sa.Column('owner_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        # onupdate has no DDL form; refreshing updated_at is the app's job.
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)

    # --- tenant_members: user <-> tenant membership with role/permissions ---
    op.create_table('tenant_members',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('role', sa.String(length=50), nullable=True),
        sa.Column('permissions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('invited_by', sa.UUID(), nullable=True),
        sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)

    # --- tenant_settings: one JSON settings bundle per tenant ---
    op.create_table('tenant_settings',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('production_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('order_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('replenishment_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('safety_stock_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('moq_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_selection_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('ml_insights_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('tenant_id')  # exactly one settings row per tenant
    )

    # --- subscriptions: billing plan + limits, incl. Stripe linkage ---
    op.create_table('subscriptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan', sa.String(length=50), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('monthly_price', sa.Float(), nullable=True),
        sa.Column('billing_cycle', sa.String(length=20), nullable=True),
        sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancellation_effective_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('stripe_subscription_id', sa.String(255), nullable=True),
        sa.Column('stripe_customer_id', sa.String(255), nullable=True),
        sa.Column('max_users', sa.Integer(), nullable=True),
        sa.Column('max_locations', sa.Integer(), nullable=True),
        sa.Column('max_products', sa.Integer(), nullable=True),
        sa.Column('features', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )

    # --- coupons: promotional codes; code is globally unique so redemptions
    # can reference it by value (NOT per-tenant — mirrors unique=True on the model) ---
    op.create_table('coupons',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('code', sa.String(length=50), nullable=False),
        sa.Column('discount_type', sa.String(length=20), nullable=False),
        sa.Column('discount_value', sa.Integer(), nullable=False),
        sa.Column('max_redemptions', sa.Integer(), nullable=True),  # NULL = unlimited
        sa.Column('current_redemptions', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('valid_from', sa.DateTime(timezone=True), nullable=False),
        sa.Column('valid_until', sa.DateTime(timezone=True), nullable=True),  # NULL = no expiry
        sa.Column('active', sa.Boolean(), nullable=False, server_default=sa.text('true')),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('code')  # required by the coupon_redemptions FK on code
    )
    op.create_index('idx_coupon_code_active', 'coupons', ['code', 'active'], unique=False)
    op.create_index('idx_coupon_valid_dates', 'coupons', ['valid_from', 'valid_until'], unique=False)

    # --- coupon_redemptions: one row per redemption, FK by coupon code ---
    op.create_table('coupon_redemptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=False),  # string, not UUID FK, in the model
        sa.Column('coupon_code', sa.String(length=50), nullable=False),
        sa.Column('redeemed_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('discount_applied', sa.JSON(), nullable=False),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['coupon_code'], ['coupons.code'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_redemption_tenant', 'coupon_redemptions', ['tenant_id'], unique=False)
    op.create_index('idx_redemption_coupon', 'coupon_redemptions', ['coupon_code'], unique=False)
    op.create_index('idx_redemption_tenant_coupon', 'coupon_redemptions', ['tenant_id', 'coupon_code'], unique=False)

    # --- events: demand-impacting calendar events used by forecasting ---
    # NOTE(review): index=True inside op.create_table may not emit CREATE INDEX
    # on every Alembic version — confirm; the explicit op.create_index calls
    # below only cover the composite indexes.
    op.create_table(
        'events',
        sa.Column('id', UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('event_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False, index=True),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('event_date', sa.Date, nullable=False, index=True),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_all_day', sa.Boolean, server_default=sa.text('true')),
        sa.Column('expected_impact', sa.String(50), nullable=True),
        sa.Column('impact_multiplier', sa.Float, nullable=True),
        sa.Column('affected_product_categories', sa.String(500), nullable=True),
        sa.Column('location', sa.String(500), nullable=True),
        sa.Column('is_local', sa.Boolean, server_default=sa.text('true')),
        sa.Column('is_confirmed', sa.Boolean, server_default=sa.text('false')),
        sa.Column('is_recurring', sa.Boolean, server_default=sa.text('false')),
        sa.Column('recurrence_pattern', sa.String(200), nullable=True),
        sa.Column('actual_impact_multiplier', sa.Float, nullable=True),
        sa.Column('actual_sales_increase_percent', sa.Float, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('created_by', sa.String(255), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
    )

    # --- event_templates: reusable presets for recurring events ---
    op.create_table(
        'event_templates',
        sa.Column('id', UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('template_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('default_impact', sa.String(50), nullable=True),
        sa.Column('default_impact_multiplier', sa.Float, nullable=True),
        sa.Column('default_affected_categories', sa.String(500), nullable=True),
        sa.Column('recurrence_pattern', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean, server_default=sa.text('true')),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False,
                  server_default=sa.text('CURRENT_TIMESTAMP')),
    )

    # Composite indexes for the common event-lookup queries.
    op.create_index('ix_events_tenant_date', 'events', ['tenant_id', 'event_date'])
    op.create_index('ix_events_type_date', 'events', ['event_type', 'event_date'])
    op.create_index('ix_event_templates_tenant_active', 'event_templates', ['tenant_id', 'is_active'])
def downgrade() -> None:
    """Drop everything created by upgrade(), in reverse dependency order."""
    # Event tables and their explicit composite indexes.
    op.drop_index('ix_event_templates_tenant_active', table_name='event_templates')
    op.drop_index('ix_events_type_date', table_name='events')
    op.drop_index('ix_events_tenant_date', table_name='events')
    op.drop_table('event_templates')
    op.drop_table('events')

    # Coupon system: redemptions first (they FK onto coupons.code).
    for idx_name in ('idx_redemption_tenant_coupon',
                     'idx_redemption_coupon',
                     'idx_redemption_tenant'):
        op.drop_index(idx_name, table_name='coupon_redemptions')
    op.drop_table('coupon_redemptions')
    for idx_name in ('idx_coupon_valid_dates', 'idx_coupon_code_active'):
        op.drop_index(idx_name, table_name='coupons')
    op.drop_table('coupons')

    # Per-tenant satellites.
    op.drop_table('subscriptions')
    op.drop_table('tenant_settings')
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    op.drop_table('tenant_members')

    # Tenants (FK target of everything above).
    for idx_name in ('ix_tenants_owner_id',
                     'ix_tenants_is_demo_template',
                     'ix_tenants_is_demo',
                     'ix_tenants_demo_session_id',
                     'ix_tenants_base_demo_tenant_id'):
        op.drop_index(op.f(idx_name), table_name='tenants')
    op.drop_table('tenants')

    # Audit log: auto-named single-column indexes, then composites, then table.
    for idx_name in ('ix_audit_logs_user_id', 'ix_audit_logs_tenant_id',
                     'ix_audit_logs_severity', 'ix_audit_logs_service_name',
                     'ix_audit_logs_resource_type', 'ix_audit_logs_resource_id',
                     'ix_audit_logs_created_at', 'ix_audit_logs_action'):
        op.drop_index(op.f(idx_name), table_name='audit_logs')
    for idx_name in ('idx_audit_user_created', 'idx_audit_tenant_created',
                     'idx_audit_severity_created', 'idx_audit_service_created',
                     'idx_audit_resource_type_action'):
        op.drop_index(idx_name, table_name='audit_logs')
    op.drop_table('audit_logs')

View File

@@ -1,151 +0,0 @@
"""initial_schema_20251015_1230
Revision ID: 4e1ddc13dd0f
Revises:
Create Date: 2025-10-15 12:30:04.847858+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '4e1ddc13dd0f'
down_revision: Union[str, None] = None  # base revision: no parent migration
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: audit_logs, tenants, subscriptions,
    tenant_members, plus their indexes.

    Auto-generated by Alembic; the DDL sequence is left untouched.
    NOTE(review): ``created_at``/``updated_at`` carry no ``server_default``
    here, so the database will not fill them in — presumably the ORM models
    do; confirm against the application layer.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # audit_logs: append-only audit trail written by every service.
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),  # 45 chars fits IPv6
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes for common audit queries, then single-column ones.
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    # tenants: one row per business; root of the FK cascades below.
    op.create_table('tenants',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=200), nullable=False),
    sa.Column('subdomain', sa.String(length=100), nullable=True),
    sa.Column('business_type', sa.String(length=100), nullable=True),
    sa.Column('business_model', sa.String(length=100), nullable=True),
    sa.Column('address', sa.Text(), nullable=False),
    sa.Column('city', sa.String(length=100), nullable=True),
    sa.Column('postal_code', sa.String(length=10), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('timezone', sa.String(length=50), nullable=False),
    sa.Column('phone', sa.String(length=20), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('subscription_tier', sa.String(length=50), nullable=True),
    sa.Column('is_demo', sa.Boolean(), nullable=True),
    sa.Column('is_demo_template', sa.Boolean(), nullable=True),
    sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True),
    sa.Column('demo_session_id', sa.String(length=100), nullable=True),
    sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
    sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('metadata_', sa.JSON(), nullable=True),
    sa.Column('owner_id', sa.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)
    # subscriptions: billing plan and limits; cascades away with the tenant.
    op.create_table('subscriptions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('plan', sa.String(length=50), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=True),
    sa.Column('monthly_price', sa.Float(), nullable=True),
    sa.Column('billing_cycle', sa.String(length=20), nullable=True),
    sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('max_users', sa.Integer(), nullable=True),
    sa.Column('max_locations', sa.Integer(), nullable=True),
    sa.Column('max_products', sa.Integer(), nullable=True),
    sa.Column('features', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # tenant_members: user <-> tenant membership with role/permissions.
    op.create_table('tenant_members',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('role', sa.String(length=50), nullable=True),
    sa.Column('permissions', sa.Text(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('invited_by', sa.UUID(), nullable=True),
    sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Tear down the initial schema, dependents before their FK targets."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    op.drop_table('tenant_members')
    op.drop_table('subscriptions')

    # Tenants: drop the auto-named indexes, then the table itself.
    for idx_name in ('ix_tenants_owner_id',
                     'ix_tenants_is_demo_template',
                     'ix_tenants_is_demo',
                     'ix_tenants_demo_session_id',
                     'ix_tenants_base_demo_tenant_id'):
        op.drop_index(op.f(idx_name), table_name='tenants')
    op.drop_table('tenants')

    # Audit log: auto-named single-column indexes, then composites, then table.
    for idx_name in ('ix_audit_logs_user_id', 'ix_audit_logs_tenant_id',
                     'ix_audit_logs_severity', 'ix_audit_logs_service_name',
                     'ix_audit_logs_resource_type', 'ix_audit_logs_resource_id',
                     'ix_audit_logs_created_at', 'ix_audit_logs_action'):
        op.drop_index(op.f(idx_name), table_name='audit_logs')
    for idx_name in ('idx_audit_user_created', 'idx_audit_tenant_created',
                     'idx_audit_severity_created', 'idx_audit_service_created',
                     'idx_audit_resource_type_action'):
        op.drop_index(idx_name, table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###

View File

@@ -1,32 +0,0 @@
"""add_subscription_cancellation_fields
Revision ID: 20251016_0000
Revises: 4e1ddc13dd0f
Create Date: 2025-10-16 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '20251016_0000'
down_revision = '4e1ddc13dd0f'  # follows the initial schema
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add cancellation tracking and Stripe linkage to subscriptions.

    All four columns are nullable so existing rows need no backfill.
    """
    new_columns = (
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancellation_effective_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('stripe_subscription_id', sa.String(length=255), nullable=True),
        sa.Column('stripe_customer_id', sa.String(length=255), nullable=True),
    )
    for column in new_columns:
        op.add_column('subscriptions', column)
def downgrade() -> None:
    """Remove the cancellation/Stripe columns (reverse of upgrade order)."""
    for column_name in ('stripe_customer_id',
                        'stripe_subscription_id',
                        'cancellation_effective_date',
                        'cancelled_at'):
        op.drop_column('subscriptions', column_name)

View File

@@ -1,69 +0,0 @@
"""add_coupon_system
Revision ID: 20251017_0000
Revises: 20251016_0000
Create Date: 2025-10-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = '20251017_0000'
down_revision = '20251016_0000'  # follows the subscription-cancellation migration
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the coupon system: coupons, coupon_redemptions, and indexes.

    Fix: the inert ``default=uuid.uuid4`` arguments were removed from the
    primary-key columns.  ``default=`` is a Python-side hook on the ORM
    Table object and is ignored by DDL emitted from a migration, so it never
    produced a database default — ids must be supplied by the application
    (its models carry their own ``default=uuid.uuid4``).  The genuine
    DB-side defaults below already use ``server_default`` correctly.
    """
    # coupons: one row per promotional code; `code` is globally unique so
    # redemptions can reference it by value.
    op.create_table(
        'coupons',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('code', sa.String(50), nullable=False, unique=True),
        sa.Column('discount_type', sa.String(20), nullable=False),
        sa.Column('discount_value', sa.Integer(), nullable=False),
        sa.Column('max_redemptions', sa.Integer(), nullable=True),  # NULL = unlimited
        sa.Column('current_redemptions', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('valid_from', sa.DateTime(timezone=True), nullable=False),
        sa.Column('valid_until', sa.DateTime(timezone=True), nullable=True),  # NULL = no expiry
        sa.Column('active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    )
    # Indexes for code lookup and validity-window scans.
    op.create_index('idx_coupon_code_active', 'coupons', ['code', 'active'])
    op.create_index('idx_coupon_valid_dates', 'coupons', ['valid_from', 'valid_until'])
    # coupon_redemptions: one row per redemption, FK onto coupons.code.
    op.create_table(
        'coupon_redemptions',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', sa.String(255), nullable=False),
        sa.Column('coupon_code', sa.String(50), nullable=False),
        sa.Column('redeemed_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('discount_applied', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['coupon_code'], ['coupons.code'], name='fk_coupon_redemption_code'),
    )
    # Indexes for per-tenant and per-coupon redemption lookups.
    op.create_index('idx_redemption_tenant', 'coupon_redemptions', ['tenant_id'])
    op.create_index('idx_redemption_coupon', 'coupon_redemptions', ['coupon_code'])
    op.create_index('idx_redemption_tenant_coupon', 'coupon_redemptions', ['tenant_id', 'coupon_code'])
def downgrade() -> None:
    """Remove the coupon system: indexes first, then the two tables."""
    # Redemption indexes, then coupon indexes — same order as the original.
    for idx_name in ('idx_redemption_tenant_coupon',
                     'idx_redemption_coupon',
                     'idx_redemption_tenant'):
        op.drop_index(idx_name, table_name='coupon_redemptions')
    for idx_name in ('idx_coupon_valid_dates', 'idx_coupon_code_active'):
        op.drop_index(idx_name, table_name='coupons')
    # Redemptions FK onto coupons.code, so they must go first.
    op.drop_table('coupon_redemptions')
    op.drop_table('coupons')

View File

@@ -1,155 +0,0 @@
"""add tenant_settings
Revision ID: 20251022_0000
Revises: 20251017_0000
Create Date: 2025-10-22
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
# revision identifiers, used by Alembic.
revision = '20251022_0000'
down_revision = '20251017_0000'  # follows the coupon-system migration
branch_labels = None
depends_on = None
def get_default_settings():
    """Return the default settings payload for every category.

    The result maps each ``*_settings`` column name of ``tenant_settings``
    to its default JSON-serializable dict; used to seed existing tenants.
    """
    procurement = {
        "auto_approve_enabled": True,
        "auto_approve_threshold_eur": 500.0,
        "auto_approve_min_supplier_score": 0.80,
        "require_approval_new_suppliers": True,
        "require_approval_critical_items": True,
        "procurement_lead_time_days": 3,
        "demand_forecast_days": 14,
        "safety_stock_percentage": 20.0,
        "po_approval_reminder_hours": 24,
        "po_critical_escalation_hours": 12,
    }
    inventory = {
        "low_stock_threshold": 10,
        "reorder_point": 20,
        "reorder_quantity": 50,
        "expiring_soon_days": 7,
        "expiration_warning_days": 3,
        "quality_score_threshold": 8.0,
        "temperature_monitoring_enabled": True,
        # Storage temperature bands, in Celsius.
        "refrigeration_temp_min": 1.0,
        "refrigeration_temp_max": 4.0,
        "freezer_temp_min": -20.0,
        "freezer_temp_max": -15.0,
        "room_temp_min": 18.0,
        "room_temp_max": 25.0,
        "temp_deviation_alert_minutes": 15,
        "critical_temp_deviation_minutes": 5,
    }
    production = {
        "planning_horizon_days": 7,
        "minimum_batch_size": 1.0,
        "maximum_batch_size": 100.0,
        "production_buffer_percentage": 10.0,
        "working_hours_per_day": 12,
        "max_overtime_hours": 4,
        "capacity_utilization_target": 0.85,
        "capacity_warning_threshold": 0.95,
        "quality_check_enabled": True,
        "minimum_yield_percentage": 85.0,
        "quality_score_threshold": 8.0,
        "schedule_optimization_enabled": True,
        "prep_time_buffer_minutes": 30,
        "cleanup_time_buffer_minutes": 15,
        "labor_cost_per_hour_eur": 15.0,
        "overhead_cost_percentage": 20.0,
    }
    supplier = {
        "default_payment_terms_days": 30,
        "default_delivery_days": 3,
        "excellent_delivery_rate": 95.0,
        "good_delivery_rate": 90.0,
        "excellent_quality_rate": 98.0,
        "good_quality_rate": 95.0,
        "critical_delivery_delay_hours": 24,
        "critical_quality_rejection_rate": 10.0,
        "high_cost_variance_percentage": 15.0,
    }
    pos = {
        "sync_interval_minutes": 5,
        "auto_sync_products": True,
        "auto_sync_transactions": True,
    }
    order = {
        "max_discount_percentage": 50.0,
        "default_delivery_window_hours": 48,
        "dynamic_pricing_enabled": False,
        "discount_enabled": True,
        "delivery_tracking_enabled": True,
    }
    return {
        "procurement_settings": procurement,
        "inventory_settings": inventory,
        "production_settings": production,
        "supplier_settings": supplier,
        "pos_settings": pos,
        "order_settings": order,
    }
def upgrade():
    """Create the tenant_settings table and seed a settings row for every existing tenant.

    One JSON document is stored per settings category, keyed 1:1 to a tenant
    (enforced by the unique constraint on tenant_id).
    """
    import json  # local import: serialize seed payloads into valid JSON

    op.create_table(
        'tenant_settings',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid4),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(), nullable=False),
        sa.Column('production_settings', postgresql.JSON(), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(), nullable=False),
        sa.Column('order_settings', postgresql.JSON(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.UniqueConstraint('tenant_id', name='uq_tenant_settings_tenant_id')
    )

    # Index for the common "settings by tenant" lookup.
    op.create_index('ix_tenant_settings_tenant_id', 'tenant_settings', ['tenant_id'])

    # Seed every existing tenant with the default settings.
    connection = op.get_bind()
    result = connection.execute(sa.text("SELECT id FROM tenants"))
    tenant_ids = [row[0] for row in result]

    defaults = get_default_settings()
    # BUGFIX: use json.dumps for serialization. The previous
    # str(dict).replace("'", '"').replace("True", "true")... chain produced
    # invalid JSON for any value containing quotes, apostrophes, or the
    # substrings "True"/"False" inside string values.
    for tenant_id in tenant_ids:
        connection.execute(
            sa.text("""
                INSERT INTO tenant_settings (
                    id, tenant_id, procurement_settings, inventory_settings,
                    production_settings, supplier_settings, pos_settings, order_settings
                ) VALUES (
                    :id, :tenant_id, :procurement_settings::jsonb, :inventory_settings::jsonb,
                    :production_settings::jsonb, :supplier_settings::jsonb,
                    :pos_settings::jsonb, :order_settings::jsonb
                )
            """),
            {
                "id": str(uuid4()),
                "tenant_id": tenant_id,
                "procurement_settings": json.dumps(defaults["procurement_settings"]),
                "inventory_settings": json.dumps(defaults["inventory_settings"]),
                "production_settings": json.dumps(defaults["production_settings"]),
                "supplier_settings": json.dumps(defaults["supplier_settings"]),
                "pos_settings": json.dumps(defaults["pos_settings"]),
                "order_settings": json.dumps(defaults["order_settings"]),
            }
        )
def downgrade():
    """Tear down tenant_settings: drop the index first, then the table itself."""
    table = 'tenant_settings'
    op.drop_index('ix_tenant_settings_tenant_id', table_name=table)
    op.drop_table(table)

View File

@@ -1,43 +0,0 @@
"""add smart procurement settings to tenant settings
Revision ID: 20251025_procurement
Revises: 20251022_0000
Create Date: 2025-10-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json
# revision identifiers, used by Alembic.
revision = '20251025_procurement'
down_revision = '20251022_0000'
branch_labels = None
depends_on = None
def upgrade():
    """Merge the smart-procurement feature flags into every procurement_settings row.

    A single set-based UPDATE touches all rows at once, avoiding per-row cursor
    work. The column is json, so it is cast through jsonb for the || merge
    operator and back again.
    """
    merge_sql = """
    UPDATE tenant_settings
    SET
        procurement_settings = (procurement_settings::jsonb ||
            '{"use_reorder_rules": true, "economic_rounding": true, "respect_storage_limits": true, "use_supplier_minimums": true, "optimize_price_tiers": true}'::jsonb)::json,
        updated_at = now()
    """
    op.execute(merge_sql)
def downgrade():
    """Delete the smart-procurement flag keys from every procurement_settings row."""
    # jsonb's `-` operator drops one key per application; cast back to json after.
    removal_sql = (
        "UPDATE tenant_settings "
        "SET procurement_settings = (procurement_settings::jsonb"
        " - 'use_reorder_rules' - 'economic_rounding' - 'respect_storage_limits'"
        " - 'use_supplier_minimums' - 'optimize_price_tiers')::json, "
        "updated_at = now()"
    )
    op.execute(removal_sql)

View File

@@ -1,43 +0,0 @@
"""add supplier approval workflow settings to tenant settings
Revision ID: 20251025_supplier_approval
Revises: 20251025_procurement
Create Date: 2025-10-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json
# revision identifiers, used by Alembic.
revision = '20251025_supplier_approval'
down_revision = '20251025_procurement'
branch_labels = None
depends_on = None
def upgrade():
    """Merge the supplier approval-workflow defaults into every supplier_settings row.

    One set-based UPDATE covers all tenants at once; the json column is cast
    through jsonb so the || merge operator can be used, then cast back.
    """
    merge_sql = """
    UPDATE tenant_settings
    SET
        supplier_settings = (supplier_settings::jsonb ||
            '{"require_supplier_approval": true, "auto_approve_for_admin_owner": true, "approval_required_roles": ["member", "viewer"]}'::jsonb)::json,
        updated_at = now()
    """
    op.execute(merge_sql)
def downgrade():
    """Delete the approval-workflow keys from every supplier_settings row."""
    # jsonb's `-` operator removes one key per application; cast back to json after.
    removal_sql = (
        "UPDATE tenant_settings "
        "SET supplier_settings = (supplier_settings::jsonb"
        " - 'require_supplier_approval' - 'auto_approve_for_admin_owner'"
        " - 'approval_required_roles')::json, "
        "updated_at = now()"
    )
    op.execute(removal_sql)

View File

@@ -1,103 +0,0 @@
"""remove subscription_tier from tenants
Revision ID: 20251028_remove_sub_tier
Revises: 20251025_supplier_approval
Create Date: 2025-10-28 12:00:00.000000
This migration removes the denormalized subscription_tier column from the tenants table.
The subscription tier is now sourced exclusively from the subscriptions table (single source of truth).
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251028_remove_sub_tier'
down_revision = '20251025_supplier_approval'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the denormalized subscription_tier column from tenants.

    Before dropping, every tenant must hold an active subscription row, since
    the subscriptions table becomes the single source of truth for the tier.
    """
    bind = op.get_bind()

    # Count tenants that currently lack an active subscription.
    row = bind.execute(sa.text("""
        SELECT COUNT(*) as count
        FROM tenants t
        LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
        WHERE s.id IS NULL
    """)).fetchone()
    missing_count = row[0]

    if missing_count > 0:
        # Backfill a default 'starter' subscription for each orphaned tenant.
        bind.execute(sa.text("""
            INSERT INTO subscriptions (
                id, tenant_id, plan, status, monthly_price, billing_cycle,
                max_users, max_locations, max_products, features, created_at, updated_at
            )
            SELECT
                gen_random_uuid(),
                t.id,
                'starter',
                'active',
                49.0,
                'monthly',
                5,
                1,
                50,
                '{"inventory_management": true, "demand_prediction": true}'::jsonb,
                NOW(),
                NOW()
            FROM tenants t
            LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
            WHERE s.id IS NULL
        """))
        print(f"Created default subscriptions for {missing_count} tenants without subscriptions")

    # Safe to drop now that every tenant is covered by a subscription row.
    op.drop_column('tenants', 'subscription_tier')
    print("Successfully removed subscription_tier column from tenants table")
def downgrade():
    """Restore tenants.subscription_tier, repopulated from the subscriptions table.

    Rollback helper only — forward-going code should keep reading the tier
    from the subscriptions table.
    """
    # Re-create the column as nullable so it can be backfilled first.
    op.add_column('tenants',
        sa.Column('subscription_tier', sa.String(length=50), nullable=True)
    )

    bind = op.get_bind()

    # Copy each tenant's active plan back into the column.
    bind.execute(sa.text("""
        UPDATE tenants t
        SET subscription_tier = s.plan
        FROM subscriptions s
        WHERE t.id = s.tenant_id
        AND s.status = 'active'
    """))

    # Fall back to 'starter' for tenants without an active subscription.
    bind.execute(sa.text("""
        UPDATE tenants
        SET subscription_tier = 'starter'
        WHERE subscription_tier IS NULL
    """))

    # Every row now has a value, so NOT NULL can be enforced.
    op.alter_column('tenants', 'subscription_tier', nullable=False)
    print("Restored subscription_tier column (downgrade)")

View File

@@ -1,102 +0,0 @@
"""add missing settings columns to tenant settings
Revision ID: 20251030_add_missing_settings
Revises: 20251028_remove_sub_tier
Create Date: 2025-10-30
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
import json
# revision identifiers, used by Alembic.
revision = '20251030_add_missing_settings'
down_revision = '20251028_remove_sub_tier'
branch_labels = None
depends_on = None
def get_default_settings():
    """Return the default payloads for the four new settings categories."""
    replenishment = {
        "projection_horizon_days": 7,
        "service_level": 0.95,
        "buffer_days": 1,
        "enable_auto_replenishment": True,
        "min_order_quantity": 1.0,
        "max_order_quantity": 1000.0,
        "demand_forecast_days": 14,
    }
    safety_stock = {
        "service_level": 0.95,
        "method": "statistical",
        "min_safety_stock": 0.0,
        "max_safety_stock": 100.0,
        "reorder_point_calculation": "safety_stock_plus_lead_time_demand",
    }
    moq = {
        "consolidation_window_days": 7,
        "allow_early_ordering": True,
        "enable_batch_optimization": True,
        "min_batch_size": 1.0,
        "max_batch_size": 1000.0,
    }
    supplier_selection = {
        "price_weight": 0.40,
        "lead_time_weight": 0.20,
        "quality_weight": 0.20,
        "reliability_weight": 0.20,
        "diversification_threshold": 1000,
        "max_single_percentage": 0.70,
        "enable_supplier_score_optimization": True,
    }
    return {
        "replenishment_settings": replenishment,
        "safety_stock_settings": safety_stock,
        "moq_settings": moq,
        "supplier_selection_settings": supplier_selection,
    }
def upgrade():
    """Add the four missing JSON settings columns to tenant_settings.

    Each new column is NOT NULL with a server_default so existing rows are
    populated by the same DDL statement.
    """
    defaults = get_default_settings()

    # BUGFIX: serialize defaults with json.dumps (the module already imports
    # json). The previous str(dict).replace("'", '"').replace("True", "true")
    # chain produced invalid JSON for any value containing quotes or the
    # substrings "True"/"False" inside string values.
    for column_name in (
        'replenishment_settings',
        'safety_stock_settings',
        'moq_settings',
        'supplier_selection_settings',
    ):
        op.add_column(
            'tenant_settings',
            sa.Column(
                column_name,
                postgresql.JSON(),
                nullable=False,
                server_default=json.dumps(defaults[column_name]),
            ),
        )

    # Bump updated_at so consumers can see these rows changed.
    connection = op.get_bind()
    connection.execute(sa.text("UPDATE tenant_settings SET updated_at = now()"))
def downgrade():
    """Drop the four settings columns added by this revision (reverse order of creation)."""
    for column_name in (
        'supplier_selection_settings',
        'moq_settings',
        'safety_stock_settings',
        'replenishment_settings',
    ):
        op.drop_column('tenant_settings', column_name)