Fix Alembic issue
@@ -1,7 +1,6 @@
"""Alembic environment configuration for sales service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models
    from app.models import *  # noqa: F401, F403

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
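Reviewer note: the `noqa`'d wildcard import matters because Alembic autogenerate only sees tables that are registered on Base.metadata, and registration happens when each model class is defined, i.e. at import time. A minimal standalone sketch of that behavior (written for this note, not part of the commit):

from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


print(sorted(Base.metadata.tables))  # [] -- no model classes defined yet


class SalesData(Base):
    # Defining (importing) the class registers its table on Base.metadata.
    __tablename__ = "sales_data"
    id: Mapped[int] = mapped_column(primary_key=True)


print(sorted(Base.metadata.tables))  # ['sales_data']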
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config

# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('SALES_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')

# Set database URL from environment variables with multiple fallback strategies
database_url = (
    os.getenv(f'{service_name_upper}_DATABASE_URL') or  # Service-specific
    os.getenv('DATABASE_URL')  # Generic fallback
)

# If DATABASE_URL is not set, construct from individual components
if not database_url:
    # Try generic PostgreSQL environment variables first
    postgres_host = os.getenv('POSTGRES_HOST')
    postgres_port = os.getenv('POSTGRES_PORT', '5432')
    postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Fallback to settings
        database_url = getattr(settings, 'DATABASE_URL', None)
        # Try service-specific environment variables
        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')

if database_url:
    config.set_main_option("sqlalchemy.url", database_url)
        if db_password:
            database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
        else:
            # Final fallback: try to get from settings object
            try:
                database_url = getattr(settings, 'DATABASE_URL', None)
            except Exception:
                pass

if not database_url:
    error_msg = f"ERROR: No database URL configured for {service_name} service"
    print(error_msg)
    raise Exception(error_msg)

config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
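Reviewer note: the fallback chain in this hunk is easier to read outside diff form. A minimal standalone sketch of the same resolution order (resolve_database_url is a hypothetical helper written for this note, not part of the commit; the final settings fallback is omitted):

import os

def resolve_database_url(service_name: str):
    """1. service URL -> 2. generic URL -> 3. POSTGRES_* parts -> 4. service DB_* parts."""
    prefix = service_name.upper().replace('-', '_')
    # Steps 1-2: a fully-formed URL wins outright.
    url = os.getenv(f'{prefix}_DATABASE_URL') or os.getenv('DATABASE_URL')
    if url:
        return url
    # Step 3: assemble from generic PostgreSQL components.
    host = os.getenv('POSTGRES_HOST')
    port = os.getenv('POSTGRES_PORT', '5432')
    db = os.getenv('POSTGRES_DB')
    user = os.getenv('POSTGRES_USER')
    password = os.getenv('POSTGRES_PASSWORD')
    if all([host, db, user, password]):
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}"
    # Step 4: service-specific components, with conventional defaults.
    password = os.getenv(f'{prefix}_DB_PASSWORD')
    if password:
        host = os.getenv(f'{prefix}_DB_HOST', f'{service_name}-db-service')
        port = os.getenv(f'{prefix}_DB_PORT', '5432')
        db = os.getenv(f'{prefix}_DB_NAME', f"{service_name.replace('-', '_')}_db")
        user = os.getenv(f'{prefix}_DB_USER', f"{service_name.replace('-', '_')}_user")
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}"
    return None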
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    """Run migrations in 'online' mode with async support."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
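Reviewer note: the hunks above show only fragments of the async path. For context, a condensed sketch of the standard async Alembic pattern this env.py appears to follow (assembled from SQLAlchemy's documented API, not copied from the file; config and do_run_migrations are the objects defined above):

import asyncio
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config

async def run_async_migrations() -> None:
    # Build an AsyncEngine from the sqlalchemy.* keys of the Alembic config.
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        # Alembic's migration context is synchronous, so bridge via run_sync().
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()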
@@ -1,75 +0,0 @@
"""Initial schema for sales service

Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table('sales_transactions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('order_id', sa.UUID(), nullable=True),
    sa.Column('customer_id', sa.UUID(), nullable=True),
    sa.Column('transaction_type', sa.String(50), nullable=False),
    sa.Column('payment_method', sa.String(50), nullable=True),
    sa.Column('total_amount', sa.Float(), nullable=False),
    sa.Column('tax_amount', sa.Float(), nullable=True),
    sa.Column('discount_amount', sa.Float(), nullable=True),
    sa.Column('currency', sa.String(3), nullable=True),
    sa.Column('status', sa.String(50), nullable=True),
    sa.Column('transaction_date', sa.DateTime(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_sales_transactions_tenant_id'), 'sales_transactions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_order_id'), 'sales_transactions', ['order_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_customer_id'), 'sales_transactions', ['customer_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_transaction_type'), 'sales_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_sales_transactions_status'), 'sales_transactions', ['status'], unique=False)
    op.create_index(op.f('ix_sales_transactions_transaction_date'), 'sales_transactions', ['transaction_date'], unique=False)

    op.create_table('sales_reports',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('report_type', sa.String(100), nullable=False),
    sa.Column('report_date', sa.Date(), nullable=False),
    sa.Column('period_start', sa.Date(), nullable=False),
    sa.Column('period_end', sa.Date(), nullable=False),
    sa.Column('total_sales', sa.Float(), nullable=False),
    sa.Column('total_transactions', sa.Integer(), nullable=False),
    sa.Column('average_transaction_value', sa.Float(), nullable=True),
    sa.Column('top_products', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('metrics', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_sales_reports_tenant_id'), 'sales_reports', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_sales_reports_report_type'), 'sales_reports', ['report_type'], unique=False)
    op.create_index(op.f('ix_sales_reports_report_date'), 'sales_reports', ['report_date'], unique=False)


def downgrade() -> None:
    op.drop_index(op.f('ix_sales_reports_report_date'), table_name='sales_reports')
    op.drop_index(op.f('ix_sales_reports_report_type'), table_name='sales_reports')
    op.drop_index(op.f('ix_sales_reports_tenant_id'), table_name='sales_reports')
    op.drop_table('sales_reports')
    op.drop_index(op.f('ix_sales_transactions_transaction_date'), table_name='sales_transactions')
    op.drop_index(op.f('ix_sales_transactions_status'), table_name='sales_transactions')
    op.drop_index(op.f('ix_sales_transactions_transaction_type'), table_name='sales_transactions')
    op.drop_index(op.f('ix_sales_transactions_customer_id'), table_name='sales_transactions')
    op.drop_index(op.f('ix_sales_transactions_order_id'), table_name='sales_transactions')
    op.drop_index(op.f('ix_sales_transactions_tenant_id'), table_name='sales_transactions')
    op.drop_table('sales_transactions')
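Reviewer note: the deleted revision '00001' and the new revision 'a0ed92525634' (below) are both roots (down_revision = None), so a database whose alembic_version table is still stamped at '00001' will no longer resolve after this commit. For a disposable dev database, one hedged recovery sketch using Alembic's programmatic API (the alembic.ini path is an assumption, and any tables left by the old schema must be dropped first):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the project's Alembic config
command.stamp(cfg, "base", purge=True)  # clear the stale '00001' bookkeeping row
command.upgrade(cfg, "head")            # rebuild from the new root revision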
@@ -0,0 +1,103 @@
"""initial_schema_20251001_1118

Revision ID: a0ed92525634
Revises:
Create Date: 2025-10-01 11:18:26.606970+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'a0ed92525634'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('sales_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('quantity_sold', sa.Integer(), nullable=False),
    sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('revenue', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('cost_of_goods', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('discount_applied', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('sales_channel', sa.String(length=50), nullable=True),
    sa.Column('source', sa.String(length=50), nullable=False),
    sa.Column('is_validated', sa.Boolean(), nullable=True),
    sa.Column('validation_notes', sa.Text(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('weather_condition', sa.String(length=50), nullable=True),
    sa.Column('is_holiday', sa.Boolean(), nullable=True),
    sa.Column('is_weekend', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_sales_channel_date', 'sales_data', ['sales_channel', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_date_range', 'sales_data', ['date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_inventory_product', 'sales_data', ['inventory_product_id', 'tenant_id'], unique=False)
    op.create_index('idx_sales_product_date', 'sales_data', ['inventory_product_id', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_source_validated', 'sales_data', ['source', 'is_validated', 'tenant_id'], unique=False)
    op.create_index('idx_sales_tenant_date', 'sales_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_sales_tenant_location', 'sales_data', ['tenant_id', 'location_id'], unique=False)
    op.create_index(op.f('ix_sales_data_date'), 'sales_data', ['date'], unique=False)
    op.create_index(op.f('ix_sales_data_inventory_product_id'), 'sales_data', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_sales_data_location_id'), 'sales_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_sales_data_tenant_id'), 'sales_data', ['tenant_id'], unique=False)
    op.create_table('sales_import_jobs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('filename', sa.String(length=255), nullable=False),
    sa.Column('file_size', sa.Integer(), nullable=True),
    sa.Column('import_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('progress_percentage', sa.Float(), nullable=True),
    sa.Column('total_rows', sa.Integer(), nullable=True),
    sa.Column('processed_rows', sa.Integer(), nullable=True),
    sa.Column('successful_imports', sa.Integer(), nullable=True),
    sa.Column('failed_imports', sa.Integer(), nullable=True),
    sa.Column('duplicate_rows', sa.Integer(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('validation_errors', sa.Text(), nullable=True),
    sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_import_jobs_status_date', 'sales_import_jobs', ['status', 'created_at'], unique=False)
    op.create_index('idx_import_jobs_tenant_status', 'sales_import_jobs', ['tenant_id', 'status', 'created_at'], unique=False)
    op.create_index(op.f('ix_sales_import_jobs_tenant_id'), 'sales_import_jobs', ['tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_sales_import_jobs_tenant_id'), table_name='sales_import_jobs')
    op.drop_index('idx_import_jobs_tenant_status', table_name='sales_import_jobs')
    op.drop_index('idx_import_jobs_status_date', table_name='sales_import_jobs')
    op.drop_table('sales_import_jobs')
    op.drop_index(op.f('ix_sales_data_tenant_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_location_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_inventory_product_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_date'), table_name='sales_data')
    op.drop_index('idx_sales_tenant_location', table_name='sales_data')
    op.drop_index('idx_sales_tenant_date', table_name='sales_data')
    op.drop_index('idx_sales_source_validated', table_name='sales_data')
    op.drop_index('idx_sales_product_date', table_name='sales_data')
    op.drop_index('idx_sales_inventory_product', table_name='sales_data')
    op.drop_index('idx_sales_date_range', table_name='sales_data')
    op.drop_index('idx_sales_channel_date', table_name='sales_data')
    op.drop_table('sales_data')
    # ### end Alembic commands ###
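Reviewer note: to sanity-check the regenerated migration locally, the revision history and upgrade can be driven from Python as well as the CLI. A minimal sketch (the alembic.ini path and the example DATABASE_URL are assumptions):

import os
from alembic import command
from alembic.config import Config

# Placeholder credentials; env.py resolves this via the fallback chain above.
os.environ.setdefault("DATABASE_URL", "postgresql+asyncpg://user:pass@localhost:5432/sales_db")
cfg = Config("alembic.ini")
command.history(cfg)          # should list a single root: a0ed92525634 (head)
command.upgrade(cfg, "head")  # creates sales_data and sales_import_jobs
command.current(cfg)          # should report a0ed92525634 (head)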