Initial commit - production deployment

commit c23d00dd92
2026-01-21 17:17:16 +01:00
2289 changed files with 638440 additions and 0 deletions


@@ -0,0 +1,141 @@
"""Alembic environment configuration for sales service"""
import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
sys.path.insert(0, service_path)
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
sys.path.insert(0, shared_path)
try:
from app.core.config import settings
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
print(f"Current Python path: {sys.path}")
raise
# this is the Alembic Config object
config = context.config
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
postgres_user = os.getenv('POSTGRES_USER')
postgres_password = os.getenv('POSTGRES_PASSWORD')
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
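
Because env.py resolves its own database URL, the migrations above can be driven either through the alembic CLI or programmatically. A minimal sketch, assuming the service directory is named "sales" (so the service-specific variable is SALES_DATABASE_URL) and a standard alembic.ini sits next to the migrations/ directory; the DSN is a placeholder:

# Hedged sketch: running the migrations from code instead of the CLI.
import os

from alembic import command
from alembic.config import Config

# Placeholder credentials; the variable name follows the
# f'{service_name_upper}_DATABASE_URL' convention in env.py.
os.environ.setdefault(
    "SALES_DATABASE_URL",
    "postgresql+asyncpg://sales_user:change-me@localhost:5432/sales_db",
)

cfg = Config("alembic.ini")   # assumed location, next to migrations/
command.upgrade(cfg, "head")  # enters env.py and calls run_migrations_online()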


@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
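
Every new revision file is rendered from this Mako template, with Alembic substituting ${message}, ${up_revision}, ${upgrades}, and the rest. A minimal sketch of generating one programmatically; autogenerate diffs Base.metadata against the live schema, so the URL resolution in env.py must succeed first:

# Hedged sketch: rendering a new revision from the template above.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location, as in the previous sketch
# Autogenerate fills ${upgrades}/${downgrades} from the metadata diff;
# the message becomes ${message} in the rendered docstring.
command.revision(cfg, message="describe the schema change", autogenerate=True)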


@@ -0,0 +1,149 @@
"""initial_schema_20251015_1228
Revision ID: 1949ed96e20e
Revises:
Create Date: 2025-10-15 12:28:44.373103+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '1949ed96e20e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('sales_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('quantity_sold', sa.Integer(), nullable=False),
    sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('revenue', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('cost_of_goods', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('discount_applied', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('sales_channel', sa.String(length=50), nullable=True),
    sa.Column('source', sa.String(length=50), nullable=False),
    sa.Column('is_validated', sa.Boolean(), nullable=True),
    sa.Column('validation_notes', sa.Text(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('weather_condition', sa.String(length=50), nullable=True),
    sa.Column('is_holiday', sa.Boolean(), nullable=True),
    sa.Column('is_weekend', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_sales_channel_date', 'sales_data', ['sales_channel', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_date_range', 'sales_data', ['date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_inventory_product', 'sales_data', ['inventory_product_id', 'tenant_id'], unique=False)
    op.create_index('idx_sales_product_date', 'sales_data', ['inventory_product_id', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_source_validated', 'sales_data', ['source', 'is_validated', 'tenant_id'], unique=False)
    op.create_index('idx_sales_tenant_date', 'sales_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_sales_tenant_location', 'sales_data', ['tenant_id', 'location_id'], unique=False)
    op.create_index(op.f('ix_sales_data_date'), 'sales_data', ['date'], unique=False)
    op.create_index(op.f('ix_sales_data_inventory_product_id'), 'sales_data', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_sales_data_location_id'), 'sales_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_sales_data_tenant_id'), 'sales_data', ['tenant_id'], unique=False)
    op.create_table('sales_import_jobs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('filename', sa.String(length=255), nullable=False),
    sa.Column('file_size', sa.Integer(), nullable=True),
    sa.Column('import_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('progress_percentage', sa.Float(), nullable=True),
    sa.Column('total_rows', sa.Integer(), nullable=True),
    sa.Column('processed_rows', sa.Integer(), nullable=True),
    sa.Column('successful_imports', sa.Integer(), nullable=True),
    sa.Column('failed_imports', sa.Integer(), nullable=True),
    sa.Column('duplicate_rows', sa.Integer(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('validation_errors', sa.Text(), nullable=True),
    sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_import_jobs_status_date', 'sales_import_jobs', ['status', 'created_at'], unique=False)
    op.create_index('idx_import_jobs_tenant_status', 'sales_import_jobs', ['tenant_id', 'status', 'created_at'], unique=False)
    op.create_index(op.f('ix_sales_import_jobs_tenant_id'), 'sales_import_jobs', ['tenant_id'], unique=False)
    # ### end Alembic commands ###

def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_sales_import_jobs_tenant_id'), table_name='sales_import_jobs')
    op.drop_index('idx_import_jobs_tenant_status', table_name='sales_import_jobs')
    op.drop_index('idx_import_jobs_status_date', table_name='sales_import_jobs')
    op.drop_table('sales_import_jobs')
    op.drop_index(op.f('ix_sales_data_tenant_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_location_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_inventory_product_id'), table_name='sales_data')
    op.drop_index(op.f('ix_sales_data_date'), table_name='sales_data')
    op.drop_index('idx_sales_tenant_location', table_name='sales_data')
    op.drop_index('idx_sales_tenant_date', table_name='sales_data')
    op.drop_index('idx_sales_source_validated', table_name='sales_data')
    op.drop_index('idx_sales_product_date', table_name='sales_data')
    op.drop_index('idx_sales_inventory_product', table_name='sales_data')
    op.drop_index('idx_sales_date_range', table_name='sales_data')
    op.drop_index('idx_sales_channel_date', table_name='sales_data')
    op.drop_table('sales_data')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
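
Once this revision has run, the three tables and their composite indexes can be checked with SQLAlchemy's runtime inspector. A minimal sketch using a synchronous placeholder DSN (the service itself connects via postgresql+asyncpg):

# Hedged sketch: verifying the initial schema after an upgrade to head.
from sqlalchemy import create_engine, inspect

engine = create_engine("postgresql://sales_user:change-me@localhost:5432/sales_db")  # placeholder DSN
insp = inspect(engine)

# All three tables from the upgrade() above should now exist.
assert {"audit_logs", "sales_data", "sales_import_jobs"} <= set(insp.get_table_names())
# Composite indexes such as idx_sales_tenant_date appear alongside the ix_* ones.
print(sorted(ix["name"] for ix in insp.get_indexes("sales_data")))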