Fix Alembic issue

This commit is contained in:
Urtzi Alfaro
2025-10-01 11:24:06 +02:00
parent 7cc4b957a5
commit 2eeebfc1e0
62 changed files with 6114 additions and 3676 deletions

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for alert-processor service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = "alert-processor"
service_name_upper = "ALERT_PROCESSOR"
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv('ALERT_PROCESSOR_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,22 +56,22 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# As a last resort, construct the database URL manually from individual environment variables
# that are likely to be set in the Kubernetes environment
db_user = os.getenv("ALERT_PROCESSOR_DB_USER", "alert_processor_user")
db_password = os.getenv("ALERT_PROCESSOR_DB_PASSWORD", "alert_processor_pass123")
db_host = os.getenv("ALERT_PROCESSOR_DB_HOST", "alert-processor-db-service")
db_port = os.getenv("ALERT_PROCESSOR_DB_PORT", "5432")
db_name = os.getenv("ALERT_PROCESSOR_DB_NAME", "alert_processor_db")
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
# Try service-specific environment variables (alert-processor specific pattern)
db_host = os.getenv('ALERT_PROCESSOR_DB_HOST', 'alert-processor-db-service')
db_port = os.getenv('ALERT_PROCESSOR_DB_PORT', '5432')
db_name = os.getenv('ALERT_PROCESSOR_DB_NAME', 'alert_processor_db')
db_user = os.getenv('ALERT_PROCESSOR_DB_USER', 'alert_processor_user')
db_password = os.getenv('ALERT_PROCESSOR_DB_PASSWORD')
if database_url:
print(f"Using database URL: {database_url}")
config.set_main_option("sqlalchemy.url", database_url)
else:
print("ERROR: No database URL configured!")
raise Exception("No database URL found after all fallback methods")
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
if not database_url:
error_msg = "ERROR: No database URL configured for alert-processor service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -74,6 +80,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -89,7 +96,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -100,8 +109,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -113,10 +123,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,53 +0,0 @@
"""Initial schema for alert processor
Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the alerts table plus one lookup index per filter column.

    The alertseverity/alertstatus ENUM types are created implicitly by the
    sa.Enum column definitions when the table is created.
    """
    alert_columns = (
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('item_type', sa.String(length=50), nullable=False),
        sa.Column('alert_type', sa.String(length=100), nullable=False),
        sa.Column('severity', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='alertseverity'), nullable=False),
        sa.Column('status', sa.Enum('ACTIVE', 'RESOLVED', 'ACKNOWLEDGED', 'IGNORED', name='alertstatus'), nullable=False),
        sa.Column('service', sa.String(length=100), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('actions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('alert_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('resolved_at', sa.DateTime(), nullable=True),
    )
    op.create_table('alerts', *alert_columns, sa.PrimaryKeyConstraint('id'))
    # Non-unique indexes on the columns alert queries filter/sort by.
    for col in ('tenant_id', 'severity', 'status', 'created_at'):
        op.create_index(op.f(f'ix_alerts_{col}'), 'alerts', [col], unique=False)
def downgrade() -> None:
    """Drop the alerts indexes, the table, and the backing ENUM types."""
    for col in ('created_at', 'status', 'severity', 'tenant_id'):
        op.drop_index(op.f(f'ix_alerts_{col}'), table_name='alerts')
    op.drop_table('alerts')
    # Drop enums (will be dropped automatically with table, but explicit for clarity)
    for enum_name in ('alertseverity', 'alertstatus'):
        sa.Enum(name=enum_name).drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,54 @@
"""initial_schema_20251001_1119
Revision ID: 7114c42d4b31
Revises:
Create Date: 2025-10-01 11:19:33.701313+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '7114c42d4b31'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the alerts table and its lookup indexes (autogenerated DDL)."""
    alert_columns = (
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('item_type', sa.String(length=50), nullable=False),
        sa.Column('alert_type', sa.String(length=100), nullable=False),
        sa.Column('severity', sa.Enum('low', 'medium', 'high', 'urgent', name='alertseverity'), nullable=False),
        # NOTE(review): status is nullable here while the previous hand-written
        # migration had it NOT NULL — presumably the model allows NULL; confirm.
        sa.Column('status', sa.Enum('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus'), nullable=True),
        sa.Column('service', sa.String(length=100), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('actions', sa.JSON(), nullable=True),
        sa.Column('alert_metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('resolved_at', sa.DateTime(), nullable=True),
    )
    op.create_table('alerts', *alert_columns, sa.PrimaryKeyConstraint('id'))
    # One non-unique index per frequently filtered column.
    for col in ('created_at', 'severity', 'status', 'tenant_id'):
        op.create_index(op.f(f'ix_alerts_{col}'), 'alerts', [col], unique=False)
def downgrade() -> None:
    """Reverse upgrade(): drop the indexes, then the alerts table."""
    for col in ('tenant_id', 'status', 'severity', 'created_at'):
        op.drop_index(op.f(f'ix_alerts_{col}'), table_name='alerts')
    op.drop_table('alerts')

View File

@@ -3,12 +3,14 @@
Models export for auth service
"""
from .users import User, RefreshToken
from .users import User
from .tokens import RefreshToken, LoginAttempt
from .onboarding import UserOnboardingProgress, UserOnboardingSummary
__all__ = [
'User',
'RefreshToken',
'RefreshToken',
'LoginAttempt',
'UserOnboardingProgress',
'UserOnboardingSummary',
]

View File

@@ -54,35 +54,4 @@ class User(Base):
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_login": self.last_login.isoformat() if self.last_login else None
}
class RefreshToken(Base):
    """Refresh token model for JWT token management."""
    __tablename__ = "refresh_tokens"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    token = Column(String(500), unique=True, nullable=False)
    expires_at = Column(DateTime(timezone=True), nullable=False)
    is_revoked = Column(Boolean, default=False)
    revoked_at = Column(DateTime(timezone=True), nullable=True)
    # Timezone-aware audit timestamps (UTC).
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))

    def __repr__(self):
        return (
            f"<RefreshToken(id={self.id}, user_id={self.user_id}, "
            f"is_revoked={self.is_revoked})>"
        )

    def to_dict(self):
        """Convert refresh token to dictionary"""
        def _iso(value):
            # Datetime columns may be NULL; serialize those as None.
            return value.isoformat() if value else None
        return {
            "id": str(self.id),
            "user_id": str(self.user_id),
            "token": self.token,
            "expires_at": _iso(self.expires_at),
            "is_revoked": self.is_revoked,
            "created_at": _iso(self.created_at),
            "updated_at": _iso(self.updated_at),
        }

View File

@@ -10,7 +10,7 @@ from datetime import datetime, timezone, timedelta
import structlog
from .base import AuthBaseRepository
from app.models.users import RefreshToken
from app.models.tokens import RefreshToken
from shared.database.exceptions import DatabaseError
logger = structlog.get_logger()

View File

@@ -138,7 +138,8 @@ class AdminUserDeleteService:
"""Validate user exists and get basic info from local database"""
try:
from app.models.users import User
from app.models.tokens import RefreshToken
# Query user from local auth database
query = select(User).where(User.id == uuid.UUID(user_id))
result = await self.db.execute(query)
@@ -403,8 +404,9 @@ class AdminUserDeleteService:
}
try:
from app.models.users import User, RefreshToken
from app.models.users import User
from app.models.tokens import RefreshToken
# Delete refresh tokens
token_delete_query = delete(RefreshToken).where(RefreshToken.user_id == uuid.UUID(user_id))
token_result = await self.db.execute(token_delete_query)

View File

@@ -12,7 +12,8 @@ import structlog
from app.repositories import UserRepository, TokenRepository
from app.schemas.auth import UserRegistration, UserLogin, TokenResponse, UserResponse
from app.models.users import User, RefreshToken
from app.models.users import User
from app.models.tokens import RefreshToken
from app.core.security import SecurityManager
from app.services.messaging import publish_user_registered, publish_user_login
from shared.database.unit_of_work import UnitOfWork

View File

@@ -10,7 +10,8 @@ import structlog
from app.repositories import UserRepository, TokenRepository
from app.schemas.auth import UserResponse, UserUpdate
from app.models.users import User, RefreshToken
from app.models.users import User
from app.models.tokens import RefreshToken
from app.core.security import SecurityManager
from shared.database.unit_of_work import UnitOfWork
from shared.database.transactions import transactional

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for auth service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('AUTH_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,108 +0,0 @@
"""Initial schema for auth service
Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.00000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial auth-service schema.

    Tables: users, refresh_tokens, user_onboarding_progress,
    user_onboarding_summary, login_attempts.  The three dependent tables
    carry a FK to users.id, so users must be created first.
    """
    # --- users: core account table ---
    op.create_table('users',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('hashed_password', sa.String(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_superuser', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # username and email are unique login identifiers; the id index is
    # redundant with the PK but kept as generated.
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    # --- refresh_tokens: JWT refresh tokens, FK to users ---
    op.create_table('refresh_tokens',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=True),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_refresh_tokens_id'), 'refresh_tokens', ['id'], unique=False)
    op.create_index(op.f('ix_refresh_tokens_token'), 'refresh_tokens', ['token'], unique=True)
    # --- user_onboarding_progress: per-step onboarding state, FK to users ---
    op.create_table('user_onboarding_progress',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('step', sa.String(), nullable=False),
        sa.Column('completed', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_onboarding_progress_id'), 'user_onboarding_progress', ['id'], unique=False)
    # --- user_onboarding_summary: aggregated onboarding stats, FK to users ---
    op.create_table('user_onboarding_summary',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('total_steps', sa.Integer(), nullable=True),
        sa.Column('completed_steps', sa.Integer(), nullable=True),
        sa.Column('completion_percentage', sa.Float(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_onboarding_summary_id'), 'user_onboarding_summary', ['id'], unique=False)
    # Create login_attempts table
    # Standalone audit log — no FK so attempts survive user deletion and
    # failed logins for unknown emails can be recorded.
    op.create_table('login_attempts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('email', sa.String(255), nullable=False),
        sa.Column('ip_address', sa.String(45), nullable=False),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('success', sa.Boolean(), nullable=True),
        sa.Column('failure_reason', sa.String(255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes supporting rate-limit / audit queries by email, IP and time.
    op.create_index(op.f('ix_login_attempts_email'), 'login_attempts', ['email'], unique=False)
    op.create_index(op.f('ix_login_attempts_ip_address'), 'login_attempts', ['ip_address'], unique=False)
    op.create_index(op.f('ix_login_attempts_success'), 'login_attempts', ['success'], unique=False)
    op.create_index(op.f('ix_login_attempts_created_at'), 'login_attempts', ['created_at'], unique=False)
def downgrade() -> None:
    """Drop all auth tables in reverse creation order (children before users)."""
    # (table, index-name suffixes) pairs; index names follow ix_<table>_<suffix>.
    teardown = [
        ('login_attempts', ['created_at', 'success', 'ip_address', 'email']),
        ('user_onboarding_summary', ['id']),
        ('user_onboarding_progress', ['id']),
        ('refresh_tokens', ['token', 'id']),
        ('users', ['username', 'id', 'email']),
    ]
    for table, suffixes in teardown:
        for suffix in suffixes:
            op.drop_index(op.f(f'ix_{table}_{suffix}'), table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,114 @@
"""initial_schema_20251001_1118
Revision ID: 2822f7ec9874
Revises:
Create Date: 2025-10-01 11:18:44.973074+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2822f7ec9874'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the auth-service schema as autogenerated from the models.

    Tables: login_attempts, refresh_tokens, users, user_onboarding_progress,
    user_onboarding_summary.  Only the two onboarding tables declare a FK to
    users.id; refresh_tokens stores user_id without a FK constraint.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- login_attempts: standalone login audit log (no FK) ---
    op.create_table('login_attempts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('ip_address', sa.String(length=45), nullable=False),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('success', sa.Boolean(), nullable=True),
        sa.Column('failure_reason', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_login_attempts_email'), 'login_attempts', ['email'], unique=False)
    # --- refresh_tokens: JWT refresh tokens ---
    # NOTE(review): user_id has no ForeignKeyConstraint here (the earlier
    # hand-written migration had one) — confirm this matches the model.
    op.create_table('refresh_tokens',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('token', sa.Text(), nullable=False),
        sa.Column('token_hash', sa.String(length=255), nullable=True),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('is_revoked', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('revoked_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('token_hash')
    )
    op.create_index('ix_refresh_tokens_expires_at', 'refresh_tokens', ['expires_at'], unique=False)
    op.create_index('ix_refresh_tokens_token_hash', 'refresh_tokens', ['token_hash'], unique=False)
    op.create_index(op.f('ix_refresh_tokens_user_id'), 'refresh_tokens', ['user_id'], unique=False)
    # Composite index for "active tokens of a user" lookups.
    op.create_index('ix_refresh_tokens_user_id_active', 'refresh_tokens', ['user_id', 'is_revoked'], unique=False)
    # --- users: core account table ---
    op.create_table('users',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('hashed_password', sa.String(length=255), nullable=False),
        sa.Column('full_name', sa.String(length=255), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_verified', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_login', sa.DateTime(timezone=True), nullable=True),
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('language', sa.String(length=10), nullable=True),
        sa.Column('timezone', sa.String(length=50), nullable=True),
        sa.Column('role', sa.String(length=20), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    # --- user_onboarding_progress: one row per (user, step) ---
    op.create_table('user_onboarding_progress',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('step_name', sa.String(length=50), nullable=False),
        sa.Column('completed', sa.Boolean(), nullable=False),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('step_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'step_name', name='uq_user_step')
    )
    op.create_index(op.f('ix_user_onboarding_progress_user_id'), 'user_onboarding_progress', ['user_id'], unique=False)
    # --- user_onboarding_summary: one aggregated row per user ---
    # NOTE(review): completion_percentage and steps_completed_count are
    # String(50), not numeric — mirrors the model autogeneration; confirm
    # this is intended.
    op.create_table('user_onboarding_summary',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('current_step', sa.String(length=50), nullable=False),
        sa.Column('next_step', sa.String(length=50), nullable=True),
        sa.Column('completion_percentage', sa.String(length=50), nullable=True),
        sa.Column('fully_completed', sa.Boolean(), nullable=True),
        sa.Column('steps_completed_count', sa.String(length=50), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # Unique: at most one summary row per user.
    op.create_index(op.f('ix_user_onboarding_summary_user_id'), 'user_onboarding_summary', ['user_id'], unique=True)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Reverse upgrade(): drop each table's indexes, then the table itself.

    Tables go in reverse creation order so the CASCADE FKs on the
    onboarding tables are removed before users.
    """
    # (table, index names) — op.f() is applied exactly where the
    # autogenerated script applied it.
    teardown = [
        ('user_onboarding_summary', [op.f('ix_user_onboarding_summary_user_id')]),
        ('user_onboarding_progress', [op.f('ix_user_onboarding_progress_user_id')]),
        ('users', [op.f('ix_users_email')]),
        ('refresh_tokens', [
            'ix_refresh_tokens_user_id_active',
            op.f('ix_refresh_tokens_user_id'),
            'ix_refresh_tokens_token_hash',
            'ix_refresh_tokens_expires_at',
        ]),
        ('login_attempts', [op.f('ix_login_attempts_email')]),
    ]
    for table, index_names in teardown:
        for index_name in index_names:
            op.drop_index(index_name, table_name=table)
        op.drop_table(table)

View File

@@ -1 +1,28 @@
# services/external/app/models/__init__.py
"""
External Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""
# Import all models to register them with the Base metadata
from .traffic import (
TrafficData,
TrafficMeasurementPoint,
TrafficDataBackgroundJob,
)
from .weather import (
WeatherData,
WeatherForecast,
)
# List all models for easier access
__all__ = [
# Traffic models
"TrafficData",
"TrafficMeasurementPoint",
"TrafficDataBackgroundJob",
# Weather models
"WeatherData",
"WeatherForecast",
]

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for external service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('EXTERNAL_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,236 +0,0 @@
"""Initial schema for external service
Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.00000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial schema for the external service.

    Creates five tables, each immediately followed by its indexes (a table
    must exist before its indexes can be created):

      * traffic_data               — per-city, per-location traffic measurements
      * traffic_measurement_points — registry of measurement points per city
      * traffic_background_jobs    — bookkeeping rows for background jobs
      * weather_data               — weather records (observed and forecast rows,
                                     distinguished by data_type / is_forecast)
      * weather_forecasts          — slim per-location forecast rows

    Index names created via op.f() follow Alembic's naming convention; the
    remaining composite indexes carry explicit hand-written names.
    """
    # Create traffic_data table
    op.create_table('traffic_data',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('location_id', sa.String(100), nullable=False),
        sa.Column('city', sa.String(50), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('traffic_volume', sa.Integer(), nullable=True),
        sa.Column('congestion_level', sa.String(20), nullable=True),
        sa.Column('average_speed', sa.Float(), nullable=True),
        sa.Column('occupation_percentage', sa.Float(), nullable=True),
        sa.Column('load_percentage', sa.Float(), nullable=True),
        sa.Column('pedestrian_count', sa.Integer(), nullable=True),
        sa.Column('measurement_point_id', sa.String(100), nullable=True),
        sa.Column('measurement_point_name', sa.String(500), nullable=True),
        sa.Column('measurement_point_type', sa.String(50), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('district', sa.String(100), nullable=True),
        sa.Column('zone', sa.String(100), nullable=True),
        sa.Column('source', sa.String(50), nullable=False),
        sa.Column('data_quality_score', sa.Float(), nullable=True),
        sa.Column('is_synthetic', sa.Boolean(), nullable=True),
        sa.Column('has_pedestrian_inference', sa.Boolean(), nullable=True),
        sa.Column('city_specific_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('raw_data', sa.Text(), nullable=True),
        # Timestamps default to UTC "now" on the database side.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Single-column lookup indexes (naming-convention names via op.f).
    op.create_index(op.f('ix_traffic_data_tenant_id'), 'traffic_data', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_location_id'), 'traffic_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_city'), 'traffic_data', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_data_date'), 'traffic_data', ['date'], unique=False)
    # Composite indexes with explicit names.
    op.create_index('idx_traffic_location_date', 'traffic_data', ['location_id', 'date'], unique=False)
    op.create_index('idx_traffic_city_date', 'traffic_data', ['city', 'date'], unique=False)
    op.create_index('idx_traffic_tenant_date', 'traffic_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_traffic_city_location', 'traffic_data', ['city', 'location_id'], unique=False)
    op.create_index('idx_traffic_measurement_point', 'traffic_data', ['city', 'measurement_point_id'], unique=False)
    op.create_index('idx_traffic_district_date', 'traffic_data', ['city', 'district', 'date'], unique=False)
    op.create_index('idx_traffic_training', 'traffic_data', ['tenant_id', 'city', 'date', 'is_synthetic'], unique=False)
    op.create_index('idx_traffic_quality', 'traffic_data', ['city', 'data_quality_score', 'date'], unique=False)
    # Create traffic_measurement_points table
    op.create_table('traffic_measurement_points',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('city', sa.String(50), nullable=False),
        sa.Column('measurement_point_id', sa.String(100), nullable=False),
        sa.Column('name', sa.String(500), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=False),
        sa.Column('longitude', sa.Float(), nullable=False),
        sa.Column('district', sa.String(100), nullable=True),
        sa.Column('zone', sa.String(100), nullable=True),
        sa.Column('road_type', sa.String(50), nullable=True),
        sa.Column('measurement_type', sa.String(50), nullable=True),
        sa.Column('point_category', sa.String(50), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('installation_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_data_received', sa.DateTime(timezone=True), nullable=True),
        sa.Column('data_quality_rating', sa.Float(), nullable=True),
        sa.Column('city_specific_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Unique: one row per (city, measurement_point_id) pair.
    op.create_index('idx_unique_city_point', 'traffic_measurement_points', ['city', 'measurement_point_id'], unique=True)
    op.create_index(op.f('ix_traffic_measurement_points_city'), 'traffic_measurement_points', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_measurement_points_measurement_point_id'), 'traffic_measurement_points', ['measurement_point_id'], unique=False)
    op.create_index('idx_points_city_location', 'traffic_measurement_points', ['city', 'latitude', 'longitude'], unique=False)
    op.create_index('idx_points_district', 'traffic_measurement_points', ['city', 'district'], unique=False)
    op.create_index('idx_points_road_type', 'traffic_measurement_points', ['city', 'road_type'], unique=False)
    op.create_index('idx_points_active', 'traffic_measurement_points', ['city', 'is_active', 'last_data_received'], unique=False)
    # Create traffic_background_jobs table
    op.create_table('traffic_background_jobs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('job_type', sa.String(50), nullable=False),
        sa.Column('city', sa.String(50), nullable=False),
        sa.Column('location_pattern', sa.String(200), nullable=True),
        sa.Column('scheduled_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(20), nullable=False),
        sa.Column('progress_percentage', sa.Float(), nullable=True),
        sa.Column('records_processed', sa.Integer(), nullable=True),
        sa.Column('records_stored', sa.Integer(), nullable=True),
        sa.Column('data_start_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('data_end_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('success_count', sa.Integer(), nullable=True),
        sa.Column('error_count', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('job_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_traffic_background_jobs_city'), 'traffic_background_jobs', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_background_jobs_tenant_id'), 'traffic_background_jobs', ['tenant_id'], unique=False)
    op.create_index('idx_jobs_city_status', 'traffic_background_jobs', ['city', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_tenant_status', 'traffic_background_jobs', ['tenant_id', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_type_city', 'traffic_background_jobs', ['job_type', 'city', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_completed', 'traffic_background_jobs', ['status', 'completed_at'], unique=False)
    # Create weather_data table
    op.create_table('weather_data',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('location_id', sa.String(100), nullable=False),
        sa.Column('city', sa.String(50), nullable=False),
        sa.Column('station_name', sa.String(200), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('temperature', sa.Float(), nullable=True),
        sa.Column('temperature_min', sa.Float(), nullable=True),
        sa.Column('temperature_max', sa.Float(), nullable=True),
        sa.Column('feels_like', sa.Float(), nullable=True),
        sa.Column('precipitation', sa.Float(), nullable=True),
        sa.Column('precipitation_probability', sa.Float(), nullable=True),
        sa.Column('humidity', sa.Float(), nullable=True),
        sa.Column('wind_speed', sa.Float(), nullable=True),
        sa.Column('wind_direction', sa.Float(), nullable=True),
        sa.Column('wind_gust', sa.Float(), nullable=True),
        sa.Column('pressure', sa.Float(), nullable=True),
        sa.Column('visibility', sa.Float(), nullable=True),
        sa.Column('uv_index', sa.Float(), nullable=True),
        sa.Column('cloud_cover', sa.Float(), nullable=True),
        sa.Column('condition', sa.String(100), nullable=True),
        sa.Column('description', sa.String(200), nullable=True),
        sa.Column('weather_code', sa.String(20), nullable=True),
        sa.Column('source', sa.String(50), nullable=False),
        sa.Column('data_type', sa.String(20), nullable=False),
        sa.Column('is_forecast', sa.Boolean(), nullable=True),
        sa.Column('data_quality_score', sa.Float(), nullable=True),
        sa.Column('raw_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('processed_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weather_data_location_id'), 'weather_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_weather_data_city'), 'weather_data', ['city'], unique=False)
    op.create_index(op.f('ix_weather_data_date'), 'weather_data', ['date'], unique=False)
    op.create_index(op.f('ix_weather_data_tenant_id'), 'weather_data', ['tenant_id'], unique=False)
    op.create_index('idx_weather_location_date', 'weather_data', ['location_id', 'date'], unique=False)
    # Create weather_forecasts table
    op.create_table('weather_forecasts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('location_id', sa.String(100), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('generated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
        sa.Column('temperature', sa.Float(), nullable=True),
        sa.Column('precipitation', sa.Float(), nullable=True),
        sa.Column('humidity', sa.Float(), nullable=True),
        sa.Column('wind_speed', sa.Float(), nullable=True),
        sa.Column('description', sa.String(200), nullable=True),
        sa.Column('source', sa.String(50), nullable=False),
        sa.Column('raw_data', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weather_forecasts_location_id'), 'weather_forecasts', ['location_id'], unique=False)
    op.create_index('idx_forecast_location_date', 'weather_forecasts', ['location_id', 'forecast_date'], unique=False)
def downgrade() -> None:
    """Drop every object created by upgrade(), in reverse creation order.

    For each table, its indexes are removed first, then the table itself.
    The boolean flag in the plan marks index names that were created through
    Alembic's naming convention and therefore must be wrapped in op.f(),
    exactly as they were in upgrade().
    """
    # (table name, ordered list of (index name, uses_naming_convention)).
    teardown_plan = [
        ('weather_forecasts', [
            ('idx_forecast_location_date', False),
            ('ix_weather_forecasts_location_id', True),
        ]),
        ('weather_data', [
            ('idx_weather_location_date', False),
            ('ix_weather_data_tenant_id', True),
            ('ix_weather_data_date', True),
            ('ix_weather_data_city', True),
            ('ix_weather_data_location_id', True),
        ]),
        ('traffic_background_jobs', [
            ('idx_jobs_completed', False),
            ('idx_jobs_type_city', False),
            ('idx_jobs_tenant_status', False),
            ('idx_jobs_city_status', False),
            ('ix_traffic_background_jobs_tenant_id', True),
            ('ix_traffic_background_jobs_city', True),
        ]),
        ('traffic_measurement_points', [
            ('idx_points_active', False),
            ('idx_points_road_type', False),
            ('idx_points_district', False),
            ('idx_points_city_location', False),
            ('idx_unique_city_point', False),
            ('ix_traffic_measurement_points_measurement_point_id', True),
            ('ix_traffic_measurement_points_city', True),
        ]),
        ('traffic_data', [
            ('idx_traffic_quality', False),
            ('idx_traffic_training', False),
            ('idx_traffic_district_date', False),
            ('idx_traffic_measurement_point', False),
            ('idx_traffic_city_location', False),
            ('idx_traffic_tenant_date', False),
            ('idx_traffic_city_date', False),
            ('idx_traffic_location_date', False),
            ('ix_traffic_data_date', True),
            ('ix_traffic_data_city', True),
            ('ix_traffic_data_location_id', True),
            ('ix_traffic_data_tenant_id', True),
        ]),
    ]
    for table_name, indexes in teardown_plan:
        for index_name, uses_naming_convention in indexes:
            op.drop_index(
                op.f(index_name) if uses_naming_convention else index_name,
                table_name=table_name,
            )
        op.drop_table(table_name)

View File

@@ -0,0 +1,224 @@
"""initial_schema_20251001_1119
Revision ID: 374752db316e
Revises:
Create Date: 2025-10-01 11:19:50.472480+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '374752db316e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema (autogenerated revision 374752db316e).

    Creates five tables, each followed by its indexes: traffic_background_jobs,
    traffic_data, traffic_measurement_points, weather_data and weather_forecasts.
    Index names produced via op.f() follow Alembic's naming convention; the
    remaining composite indexes carry explicit names.

    NOTE(review): unlike the earlier 000001 revision, the created_at /
    updated_at / generated_at columns here carry no server_default, so
    timestamp values must come from the application layer — confirm this is
    intentional.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('traffic_background_jobs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('job_type', sa.String(length=50), nullable=False),
        sa.Column('city', sa.String(length=50), nullable=False),
        sa.Column('location_pattern', sa.String(length=200), nullable=True),
        sa.Column('scheduled_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('progress_percentage', sa.Float(), nullable=True),
        sa.Column('records_processed', sa.Integer(), nullable=True),
        sa.Column('records_stored', sa.Integer(), nullable=True),
        sa.Column('data_start_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('data_end_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('success_count', sa.Integer(), nullable=True),
        sa.Column('error_count', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('job_metadata', sa.JSON(), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_jobs_city_status', 'traffic_background_jobs', ['city', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_completed', 'traffic_background_jobs', ['status', 'completed_at'], unique=False)
    op.create_index('idx_jobs_tenant_status', 'traffic_background_jobs', ['tenant_id', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_type_city', 'traffic_background_jobs', ['job_type', 'city', 'scheduled_at'], unique=False)
    op.create_index(op.f('ix_traffic_background_jobs_city'), 'traffic_background_jobs', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_background_jobs_tenant_id'), 'traffic_background_jobs', ['tenant_id'], unique=False)
    op.create_table('traffic_data',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('location_id', sa.String(length=100), nullable=False),
        sa.Column('city', sa.String(length=50), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('traffic_volume', sa.Integer(), nullable=True),
        sa.Column('congestion_level', sa.String(length=20), nullable=True),
        sa.Column('average_speed', sa.Float(), nullable=True),
        sa.Column('occupation_percentage', sa.Float(), nullable=True),
        sa.Column('load_percentage', sa.Float(), nullable=True),
        sa.Column('pedestrian_count', sa.Integer(), nullable=True),
        sa.Column('measurement_point_id', sa.String(length=100), nullable=True),
        sa.Column('measurement_point_name', sa.String(length=500), nullable=True),
        sa.Column('measurement_point_type', sa.String(length=50), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('district', sa.String(length=100), nullable=True),
        sa.Column('zone', sa.String(length=100), nullable=True),
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.Column('data_quality_score', sa.Float(), nullable=True),
        sa.Column('is_synthetic', sa.Boolean(), nullable=True),
        sa.Column('has_pedestrian_inference', sa.Boolean(), nullable=True),
        sa.Column('city_specific_data', sa.JSON(), nullable=True),
        sa.Column('raw_data', sa.Text(), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_traffic_city_date', 'traffic_data', ['city', 'date'], unique=False)
    op.create_index('idx_traffic_city_location', 'traffic_data', ['city', 'location_id'], unique=False)
    op.create_index('idx_traffic_district_date', 'traffic_data', ['city', 'district', 'date'], unique=False)
    op.create_index('idx_traffic_location_date', 'traffic_data', ['location_id', 'date'], unique=False)
    op.create_index('idx_traffic_measurement_point', 'traffic_data', ['city', 'measurement_point_id'], unique=False)
    op.create_index('idx_traffic_quality', 'traffic_data', ['city', 'data_quality_score', 'date'], unique=False)
    op.create_index('idx_traffic_tenant_date', 'traffic_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_traffic_training', 'traffic_data', ['tenant_id', 'city', 'date', 'is_synthetic'], unique=False)
    op.create_index(op.f('ix_traffic_data_city'), 'traffic_data', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_data_date'), 'traffic_data', ['date'], unique=False)
    op.create_index(op.f('ix_traffic_data_location_id'), 'traffic_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_measurement_point_id'), 'traffic_data', ['measurement_point_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_tenant_id'), 'traffic_data', ['tenant_id'], unique=False)
    op.create_table('traffic_measurement_points',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('city', sa.String(length=50), nullable=False),
        sa.Column('measurement_point_id', sa.String(length=100), nullable=False),
        sa.Column('name', sa.String(length=500), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=False),
        sa.Column('longitude', sa.Float(), nullable=False),
        sa.Column('district', sa.String(length=100), nullable=True),
        sa.Column('zone', sa.String(length=100), nullable=True),
        sa.Column('road_type', sa.String(length=50), nullable=True),
        sa.Column('measurement_type', sa.String(length=50), nullable=True),
        sa.Column('point_category', sa.String(length=50), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('installation_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_data_received', sa.DateTime(timezone=True), nullable=True),
        sa.Column('data_quality_rating', sa.Float(), nullable=True),
        sa.Column('city_specific_metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_points_active', 'traffic_measurement_points', ['city', 'is_active', 'last_data_received'], unique=False)
    op.create_index('idx_points_city_location', 'traffic_measurement_points', ['city', 'latitude', 'longitude'], unique=False)
    op.create_index('idx_points_district', 'traffic_measurement_points', ['city', 'district'], unique=False)
    op.create_index('idx_points_road_type', 'traffic_measurement_points', ['city', 'road_type'], unique=False)
    # Unique: one row per (city, measurement_point_id) pair.
    op.create_index('idx_unique_city_point', 'traffic_measurement_points', ['city', 'measurement_point_id'], unique=True)
    op.create_index(op.f('ix_traffic_measurement_points_city'), 'traffic_measurement_points', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_measurement_points_measurement_point_id'), 'traffic_measurement_points', ['measurement_point_id'], unique=False)
    op.create_table('weather_data',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('location_id', sa.String(length=100), nullable=False),
        sa.Column('city', sa.String(length=50), nullable=False),
        sa.Column('station_name', sa.String(length=200), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('temperature', sa.Float(), nullable=True),
        sa.Column('temperature_min', sa.Float(), nullable=True),
        sa.Column('temperature_max', sa.Float(), nullable=True),
        sa.Column('feels_like', sa.Float(), nullable=True),
        sa.Column('precipitation', sa.Float(), nullable=True),
        sa.Column('precipitation_probability', sa.Float(), nullable=True),
        sa.Column('humidity', sa.Float(), nullable=True),
        sa.Column('wind_speed', sa.Float(), nullable=True),
        sa.Column('wind_direction', sa.Float(), nullable=True),
        sa.Column('wind_gust', sa.Float(), nullable=True),
        sa.Column('pressure', sa.Float(), nullable=True),
        sa.Column('visibility', sa.Float(), nullable=True),
        sa.Column('uv_index', sa.Float(), nullable=True),
        sa.Column('cloud_cover', sa.Float(), nullable=True),
        sa.Column('condition', sa.String(length=100), nullable=True),
        sa.Column('description', sa.String(length=200), nullable=True),
        sa.Column('weather_code', sa.String(length=20), nullable=True),
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.Column('data_type', sa.String(length=20), nullable=False),
        sa.Column('is_forecast', sa.Boolean(), nullable=True),
        sa.Column('data_quality_score', sa.Float(), nullable=True),
        # NOTE(review): these two use the postgresql dialect JSON type while the
        # other JSON columns in this revision use generic sa.JSON() — confirm
        # the inconsistency is intentional.
        sa.Column('raw_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('processed_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_weather_location_date', 'weather_data', ['location_id', 'date'], unique=False)
    op.create_index(op.f('ix_weather_data_date'), 'weather_data', ['date'], unique=False)
    op.create_index(op.f('ix_weather_data_location_id'), 'weather_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_weather_data_tenant_id'), 'weather_data', ['tenant_id'], unique=False)
    op.create_table('weather_forecasts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('location_id', sa.String(length=100), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('generated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('temperature', sa.Float(), nullable=True),
        sa.Column('precipitation', sa.Float(), nullable=True),
        sa.Column('humidity', sa.Float(), nullable=True),
        sa.Column('wind_speed', sa.Float(), nullable=True),
        sa.Column('description', sa.String(length=200), nullable=True),
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.Column('raw_data', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_forecast_location_date', 'weather_forecasts', ['location_id', 'forecast_date'], unique=False)
    op.create_index(op.f('ix_weather_forecasts_location_id'), 'weather_forecasts', ['location_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert revision 374752db316e: drop all tables and their indexes.

    Tables are dropped in reverse creation order; each table's indexes are
    removed first. The boolean flag marks names created through Alembic's
    naming convention, which must go through op.f() exactly as in upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # (table name, ordered list of (index name, uses_naming_convention)).
    teardown_plan = [
        ('weather_forecasts', [
            ('ix_weather_forecasts_location_id', True),
            ('idx_forecast_location_date', False),
        ]),
        ('weather_data', [
            ('ix_weather_data_tenant_id', True),
            ('ix_weather_data_location_id', True),
            ('ix_weather_data_date', True),
            ('idx_weather_location_date', False),
        ]),
        ('traffic_measurement_points', [
            ('ix_traffic_measurement_points_measurement_point_id', True),
            ('ix_traffic_measurement_points_city', True),
            ('idx_unique_city_point', False),
            ('idx_points_road_type', False),
            ('idx_points_district', False),
            ('idx_points_city_location', False),
            ('idx_points_active', False),
        ]),
        ('traffic_data', [
            ('ix_traffic_data_tenant_id', True),
            ('ix_traffic_data_measurement_point_id', True),
            ('ix_traffic_data_location_id', True),
            ('ix_traffic_data_date', True),
            ('ix_traffic_data_city', True),
            ('idx_traffic_training', False),
            ('idx_traffic_tenant_date', False),
            ('idx_traffic_quality', False),
            ('idx_traffic_measurement_point', False),
            ('idx_traffic_location_date', False),
            ('idx_traffic_district_date', False),
            ('idx_traffic_city_location', False),
            ('idx_traffic_city_date', False),
        ]),
        ('traffic_background_jobs', [
            ('ix_traffic_background_jobs_tenant_id', True),
            ('ix_traffic_background_jobs_city', True),
            ('idx_jobs_type_city', False),
            ('idx_jobs_tenant_status', False),
            ('idx_jobs_completed', False),
            ('idx_jobs_city_status', False),
        ]),
    ]
    for table_name, indexes in teardown_plan:
        for index_name, uses_naming_convention in indexes:
            op.drop_index(
                op.f(index_name) if uses_naming_convention else index_name,
                table_name=table_name,
            )
        op.drop_table(table_name)
    # ### end Alembic commands ###

View File

@@ -5,12 +5,13 @@ Import all models to ensure they are registered with SQLAlchemy Base.
"""
# Import all models to register them with the Base metadata
from .forecasts import Forecast
from .forecasts import Forecast, PredictionBatch
from .predictions import ModelPerformanceMetric, PredictionCache
# List all models for easier access
__all__ = [
"Forecast",
"PredictionBatch",
"ModelPerformanceMetric",
"PredictionCache",
]

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for forecasting service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('FORECASTING_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,160 +0,0 @@
"""Initial schema for forecasting service
Revision ID: 0001
Revises:
Create Date: 2025-09-30 18:00:00.00000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial schema for the forecasting service.

    Tables created: forecasts, predictions, prediction_batches,
    model_performance_metrics, prediction_cache. No foreign keys are
    declared; cross-table references (tenant_id, model_id, ...) are by
    UUID value only.
    """
    # Per product/location/date demand forecasts with confidence bounds
    # and the model/context metadata that produced them.
    op.create_table('forecasts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(255), nullable=False),
        sa.Column('location', sa.String(255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        # Server-side UTC timestamp; rows get a creation time without app input.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('predicted_demand', sa.Float, nullable=False),
        sa.Column('confidence_lower', sa.Float, nullable=False),
        sa.Column('confidence_upper', sa.Float, nullable=False),
        sa.Column('confidence_level', sa.Float, nullable=True),
        sa.Column('model_id', sa.String(255), nullable=False),
        sa.Column('model_version', sa.String(50), nullable=False),
        sa.Column('algorithm', sa.String(50), nullable=True),
        sa.Column('business_type', sa.String(50), nullable=True),
        sa.Column('day_of_week', sa.Integer, nullable=False),
        sa.Column('is_holiday', sa.Boolean, nullable=True),
        sa.Column('is_weekend', sa.Boolean, nullable=True),
        sa.Column('weather_temperature', sa.Float, nullable=True),
        sa.Column('weather_precipitation', sa.Float, nullable=True),
        sa.Column('weather_description', sa.String(100), nullable=True),
        sa.Column('traffic_volume', sa.Integer, nullable=True),
        sa.Column('processing_time_ms', sa.Integer, nullable=True),
        sa.Column('features_used', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Single-column indexes for the common forecast query filters.
    op.create_index(op.f('ix_forecasts_tenant_id'), 'forecasts', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_forecasts_inventory_product_id'), 'forecasts', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_forecasts_product_name'), 'forecasts', ['product_name'], unique=False)
    op.create_index(op.f('ix_forecasts_location'), 'forecasts', ['location'], unique=False)
    op.create_index(op.f('ix_forecasts_forecast_date'), 'forecasts', ['forecast_date'], unique=False)

    # Raw model predictions with the full input payload retained as JSON.
    # NOTE(review): DateTime columns here are naive (no timezone=True),
    # unlike the other tables — confirm this asymmetry is intentional.
    op.create_table('predictions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.String(100), nullable=False),
        sa.Column('input_data', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('prediction_value', sa.Float(), nullable=False),
        sa.Column('prediction_confidence', sa.Float(), nullable=True),
        sa.Column('prediction_date', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_predictions_tenant_id'), 'predictions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_predictions_model_id'), 'predictions', ['model_id'], unique=False)
    op.create_index(op.f('ix_predictions_prediction_date'), 'predictions', ['prediction_date'], unique=False)

    # Batch-run bookkeeping: progress counters, status, and error details
    # for bulk forecast generation jobs.
    op.create_table('prediction_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_name', sa.String(255), nullable=False),
        sa.Column('requested_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('total_products', sa.Integer, nullable=True),
        sa.Column('completed_products', sa.Integer, nullable=True),
        sa.Column('failed_products', sa.Integer, nullable=True),
        sa.Column('forecast_days', sa.Integer, nullable=True),
        sa.Column('business_type', sa.String(50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer, nullable=True),
        sa.Column('cancelled_by', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_batches_tenant_id'), 'prediction_batches', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_prediction_batches_status'), 'prediction_batches', ['status'], unique=False)
    op.create_index(op.f('ix_prediction_batches_requested_at'), 'prediction_batches', ['requested_at'], unique=False)

    # Accuracy metrics (MAE/MAPE/RMSE) per model/tenant/product over an
    # evaluation window.
    op.create_table('model_performance_metrics',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('mae', sa.Float, nullable=True),
        sa.Column('mape', sa.Float, nullable=True),
        sa.Column('rmse', sa.Float, nullable=True),
        sa.Column('accuracy_score', sa.Float, nullable=True),
        sa.Column('evaluation_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('evaluation_period_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('evaluation_period_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sample_size', sa.Integer, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_inventory_product_id'), 'model_performance_metrics', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_evaluation_date'), 'model_performance_metrics', ['evaluation_date'], unique=False)

    # Denormalized forecast cache with TTL (expires_at) and hit counter.
    # cache_key is enforced unique via a table constraint.
    op.create_table('prediction_cache',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('cache_key', sa.String(255), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('location', sa.String(255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('predicted_demand', sa.Float, nullable=False),
        sa.Column('confidence_lower', sa.Float, nullable=False),
        sa.Column('confidence_upper', sa.Float, nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('hit_count', sa.Integer, nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('cache_key')
    )
    op.create_index(op.f('ix_prediction_cache_cache_key'), 'prediction_cache', ['cache_key'], unique=False)
    op.create_index(op.f('ix_prediction_cache_tenant_id'), 'prediction_cache', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_prediction_cache_inventory_product_id'), 'prediction_cache', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_prediction_cache_forecast_date'), 'prediction_cache', ['forecast_date'], unique=False)
def downgrade() -> None:
    """Drop the forecasting-service schema.

    Tables are dropped in reverse creation order, and each table's
    indexes are dropped before the table itself.
    """
    op.drop_index(op.f('ix_prediction_cache_forecast_date'), table_name='prediction_cache')
    op.drop_index(op.f('ix_prediction_cache_inventory_product_id'), table_name='prediction_cache')
    op.drop_index(op.f('ix_prediction_cache_tenant_id'), table_name='prediction_cache')
    op.drop_index(op.f('ix_prediction_cache_cache_key'), table_name='prediction_cache')
    op.drop_table('prediction_cache')
    op.drop_index(op.f('ix_model_performance_metrics_evaluation_date'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_inventory_product_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_tenant_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_model_id'), table_name='model_performance_metrics')
    op.drop_table('model_performance_metrics')
    op.drop_index(op.f('ix_prediction_batches_requested_at'), table_name='prediction_batches')
    op.drop_index(op.f('ix_prediction_batches_status'), table_name='prediction_batches')
    op.drop_index(op.f('ix_prediction_batches_tenant_id'), table_name='prediction_batches')
    op.drop_table('prediction_batches')
    op.drop_index(op.f('ix_predictions_prediction_date'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_model_id'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_tenant_id'), table_name='predictions')
    op.drop_table('predictions')
    op.drop_index(op.f('ix_forecasts_forecast_date'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_location'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_product_name'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_inventory_product_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_tenant_id'), table_name='forecasts')
    op.drop_table('forecasts')

View File

@@ -0,0 +1,128 @@
"""initial_schema_20251001_1119
Revision ID: 186b79e00320
Revises:
Create Date: 2025-10-01 11:19:42.511241+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '186b79e00320'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial (autogenerated) forecasting-service schema.

    Tables created, in alphabetical order as emitted by Alembic
    autogenerate: forecasts, model_performance_metrics,
    prediction_batches, prediction_cache. Unlike the hand-written
    predecessor migration, created_at columns carry no server default
    here — confirm timestamps are populated by the application.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Per product/location/date demand forecasts with confidence bounds
    # and model/context metadata.
    op.create_table('forecasts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('location', sa.String(length=255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('predicted_demand', sa.Float(), nullable=False),
        sa.Column('confidence_lower', sa.Float(), nullable=False),
        sa.Column('confidence_upper', sa.Float(), nullable=False),
        sa.Column('confidence_level', sa.Float(), nullable=True),
        sa.Column('model_id', sa.String(length=255), nullable=False),
        sa.Column('model_version', sa.String(length=50), nullable=False),
        sa.Column('algorithm', sa.String(length=50), nullable=True),
        sa.Column('business_type', sa.String(length=50), nullable=True),
        sa.Column('day_of_week', sa.Integer(), nullable=False),
        sa.Column('is_holiday', sa.Boolean(), nullable=True),
        sa.Column('is_weekend', sa.Boolean(), nullable=True),
        sa.Column('weather_temperature', sa.Float(), nullable=True),
        sa.Column('weather_precipitation', sa.Float(), nullable=True),
        sa.Column('weather_description', sa.String(length=100), nullable=True),
        sa.Column('traffic_volume', sa.Integer(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer(), nullable=True),
        sa.Column('features_used', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_forecasts_forecast_date'), 'forecasts', ['forecast_date'], unique=False)
    op.create_index(op.f('ix_forecasts_inventory_product_id'), 'forecasts', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_forecasts_location'), 'forecasts', ['location'], unique=False)
    op.create_index(op.f('ix_forecasts_product_name'), 'forecasts', ['product_name'], unique=False)
    op.create_index(op.f('ix_forecasts_tenant_id'), 'forecasts', ['tenant_id'], unique=False)

    # Accuracy metrics (MAE/MAPE/RMSE) per model/tenant/product over an
    # evaluation window.
    op.create_table('model_performance_metrics',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('mae', sa.Float(), nullable=True),
        sa.Column('mape', sa.Float(), nullable=True),
        sa.Column('rmse', sa.Float(), nullable=True),
        sa.Column('accuracy_score', sa.Float(), nullable=True),
        sa.Column('evaluation_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('evaluation_period_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('evaluation_period_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sample_size', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)

    # Batch-run bookkeeping for bulk forecast generation jobs.
    op.create_table('prediction_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_name', sa.String(length=255), nullable=False),
        sa.Column('requested_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('total_products', sa.Integer(), nullable=True),
        sa.Column('completed_products', sa.Integer(), nullable=True),
        sa.Column('failed_products', sa.Integer(), nullable=True),
        sa.Column('forecast_days', sa.Integer(), nullable=True),
        sa.Column('business_type', sa.String(length=50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer(), nullable=True),
        sa.Column('cancelled_by', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_batches_tenant_id'), 'prediction_batches', ['tenant_id'], unique=False)

    # Denormalized forecast cache with TTL (expires_at) and hit counter.
    # Uniqueness of cache_key is enforced by a unique index (below)
    # rather than a table-level constraint.
    op.create_table('prediction_cache',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('cache_key', sa.String(length=255), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('location', sa.String(length=255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('predicted_demand', sa.Float(), nullable=False),
        sa.Column('confidence_lower', sa.Float(), nullable=False),
        sa.Column('confidence_upper', sa.Float(), nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('hit_count', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_cache_cache_key'), 'prediction_cache', ['cache_key'], unique=True)
    op.create_index(op.f('ix_prediction_cache_tenant_id'), 'prediction_cache', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the autogenerated forecasting-service schema.

    Tables are dropped in reverse creation order; each table's indexes
    are dropped before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_prediction_cache_tenant_id'), table_name='prediction_cache')
    op.drop_index(op.f('ix_prediction_cache_cache_key'), table_name='prediction_cache')
    op.drop_table('prediction_cache')
    op.drop_index(op.f('ix_prediction_batches_tenant_id'), table_name='prediction_batches')
    op.drop_table('prediction_batches')
    op.drop_index(op.f('ix_model_performance_metrics_tenant_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_model_id'), table_name='model_performance_metrics')
    op.drop_table('model_performance_metrics')
    op.drop_index(op.f('ix_forecasts_tenant_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_product_name'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_location'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_inventory_product_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_forecast_date'), table_name='forecasts')
    op.drop_table('forecasts')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,54 @@
"""
Inventory Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""
# Import all models to register them with the Base metadata
from .inventory import (
Ingredient,
Stock,
StockMovement,
ProductTransformation,
StockAlert,
UnitOfMeasure,
IngredientCategory,
ProductCategory,
ProductType,
ProductionStage,
StockMovementType,
)
from .food_safety import (
FoodSafetyCompliance,
TemperatureLog,
FoodSafetyAlert,
FoodSafetyStandard,
ComplianceStatus,
FoodSafetyAlertType,
)
# List all models for easier access
__all__ = [
# Inventory models
"Ingredient",
"Stock",
"StockMovement",
"ProductTransformation",
"StockAlert",
# Inventory enums
"UnitOfMeasure",
"IngredientCategory",
"ProductCategory",
"ProductType",
"ProductionStage",
"StockMovementType",
# Food safety models
"FoodSafetyCompliance",
"TemperatureLog",
"FoodSafetyAlert",
# Food safety enums
"FoodSafetyStandard",
"ComplianceStatus",
"FoodSafetyAlertType",
]

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for inventory service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('INVENTORY_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,492 +0,0 @@
"""Initial schema for inventory service
Revision ID: 0001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create ingredients table
op.create_table('ingredients',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('sku', sa.String(100), nullable=True),
sa.Column('barcode', sa.String(50), nullable=True),
sa.Column('product_type', sa.Enum('INGREDIENT', 'FINISHED_PRODUCT', name='producttype'), nullable=False),
sa.Column('ingredient_category', sa.Enum('FLOUR', 'YEAST', 'DAIRY', 'EGGS', 'SUGAR', 'FATS', 'SALT', 'SPICES', 'ADDITIVES', 'PACKAGING', 'CLEANING', 'OTHER', name='ingredientcategory'), nullable=True),
sa.Column('product_category', sa.Enum('BREAD', 'CROISSANTS', 'PASTRIES', 'CAKES', 'COOKIES', 'MUFFINS', 'SANDWICHES', 'SEASONAL', 'BEVERAGES', 'OTHER_PRODUCTS', name='productcategory'), nullable=True),
sa.Column('subcategory', sa.String(100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('brand', sa.String(100), nullable=True),
sa.Column('unit_of_measure', sa.Enum('KILOGRAMS', 'GRAMS', 'LITERS', 'MILLILITERS', 'UNITS', 'PIECES', 'PACKAGES', 'BAGS', 'BOXES', name='unitofmeasure'), nullable=False),
sa.Column('package_size', sa.Float(), nullable=True),
sa.Column('average_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('last_purchase_price', sa.Numeric(10, 2), nullable=True),
sa.Column('standard_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('low_stock_threshold', sa.Float(), nullable=False),
sa.Column('reorder_point', sa.Float(), nullable=False),
sa.Column('reorder_quantity', sa.Float(), nullable=False),
sa.Column('max_stock_level', sa.Float(), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('display_life_hours', sa.Integer(), nullable=True),
sa.Column('best_before_hours', sa.Integer(), nullable=True),
sa.Column('storage_instructions', sa.Text(), nullable=True),
sa.Column('central_baker_product_code', sa.String(100), nullable=True),
sa.Column('delivery_days', sa.String(20), nullable=True),
sa.Column('minimum_order_quantity', sa.Float(), nullable=True),
sa.Column('pack_size', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_perishable', sa.Boolean(), nullable=True),
sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_ingredients_tenant_id'), 'ingredients', ['tenant_id'], unique=False)
op.create_index(op.f('ix_ingredients_name'), 'ingredients', ['name'], unique=False)
op.create_index(op.f('ix_ingredients_sku'), 'ingredients', ['sku'], unique=False)
op.create_index(op.f('ix_ingredients_barcode'), 'ingredients', ['barcode'], unique=False)
op.create_index(op.f('ix_ingredients_product_type'), 'ingredients', ['product_type'], unique=False)
op.create_index(op.f('ix_ingredients_ingredient_category'), 'ingredients', ['ingredient_category'], unique=False)
op.create_index(op.f('ix_ingredients_product_category'), 'ingredients', ['product_category'], unique=False)
op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'], unique=False)
op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'], unique=False)
op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type'], unique=False)
op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category'], unique=False)
op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category'], unique=False)
op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'], unique=False)
# Create stock table
op.create_table('stock',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=True),
sa.Column('batch_number', sa.String(100), nullable=True),
sa.Column('lot_number', sa.String(100), nullable=True),
sa.Column('supplier_batch_ref', sa.String(100), nullable=True),
sa.Column('production_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('transformation_reference', sa.String(100), nullable=True),
sa.Column('current_quantity', sa.Float(), nullable=False),
sa.Column('reserved_quantity', sa.Float(), nullable=False),
sa.Column('available_quantity', sa.Float(), nullable=False),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('storage_location', sa.String(100), nullable=True),
sa.Column('warehouse_zone', sa.String(50), nullable=True),
sa.Column('shelf_position', sa.String(50), nullable=True),
sa.Column('requires_refrigeration', sa.Boolean(), nullable=True),
sa.Column('requires_freezing', sa.Boolean(), nullable=True),
sa.Column('storage_temperature_min', sa.Float(), nullable=True),
sa.Column('storage_temperature_max', sa.Float(), nullable=True),
sa.Column('storage_humidity_max', sa.Float(), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('storage_instructions', sa.Text(), nullable=True),
sa.Column('is_available', sa.Boolean(), nullable=True),
sa.Column('is_expired', sa.Boolean(), nullable=True),
sa.Column('quality_status', sa.String(20), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_stock_tenant_id'), 'stock', ['tenant_id'], unique=False)
op.create_index(op.f('ix_stock_ingredient_id'), 'stock', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_supplier_id'), 'stock', ['supplier_id'], unique=False)
op.create_index(op.f('ix_stock_batch_number'), 'stock', ['batch_number'], unique=False)
op.create_index(op.f('ix_stock_lot_number'), 'stock', ['lot_number'], unique=False)
op.create_index(op.f('ix_stock_transformation_reference'), 'stock', ['transformation_reference'], unique=False)
op.create_index(op.f('ix_stock_expiration_date'), 'stock', ['expiration_date'], unique=False)
op.create_index(op.f('ix_stock_is_expired'), 'stock', ['is_expired'], unique=False)
op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'], unique=False)
op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'], unique=False)
op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'], unique=False)
op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'], unique=False)
op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'], unique=False)
op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'], unique=False)
op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'], unique=False)
op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'], unique=False)
# Create stock_movements table
op.create_table('stock_movements',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('movement_type', sa.Enum('PURCHASE', 'PRODUCTION_USE', 'ADJUSTMENT', 'WASTE', 'TRANSFER', 'RETURN', 'INITIAL_STOCK', name='stockmovementtype'), nullable=False),
sa.Column('quantity', sa.Float(), nullable=False),
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('quantity_before', sa.Float(), nullable=True),
sa.Column('quantity_after', sa.Float(), nullable=True),
sa.Column('reference_number', sa.String(100), nullable=True),
sa.Column('supplier_id', sa.UUID(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('reason_code', sa.String(50), nullable=True),
sa.Column('movement_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_stock_movements_tenant_id'), 'stock_movements', ['tenant_id'], unique=False)
op.create_index(op.f('ix_stock_movements_ingredient_id'), 'stock_movements', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_movements_stock_id'), 'stock_movements', ['stock_id'], unique=False)
op.create_index(op.f('ix_stock_movements_movement_type'), 'stock_movements', ['movement_type'], unique=False)
op.create_index(op.f('ix_stock_movements_reference_number'), 'stock_movements', ['reference_number'], unique=False)
op.create_index(op.f('ix_stock_movements_supplier_id'), 'stock_movements', ['supplier_id'], unique=False)
op.create_index(op.f('ix_stock_movements_movement_date'), 'stock_movements', ['movement_date'], unique=False)
op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'], unique=False)
op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'], unique=False)
op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'], unique=False)
op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'], unique=False)
op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'], unique=False)
# Create product_transformations table
op.create_table('product_transformations',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('transformation_reference', sa.String(100), nullable=False),
sa.Column('source_ingredient_id', sa.UUID(), nullable=False),
sa.Column('target_ingredient_id', sa.UUID(), nullable=False),
sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('source_quantity', sa.Float(), nullable=False),
sa.Column('target_quantity', sa.Float(), nullable=False),
sa.Column('conversion_ratio', sa.Float(), nullable=False),
sa.Column('expiration_calculation_method', sa.String(50), nullable=False),
sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('process_notes', sa.Text(), nullable=True),
sa.Column('performed_by', sa.UUID(), nullable=True),
sa.Column('source_batch_numbers', sa.Text(), nullable=True),
sa.Column('target_batch_number', sa.String(100), nullable=True),
sa.Column('is_completed', sa.Boolean(), nullable=True),
sa.Column('is_reversed', sa.Boolean(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_product_transformations_tenant_id'), 'product_transformations', ['tenant_id'], unique=False)
op.create_index(op.f('ix_product_transformations_transformation_reference'), 'product_transformations', ['transformation_reference'], unique=False)
op.create_index(op.f('ix_product_transformations_source_ingredient_id'), 'product_transformations', ['source_ingredient_id'], unique=False)
op.create_index(op.f('ix_product_transformations_target_ingredient_id'), 'product_transformations', ['target_ingredient_id'], unique=False)
op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'], unique=False)
op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'], unique=False)
op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'], unique=False)
op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'], unique=False)
op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'], unique=False)
# Create stock_alerts table
op.create_table('stock_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('alert_type', sa.String(50), nullable=False),
sa.Column('severity', sa.String(20), nullable=False),
sa.Column('title', sa.String(255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('current_quantity', sa.Float(), nullable=True),
sa.Column('threshold_value', sa.Float(), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_acknowledged', sa.Boolean(), nullable=True),
sa.Column('acknowledged_by', sa.UUID(), nullable=True),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_resolved', sa.Boolean(), nullable=True),
sa.Column('resolved_by', sa.UUID(), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_stock_alerts_tenant_id'), 'stock_alerts', ['tenant_id'], unique=False)
op.create_index(op.f('ix_stock_alerts_ingredient_id'), 'stock_alerts', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_alerts_stock_id'), 'stock_alerts', ['stock_id'], unique=False)
op.create_index(op.f('ix_stock_alerts_alert_type'), 'stock_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_stock_alerts_is_resolved'), 'stock_alerts', ['is_resolved'], unique=False)
op.create_index(op.f('ix_stock_alerts_is_active'), 'stock_alerts', ['is_active'], unique=False)
op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'], unique=False)
op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'], unique=False)
op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'], unique=False)
op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'], unique=False)
# Create food_safety_compliance table
op.create_table('food_safety_compliance',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=False),
sa.Column('compliance_status', sa.Enum('COMPLIANT', 'NON_COMPLIANT', 'PENDING_REVIEW', 'EXPIRED', 'WARNING', name='compliancestatus'), nullable=False),
sa.Column('certification_number', sa.String(100), nullable=True),
sa.Column('certifying_body', sa.String(200), nullable=True),
sa.Column('certification_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('compliance_notes', sa.Text(), nullable=True),
sa.Column('documentation_url', sa.String(500), nullable=True),
sa.Column('last_audit_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('next_audit_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('auditor_name', sa.String(200), nullable=True),
sa.Column('audit_score', sa.Float(), nullable=True),
sa.Column('risk_level', sa.String(20), nullable=False),
sa.Column('risk_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('mitigation_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('requires_monitoring', sa.Boolean(), nullable=False),
sa.Column('monitoring_frequency_days', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.Column('updated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_food_safety_compliance_tenant_id'), 'food_safety_compliance', ['tenant_id'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_ingredient_id'), 'food_safety_compliance', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_standard'), 'food_safety_compliance', ['standard'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_expiration_date'), 'food_safety_compliance', ['expiration_date'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_next_audit_date'), 'food_safety_compliance', ['next_audit_date'], unique=False)
# Create temperature_logs table
op.create_table('temperature_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('storage_location', sa.String(100), nullable=False),
sa.Column('warehouse_zone', sa.String(50), nullable=True),
sa.Column('equipment_id', sa.String(100), nullable=True),
sa.Column('temperature_celsius', sa.Float(), nullable=False),
sa.Column('humidity_percentage', sa.Float(), nullable=True),
sa.Column('target_temperature_min', sa.Float(), nullable=True),
sa.Column('target_temperature_max', sa.Float(), nullable=True),
sa.Column('is_within_range', sa.Boolean(), nullable=False),
sa.Column('alert_triggered', sa.Boolean(), nullable=False),
sa.Column('deviation_minutes', sa.Integer(), nullable=True),
sa.Column('measurement_method', sa.String(50), nullable=False),
sa.Column('device_id', sa.String(100), nullable=True),
sa.Column('calibration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('recorded_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
sa.Column('recorded_by', sa.UUID(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_temperature_logs_tenant_id'), 'temperature_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_temperature_logs_storage_location'), 'temperature_logs', ['storage_location'], unique=False)
op.create_index(op.f('ix_temperature_logs_equipment_id'), 'temperature_logs', ['equipment_id'], unique=False)
op.create_index(op.f('ix_temperature_logs_recorded_at'), 'temperature_logs', ['recorded_at'], unique=False)
# Create food_safety_alerts table
op.create_table('food_safety_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('alert_code', sa.String(50), nullable=False),
sa.Column('alert_type', sa.Enum('TEMPERATURE_VIOLATION', 'EXPIRATION_WARNING', 'EXPIRED_PRODUCT', 'CONTAMINATION_RISK', 'ALLERGEN_CROSS_CONTAMINATION', 'STORAGE_VIOLATION', 'QUALITY_DEGRADATION', 'RECALL_NOTICE', 'CERTIFICATION_EXPIRY', 'SUPPLIER_COMPLIANCE_ISSUE', name='foodsafetyalerttype'), nullable=False),
sa.Column('severity', sa.String(20), nullable=False),
sa.Column('risk_level', sa.String(20), nullable=False),
sa.Column('source_entity_type', sa.String(50), nullable=False),
sa.Column('source_entity_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=True),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('title', sa.String(200), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('detailed_message', sa.Text(), nullable=True),
sa.Column('regulatory_requirement', sa.String(100), nullable=True),
sa.Column('compliance_standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=True),
sa.Column('regulatory_action_required', sa.Boolean(), nullable=False),
sa.Column('trigger_condition', sa.String(200), nullable=True),
sa.Column('threshold_value', sa.Numeric(15, 4), nullable=True),
sa.Column('actual_value', sa.Numeric(15, 4), nullable=True),
sa.Column('alert_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('environmental_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('affected_products', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('public_health_risk', sa.Boolean(), nullable=False),
sa.Column('business_impact', sa.Text(), nullable=True),
sa.Column('estimated_loss', sa.Numeric(12, 2), nullable=True),
sa.Column('status', sa.String(50), nullable=False),
sa.Column('alert_state', sa.String(50), nullable=False),
sa.Column('immediate_actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('investigation_notes', sa.Text(), nullable=True),
sa.Column('resolution_action', sa.String(200), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('corrective_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('preventive_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('first_occurred_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('last_occurred_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('escalation_deadline', sa.DateTime(timezone=True), nullable=True),
sa.Column('occurrence_count', sa.Integer(), nullable=False),
sa.Column('is_recurring', sa.Boolean(), nullable=False),
sa.Column('recurrence_pattern', sa.String(100), nullable=True),
sa.Column('assigned_to', sa.UUID(), nullable=True),
sa.Column('assigned_role', sa.String(50), nullable=True),
sa.Column('escalated_to', sa.UUID(), nullable=True),
sa.Column('escalation_level', sa.Integer(), nullable=False),
sa.Column('notification_sent', sa.Boolean(), nullable=False),
sa.Column('notification_methods', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('regulatory_notification_required', sa.Boolean(), nullable=False),
sa.Column('regulatory_notification_sent', sa.Boolean(), nullable=False),
sa.Column('documentation', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('audit_trail', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('external_reference', sa.String(100), nullable=True),
sa.Column('detection_time', sa.DateTime(timezone=True), nullable=True),
sa.Column('response_time_minutes', sa.Integer(), nullable=True),
sa.Column('resolution_time_minutes', sa.Integer(), nullable=True),
sa.Column('alert_accuracy', sa.Boolean(), nullable=True),
sa.Column('false_positive', sa.Boolean(), nullable=False),
sa.Column('feedback_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.Column('updated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_food_safety_alerts_tenant_id'), 'food_safety_alerts', ['tenant_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_alert_code'), 'food_safety_alerts', ['alert_code'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_alert_type'), 'food_safety_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_severity'), 'food_safety_alerts', ['severity'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_source_entity_id'), 'food_safety_alerts', ['source_entity_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_ingredient_id'), 'food_safety_alerts', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_stock_id'), 'food_safety_alerts', ['stock_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_first_occurred_at'), 'food_safety_alerts', ['first_occurred_at'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_status'), 'food_safety_alerts', ['status'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_assigned_to'), 'food_safety_alerts', ['assigned_to'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_escalated_to'), 'food_safety_alerts', ['escalated_to'], unique=False)
def downgrade() -> None:
    """Revert the initial inventory schema.

    Drops all tables created by upgrade() in reverse dependency order
    (children before the tables they reference), then removes the
    PostgreSQL ENUM types that upgrade() created implicitly via
    sa.Enum(...) columns.  op.drop_table() does NOT drop those types,
    and leaving them behind makes a subsequent upgrade() fail with
    'type ... already exists'.
    """
    # Drop food_safety_alerts table
    op.drop_index(op.f('ix_food_safety_alerts_escalated_to'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_assigned_to'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_status'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_first_occurred_at'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_stock_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_ingredient_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_source_entity_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_severity'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_alert_type'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_alert_code'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_tenant_id'), table_name='food_safety_alerts')
    op.drop_table('food_safety_alerts')
    # Drop temperature_logs table
    op.drop_index(op.f('ix_temperature_logs_recorded_at'), table_name='temperature_logs')
    op.drop_index(op.f('ix_temperature_logs_equipment_id'), table_name='temperature_logs')
    op.drop_index(op.f('ix_temperature_logs_storage_location'), table_name='temperature_logs')
    op.drop_index(op.f('ix_temperature_logs_tenant_id'), table_name='temperature_logs')
    op.drop_table('temperature_logs')
    # Drop food_safety_compliance table
    op.drop_index(op.f('ix_food_safety_compliance_next_audit_date'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_expiration_date'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_standard'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_ingredient_id'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_tenant_id'), table_name='food_safety_compliance')
    op.drop_table('food_safety_compliance')
    # Drop stock_alerts table
    op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
    op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
    op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
    op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_is_active'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_is_resolved'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_alert_type'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_stock_id'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_ingredient_id'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_tenant_id'), table_name='stock_alerts')
    op.drop_table('stock_alerts')
    # Drop product_transformations table
    op.drop_index('idx_transformations_stages', table_name='product_transformations')
    op.drop_index('idx_transformations_target', table_name='product_transformations')
    op.drop_index('idx_transformations_source', table_name='product_transformations')
    op.drop_index('idx_transformations_reference', table_name='product_transformations')
    op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
    op.drop_index(op.f('ix_product_transformations_target_ingredient_id'), table_name='product_transformations')
    op.drop_index(op.f('ix_product_transformations_source_ingredient_id'), table_name='product_transformations')
    op.drop_index(op.f('ix_product_transformations_transformation_reference'), table_name='product_transformations')
    op.drop_index(op.f('ix_product_transformations_tenant_id'), table_name='product_transformations')
    op.drop_table('product_transformations')
    # Drop stock_movements table
    op.drop_index('idx_movements_supplier', table_name='stock_movements')
    op.drop_index('idx_movements_reference', table_name='stock_movements')
    op.drop_index('idx_movements_type', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_date', table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_movement_date'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_supplier_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_reference_number'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_movement_type'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_stock_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_ingredient_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_tenant_id'), table_name='stock_movements')
    op.drop_table('stock_movements')
    # Drop stock table
    op.drop_index('idx_stock_final_expiration', table_name='stock')
    op.drop_index('idx_stock_transformation', table_name='stock')
    op.drop_index('idx_stock_production_stage', table_name='stock')
    op.drop_index('idx_stock_quality', table_name='stock')
    op.drop_index('idx_stock_low_levels', table_name='stock')
    op.drop_index('idx_stock_batch', table_name='stock')
    op.drop_index('idx_stock_expiration', table_name='stock')
    op.drop_index('idx_stock_tenant_ingredient', table_name='stock')
    op.drop_index(op.f('ix_stock_is_expired'), table_name='stock')
    op.drop_index(op.f('ix_stock_expiration_date'), table_name='stock')
    op.drop_index(op.f('ix_stock_transformation_reference'), table_name='stock')
    op.drop_index(op.f('ix_stock_lot_number'), table_name='stock')
    op.drop_index(op.f('ix_stock_batch_number'), table_name='stock')
    op.drop_index(op.f('ix_stock_supplier_id'), table_name='stock')
    op.drop_index(op.f('ix_stock_ingredient_id'), table_name='stock')
    op.drop_index(op.f('ix_stock_tenant_id'), table_name='stock')
    op.drop_table('stock')
    # Drop ingredients table
    op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
    op.drop_index('idx_ingredients_product_category', table_name='ingredients')
    op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
    op.drop_index('idx_ingredients_product_type', table_name='ingredients')
    op.drop_index('idx_ingredients_barcode', table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_product_category'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_ingredient_category'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_product_type'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_barcode'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_sku'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_name'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_tenant_id'), table_name='ingredients')
    op.drop_table('ingredients')
    # Drop the ENUM types created implicitly by the sa.Enum(...) columns in
    # upgrade().  Names are taken from the `name=` arguments of those
    # columns; checkfirst=True makes each drop a no-op if the type does not
    # exist, so this is safe even if a type was never created on this
    # database.  (The ingredients-related enum names are assumed from the
    # ingredients column definitions — TODO confirm against upgrade().)
    bind = op.get_bind()
    for enum_name in (
        'foodsafetyalerttype',
        'compliancestatus',
        'foodsafetystandard',
        'productionstage',
        'stockmovementtype',
        'producttype',
        'ingredientcategory',
        'productcategory',
        'unitofmeasure',
    ):
        sa.Enum(name=enum_name).drop(bind, checkfirst=True)

View File

@@ -0,0 +1,456 @@
"""initial_schema_20251001_1118
Revision ID: d0a91cdc45f1
Revises:
Create Date: 2025-10-01 11:19:01.146238+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'd0a91cdc45f1'
# No parent revision: this is the base (initial) migration of the chain.
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial alert-processor schema: all tables and indexes.

    Auto-generated by Alembic autogenerate. Table creation order respects
    foreign-key dependencies: ``ingredients`` first, then tables that
    reference it (``food_safety_compliance``, ``product_transformations``,
    ``stock``), then tables referencing ``stock``.

    Note: ``sa.Enum(...)`` columns implicitly create the corresponding
    PostgreSQL ENUM types on first use.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- ingredients: catalog of ingredients / finished products ------------
    op.create_table('ingredients',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('sku', sa.String(length=100), nullable=True),
    sa.Column('barcode', sa.String(length=50), nullable=True),
    sa.Column('product_type', sa.Enum('INGREDIENT', 'FINISHED_PRODUCT', name='producttype'), nullable=False),
    sa.Column('ingredient_category', sa.Enum('FLOUR', 'YEAST', 'DAIRY', 'EGGS', 'SUGAR', 'FATS', 'SALT', 'SPICES', 'ADDITIVES', 'PACKAGING', 'CLEANING', 'OTHER', name='ingredientcategory'), nullable=True),
    sa.Column('product_category', sa.Enum('BREAD', 'CROISSANTS', 'PASTRIES', 'CAKES', 'COOKIES', 'MUFFINS', 'SANDWICHES', 'SEASONAL', 'BEVERAGES', 'OTHER_PRODUCTS', name='productcategory'), nullable=True),
    sa.Column('subcategory', sa.String(length=100), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('brand', sa.String(length=100), nullable=True),
    sa.Column('unit_of_measure', sa.Enum('KILOGRAMS', 'GRAMS', 'LITERS', 'MILLILITERS', 'UNITS', 'PIECES', 'PACKAGES', 'BAGS', 'BOXES', name='unitofmeasure'), nullable=False),
    sa.Column('package_size', sa.Float(), nullable=True),
    sa.Column('average_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('last_purchase_price', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('standard_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('low_stock_threshold', sa.Float(), nullable=False),
    sa.Column('reorder_point', sa.Float(), nullable=False),
    sa.Column('reorder_quantity', sa.Float(), nullable=False),
    sa.Column('max_stock_level', sa.Float(), nullable=True),
    sa.Column('shelf_life_days', sa.Integer(), nullable=True),
    sa.Column('display_life_hours', sa.Integer(), nullable=True),
    sa.Column('best_before_hours', sa.Integer(), nullable=True),
    sa.Column('storage_instructions', sa.Text(), nullable=True),
    sa.Column('central_baker_product_code', sa.String(length=100), nullable=True),
    sa.Column('delivery_days', sa.String(length=20), nullable=True),
    sa.Column('minimum_order_quantity', sa.Float(), nullable=True),
    sa.Column('pack_size', sa.Integer(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_perishable', sa.Boolean(), nullable=True),
    sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Composite tenant-scoped indexes (idx_*) plus per-column indexes (ix_*).
    # NOTE(review): idx_ingredients_barcode duplicates ix_ingredients_barcode
    # below (same single column) — harmless but redundant; kept as generated.
    op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'], unique=False)
    op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category', 'is_active'], unique=False)
    op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category', 'is_active'], unique=False)
    op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type', 'is_active'], unique=False)
    op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'], unique=False)
    # Unique: an ingredient name may appear at most once per tenant.
    op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
    op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'], unique=False)
    op.create_index(op.f('ix_ingredients_barcode'), 'ingredients', ['barcode'], unique=False)
    op.create_index(op.f('ix_ingredients_ingredient_category'), 'ingredients', ['ingredient_category'], unique=False)
    op.create_index(op.f('ix_ingredients_name'), 'ingredients', ['name'], unique=False)
    op.create_index(op.f('ix_ingredients_product_category'), 'ingredients', ['product_category'], unique=False)
    op.create_index(op.f('ix_ingredients_product_type'), 'ingredients', ['product_type'], unique=False)
    op.create_index(op.f('ix_ingredients_sku'), 'ingredients', ['sku'], unique=False)
    op.create_index(op.f('ix_ingredients_tenant_id'), 'ingredients', ['tenant_id'], unique=False)
    # --- temperature_logs: point-in-time temperature/humidity readings ------
    op.create_table('temperature_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('storage_location', sa.String(length=100), nullable=False),
    sa.Column('warehouse_zone', sa.String(length=50), nullable=True),
    sa.Column('equipment_id', sa.String(length=100), nullable=True),
    sa.Column('temperature_celsius', sa.Float(), nullable=False),
    sa.Column('humidity_percentage', sa.Float(), nullable=True),
    sa.Column('target_temperature_min', sa.Float(), nullable=True),
    sa.Column('target_temperature_max', sa.Float(), nullable=True),
    sa.Column('is_within_range', sa.Boolean(), nullable=False),
    sa.Column('alert_triggered', sa.Boolean(), nullable=False),
    sa.Column('deviation_minutes', sa.Integer(), nullable=True),
    sa.Column('measurement_method', sa.String(length=50), nullable=False),
    sa.Column('device_id', sa.String(length=100), nullable=True),
    sa.Column('calibration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('recorded_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('recorded_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_temperature_logs_recorded_at'), 'temperature_logs', ['recorded_at'], unique=False)
    op.create_index(op.f('ix_temperature_logs_storage_location'), 'temperature_logs', ['storage_location'], unique=False)
    op.create_index(op.f('ix_temperature_logs_tenant_id'), 'temperature_logs', ['tenant_id'], unique=False)
    # --- food_safety_compliance: certifications/audits per ingredient (FK) --
    op.create_table('food_safety_compliance',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=False),
    sa.Column('compliance_status', sa.Enum('COMPLIANT', 'NON_COMPLIANT', 'PENDING_REVIEW', 'EXPIRED', 'WARNING', name='compliancestatus'), nullable=False),
    sa.Column('certification_number', sa.String(length=100), nullable=True),
    sa.Column('certifying_body', sa.String(length=200), nullable=True),
    sa.Column('certification_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('compliance_notes', sa.Text(), nullable=True),
    sa.Column('documentation_url', sa.String(length=500), nullable=True),
    sa.Column('last_audit_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('next_audit_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('auditor_name', sa.String(length=200), nullable=True),
    sa.Column('audit_score', sa.Float(), nullable=True),
    sa.Column('risk_level', sa.String(length=20), nullable=False),
    sa.Column('risk_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('mitigation_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('requires_monitoring', sa.Boolean(), nullable=False),
    sa.Column('monitoring_frequency_days', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('updated_by', sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_food_safety_compliance_expiration_date'), 'food_safety_compliance', ['expiration_date'], unique=False)
    op.create_index(op.f('ix_food_safety_compliance_ingredient_id'), 'food_safety_compliance', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_food_safety_compliance_next_audit_date'), 'food_safety_compliance', ['next_audit_date'], unique=False)
    op.create_index(op.f('ix_food_safety_compliance_standard'), 'food_safety_compliance', ['standard'], unique=False)
    op.create_index(op.f('ix_food_safety_compliance_tenant_id'), 'food_safety_compliance', ['tenant_id'], unique=False)
    # --- product_transformations: stage conversions between ingredients -----
    op.create_table('product_transformations',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('transformation_reference', sa.String(length=100), nullable=False),
    sa.Column('source_ingredient_id', sa.UUID(), nullable=False),
    sa.Column('target_ingredient_id', sa.UUID(), nullable=False),
    sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
    sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
    sa.Column('source_quantity', sa.Float(), nullable=False),
    sa.Column('target_quantity', sa.Float(), nullable=False),
    sa.Column('conversion_ratio', sa.Float(), nullable=False),
    sa.Column('expiration_calculation_method', sa.String(length=50), nullable=False),
    sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
    sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('process_notes', sa.Text(), nullable=True),
    sa.Column('performed_by', sa.UUID(), nullable=True),
    sa.Column('source_batch_numbers', sa.Text(), nullable=True),
    sa.Column('target_batch_number', sa.String(length=100), nullable=True),
    sa.Column('is_completed', sa.Boolean(), nullable=True),
    sa.Column('is_reversed', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
    sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'], unique=False)
    op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'], unique=False)
    op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'], unique=False)
    op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'], unique=False)
    op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'], unique=False)
    op.create_index(op.f('ix_product_transformations_tenant_id'), 'product_transformations', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_product_transformations_transformation_reference'), 'product_transformations', ['transformation_reference'], unique=False)
    # --- stock: per-batch inventory records (FK -> ingredients) -------------
    op.create_table('stock',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('supplier_id', sa.UUID(), nullable=True),
    sa.Column('batch_number', sa.String(length=100), nullable=True),
    sa.Column('lot_number', sa.String(length=100), nullable=True),
    sa.Column('supplier_batch_ref', sa.String(length=100), nullable=True),
    sa.Column('production_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
    sa.Column('transformation_reference', sa.String(length=100), nullable=True),
    sa.Column('current_quantity', sa.Float(), nullable=False),
    sa.Column('reserved_quantity', sa.Float(), nullable=False),
    sa.Column('available_quantity', sa.Float(), nullable=False),
    sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('storage_location', sa.String(length=100), nullable=True),
    sa.Column('warehouse_zone', sa.String(length=50), nullable=True),
    sa.Column('shelf_position', sa.String(length=50), nullable=True),
    sa.Column('requires_refrigeration', sa.Boolean(), nullable=True),
    sa.Column('requires_freezing', sa.Boolean(), nullable=True),
    sa.Column('storage_temperature_min', sa.Float(), nullable=True),
    sa.Column('storage_temperature_max', sa.Float(), nullable=True),
    sa.Column('storage_humidity_max', sa.Float(), nullable=True),
    sa.Column('shelf_life_days', sa.Integer(), nullable=True),
    sa.Column('storage_instructions', sa.Text(), nullable=True),
    sa.Column('is_available', sa.Boolean(), nullable=True),
    sa.Column('is_expired', sa.Boolean(), nullable=True),
    sa.Column('quality_status', sa.String(length=20), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'], unique=False)
    op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'], unique=False)
    op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'], unique=False)
    op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'], unique=False)
    op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'], unique=False)
    op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'], unique=False)
    op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'], unique=False)
    op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'], unique=False)
    op.create_index(op.f('ix_stock_batch_number'), 'stock', ['batch_number'], unique=False)
    op.create_index(op.f('ix_stock_expiration_date'), 'stock', ['expiration_date'], unique=False)
    op.create_index(op.f('ix_stock_ingredient_id'), 'stock', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_stock_is_expired'), 'stock', ['is_expired'], unique=False)
    op.create_index(op.f('ix_stock_lot_number'), 'stock', ['lot_number'], unique=False)
    op.create_index(op.f('ix_stock_production_stage'), 'stock', ['production_stage'], unique=False)
    op.create_index(op.f('ix_stock_supplier_id'), 'stock', ['supplier_id'], unique=False)
    op.create_index(op.f('ix_stock_tenant_id'), 'stock', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_stock_transformation_reference'), 'stock', ['transformation_reference'], unique=False)
    # --- food_safety_alerts: alert lifecycle (FK -> ingredients, stock) -----
    op.create_table('food_safety_alerts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('alert_code', sa.String(length=50), nullable=False),
    sa.Column('alert_type', sa.Enum('TEMPERATURE_VIOLATION', 'EXPIRATION_WARNING', 'EXPIRED_PRODUCT', 'CONTAMINATION_RISK', 'ALLERGEN_CROSS_CONTAMINATION', 'STORAGE_VIOLATION', 'QUALITY_DEGRADATION', 'RECALL_NOTICE', 'CERTIFICATION_EXPIRY', 'SUPPLIER_COMPLIANCE_ISSUE', name='foodsafetyalerttype'), nullable=False),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('risk_level', sa.String(length=20), nullable=False),
    sa.Column('source_entity_type', sa.String(length=50), nullable=False),
    sa.Column('source_entity_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=True),
    sa.Column('stock_id', sa.UUID(), nullable=True),
    sa.Column('title', sa.String(length=200), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('detailed_message', sa.Text(), nullable=True),
    sa.Column('regulatory_requirement', sa.String(length=100), nullable=True),
    sa.Column('compliance_standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=True),
    sa.Column('regulatory_action_required', sa.Boolean(), nullable=False),
    sa.Column('trigger_condition', sa.String(length=200), nullable=True),
    sa.Column('threshold_value', sa.Numeric(precision=15, scale=4), nullable=True),
    sa.Column('actual_value', sa.Numeric(precision=15, scale=4), nullable=True),
    sa.Column('alert_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('environmental_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('affected_products', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('public_health_risk', sa.Boolean(), nullable=False),
    sa.Column('business_impact', sa.Text(), nullable=True),
    sa.Column('estimated_loss', sa.Numeric(precision=12, scale=2), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('alert_state', sa.String(length=50), nullable=False),
    sa.Column('immediate_actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('investigation_notes', sa.Text(), nullable=True),
    sa.Column('resolution_action', sa.String(length=200), nullable=True),
    sa.Column('resolution_notes', sa.Text(), nullable=True),
    sa.Column('corrective_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('preventive_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('first_occurred_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('last_occurred_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('escalation_deadline', sa.DateTime(timezone=True), nullable=True),
    sa.Column('occurrence_count', sa.Integer(), nullable=False),
    sa.Column('is_recurring', sa.Boolean(), nullable=False),
    sa.Column('recurrence_pattern', sa.String(length=100), nullable=True),
    sa.Column('assigned_to', sa.UUID(), nullable=True),
    sa.Column('assigned_role', sa.String(length=50), nullable=True),
    sa.Column('escalated_to', sa.UUID(), nullable=True),
    sa.Column('escalation_level', sa.Integer(), nullable=False),
    sa.Column('notification_sent', sa.Boolean(), nullable=False),
    sa.Column('notification_methods', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('regulatory_notification_required', sa.Boolean(), nullable=False),
    sa.Column('regulatory_notification_sent', sa.Boolean(), nullable=False),
    sa.Column('documentation', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_trail', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('external_reference', sa.String(length=100), nullable=True),
    sa.Column('detection_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('response_time_minutes', sa.Integer(), nullable=True),
    sa.Column('resolution_time_minutes', sa.Integer(), nullable=True),
    sa.Column('alert_accuracy', sa.Boolean(), nullable=True),
    sa.Column('false_positive', sa.Boolean(), nullable=False),
    sa.Column('feedback_notes', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('updated_by', sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
    sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_food_safety_alerts_alert_code'), 'food_safety_alerts', ['alert_code'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_alert_type'), 'food_safety_alerts', ['alert_type'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_first_occurred_at'), 'food_safety_alerts', ['first_occurred_at'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_ingredient_id'), 'food_safety_alerts', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_severity'), 'food_safety_alerts', ['severity'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_source_entity_id'), 'food_safety_alerts', ['source_entity_id'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_status'), 'food_safety_alerts', ['status'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_stock_id'), 'food_safety_alerts', ['stock_id'], unique=False)
    op.create_index(op.f('ix_food_safety_alerts_tenant_id'), 'food_safety_alerts', ['tenant_id'], unique=False)
    # --- stock_alerts: stock-level alerts (FK -> ingredients, stock) --------
    op.create_table('stock_alerts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('stock_id', sa.UUID(), nullable=True),
    sa.Column('alert_type', sa.String(length=50), nullable=False),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('title', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=False),
    sa.Column('current_quantity', sa.Float(), nullable=True),
    sa.Column('threshold_value', sa.Float(), nullable=True),
    sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_acknowledged', sa.Boolean(), nullable=True),
    sa.Column('acknowledged_by', sa.UUID(), nullable=True),
    sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('is_resolved', sa.Boolean(), nullable=True),
    sa.Column('resolved_by', sa.UUID(), nullable=True),
    sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('resolution_notes', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
    sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'], unique=False)
    op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'], unique=False)
    op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'], unique=False)
    op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'], unique=False)
    op.create_index(op.f('ix_stock_alerts_alert_type'), 'stock_alerts', ['alert_type'], unique=False)
    op.create_index(op.f('ix_stock_alerts_ingredient_id'), 'stock_alerts', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_stock_alerts_stock_id'), 'stock_alerts', ['stock_id'], unique=False)
    op.create_index(op.f('ix_stock_alerts_tenant_id'), 'stock_alerts', ['tenant_id'], unique=False)
    # --- stock_movements: quantity change audit trail (FK -> stock) ---------
    op.create_table('stock_movements',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('stock_id', sa.UUID(), nullable=True),
    sa.Column('movement_type', sa.Enum('PURCHASE', 'PRODUCTION_USE', 'ADJUSTMENT', 'WASTE', 'TRANSFER', 'RETURN', 'INITIAL_STOCK', name='stockmovementtype'), nullable=False),
    sa.Column('quantity', sa.Float(), nullable=False),
    sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('quantity_before', sa.Float(), nullable=True),
    sa.Column('quantity_after', sa.Float(), nullable=True),
    sa.Column('reference_number', sa.String(length=100), nullable=True),
    sa.Column('supplier_id', sa.UUID(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('reason_code', sa.String(length=50), nullable=True),
    sa.Column('movement_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
    sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'], unique=False)
    op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'], unique=False)
    op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'], unique=False)
    op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'], unique=False)
    op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'], unique=False)
    op.create_index(op.f('ix_stock_movements_ingredient_id'), 'stock_movements', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_stock_movements_movement_date'), 'stock_movements', ['movement_date'], unique=False)
    op.create_index(op.f('ix_stock_movements_movement_type'), 'stock_movements', ['movement_type'], unique=False)
    op.create_index(op.f('ix_stock_movements_reference_number'), 'stock_movements', ['reference_number'], unique=False)
    op.create_index(op.f('ix_stock_movements_stock_id'), 'stock_movements', ['stock_id'], unique=False)
    op.create_index(op.f('ix_stock_movements_supplier_id'), 'stock_movements', ['supplier_id'], unique=False)
    op.create_index(op.f('ix_stock_movements_tenant_id'), 'stock_movements', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert the initial schema: drop all indexes, tables, and ENUM types.

    Tables are dropped in reverse dependency order (children referencing
    ``stock``/``ingredients`` first). Fix over the auto-generated version:
    ``op.drop_table`` does NOT remove the PostgreSQL ENUM types that the
    ``sa.Enum`` columns implicitly created in ``upgrade()``; without dropping
    them explicitly, a downgrade leaves orphaned types behind and a
    subsequent re-upgrade fails with "type ... already exists".
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_stock_movements_tenant_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_supplier_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_stock_id'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_reference_number'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_movement_type'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_movement_date'), table_name='stock_movements')
    op.drop_index(op.f('ix_stock_movements_ingredient_id'), table_name='stock_movements')
    op.drop_index('idx_movements_type', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_date', table_name='stock_movements')
    op.drop_index('idx_movements_supplier', table_name='stock_movements')
    op.drop_index('idx_movements_reference', table_name='stock_movements')
    op.drop_table('stock_movements')
    op.drop_index(op.f('ix_stock_alerts_tenant_id'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_stock_id'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_ingredient_id'), table_name='stock_alerts')
    op.drop_index(op.f('ix_stock_alerts_alert_type'), table_name='stock_alerts')
    op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
    op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
    op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')
    op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
    op.drop_table('stock_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_tenant_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_stock_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_status'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_source_entity_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_severity'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_ingredient_id'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_first_occurred_at'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_alert_type'), table_name='food_safety_alerts')
    op.drop_index(op.f('ix_food_safety_alerts_alert_code'), table_name='food_safety_alerts')
    op.drop_table('food_safety_alerts')
    op.drop_index(op.f('ix_stock_transformation_reference'), table_name='stock')
    op.drop_index(op.f('ix_stock_tenant_id'), table_name='stock')
    op.drop_index(op.f('ix_stock_supplier_id'), table_name='stock')
    op.drop_index(op.f('ix_stock_production_stage'), table_name='stock')
    op.drop_index(op.f('ix_stock_lot_number'), table_name='stock')
    op.drop_index(op.f('ix_stock_is_expired'), table_name='stock')
    op.drop_index(op.f('ix_stock_ingredient_id'), table_name='stock')
    op.drop_index(op.f('ix_stock_expiration_date'), table_name='stock')
    op.drop_index(op.f('ix_stock_batch_number'), table_name='stock')
    op.drop_index('idx_stock_transformation', table_name='stock')
    op.drop_index('idx_stock_tenant_ingredient', table_name='stock')
    op.drop_index('idx_stock_quality', table_name='stock')
    op.drop_index('idx_stock_production_stage', table_name='stock')
    op.drop_index('idx_stock_low_levels', table_name='stock')
    op.drop_index('idx_stock_final_expiration', table_name='stock')
    op.drop_index('idx_stock_expiration', table_name='stock')
    op.drop_index('idx_stock_batch', table_name='stock')
    op.drop_table('stock')
    op.drop_index(op.f('ix_product_transformations_transformation_reference'), table_name='product_transformations')
    op.drop_index(op.f('ix_product_transformations_tenant_id'), table_name='product_transformations')
    op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
    op.drop_index('idx_transformations_target', table_name='product_transformations')
    op.drop_index('idx_transformations_stages', table_name='product_transformations')
    op.drop_index('idx_transformations_source', table_name='product_transformations')
    op.drop_index('idx_transformations_reference', table_name='product_transformations')
    op.drop_table('product_transformations')
    op.drop_index(op.f('ix_food_safety_compliance_tenant_id'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_standard'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_next_audit_date'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_ingredient_id'), table_name='food_safety_compliance')
    op.drop_index(op.f('ix_food_safety_compliance_expiration_date'), table_name='food_safety_compliance')
    op.drop_table('food_safety_compliance')
    op.drop_index(op.f('ix_temperature_logs_tenant_id'), table_name='temperature_logs')
    op.drop_index(op.f('ix_temperature_logs_storage_location'), table_name='temperature_logs')
    op.drop_index(op.f('ix_temperature_logs_recorded_at'), table_name='temperature_logs')
    op.drop_table('temperature_logs')
    op.drop_index(op.f('ix_ingredients_tenant_id'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_sku'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_product_type'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_product_category'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_name'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_ingredient_category'), table_name='ingredients')
    op.drop_index(op.f('ix_ingredients_barcode'), table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')
    op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
    op.drop_index('idx_ingredients_product_type', table_name='ingredients')
    op.drop_index('idx_ingredients_product_category', table_name='ingredients')
    op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
    op.drop_index('idx_ingredients_barcode', table_name='ingredients')
    op.drop_table('ingredients')
    # Drop the PostgreSQL ENUM types created implicitly by sa.Enum columns in
    # upgrade(); drop_table leaves them behind otherwise. checkfirst=True makes
    # this safe to re-run (no error if a type is already gone).
    bind = op.get_bind()
    for enum_name in (
        'stockmovementtype',
        'foodsafetyalerttype',
        'compliancestatus',
        'foodsafetystandard',
        'productionstage',
        'unitofmeasure',
        'productcategory',
        'ingredientcategory',
        'producttype',
    ):
        sa.Enum(name=enum_name).drop(bind, checkfirst=True)
    # ### end Alembic commands ###

View File

@@ -7,6 +7,7 @@ Import all models to ensure they are registered with SQLAlchemy Base.
# Import all models to register them with the Base metadata
from .notifications import (
Notification,
NotificationTemplate,
NotificationType,
NotificationStatus,
NotificationPriority,
@@ -21,6 +22,7 @@ from .templates import (
# List all models for easier access
__all__ = [
"Notification",
"NotificationTemplate",
"NotificationType",
"NotificationStatus",
"NotificationPriority",

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for notification service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('NOTIFICATION_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,245 +0,0 @@
"""Initial schema for notification service
Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'        # this migration's ID
down_revision = None       # base revision: nothing to downgrade to
branch_labels = None       # no named branches
depends_on = None          # no cross-branch dependencies
def upgrade() -> None:
    """Create the full notification-service schema.

    Tables created: notifications (core queue/history), notification_templates
    (generic per-channel templates), notification_preferences (per-user opt-in
    settings), notification_logs (per-attempt delivery audit trail),
    email_templates and whatsapp_templates (channel-specific templates).

    NOTE(review): the sa.Enum columns implicitly create the PostgreSQL types
    notificationtype, notificationstatus and notificationpriority; downgrade()
    drops them explicitly.
    """
    # Create notifications table
    op.create_table('notifications',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('sender_id', sa.UUID(), nullable=False),
        sa.Column('recipient_id', sa.UUID(), nullable=True),  # NULL when broadcast
        sa.Column('type', sa.Enum('EMAIL', 'WHATSAPP', 'PUSH', 'SMS', name='notificationtype'), nullable=False),
        sa.Column('status', sa.Enum('PENDING', 'SENT', 'DELIVERED', 'FAILED', 'CANCELLED', name='notificationstatus'), nullable=True),
        sa.Column('priority', sa.Enum('LOW', 'NORMAL', 'HIGH', 'URGENT', name='notificationpriority'), nullable=True),
        sa.Column('subject', sa.String(255), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('html_content', sa.Text(), nullable=True),
        sa.Column('template_id', sa.String(100), nullable=True),
        sa.Column('template_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('recipient_email', sa.String(255), nullable=True),
        sa.Column('recipient_phone', sa.String(20), nullable=True),
        sa.Column('delivery_channel', sa.String(50), nullable=True),
        sa.Column('scheduled_at', sa.DateTime(), nullable=True),
        sa.Column('sent_at', sa.DateTime(), nullable=True),
        sa.Column('delivered_at', sa.DateTime(), nullable=True),
        sa.Column('log_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('retry_count', sa.Integer(), nullable=True),
        sa.Column('max_retries', sa.Integer(), nullable=True),
        sa.Column('broadcast', sa.Boolean(), nullable=True),
        sa.Column('read', sa.Boolean(), nullable=True),
        sa.Column('read_at', sa.DateTime(), nullable=True),
        # Timestamps default to UTC now() at the database level.
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes covering the common notification lookup/filter columns.
    op.create_index(op.f('ix_notifications_tenant_id'), 'notifications', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_notifications_sender_id'), 'notifications', ['sender_id'], unique=False)
    op.create_index(op.f('ix_notifications_recipient_id'), 'notifications', ['recipient_id'], unique=False)
    op.create_index(op.f('ix_notifications_type'), 'notifications', ['type'], unique=False)
    op.create_index(op.f('ix_notifications_status'), 'notifications', ['status'], unique=False)
    op.create_index(op.f('ix_notifications_priority'), 'notifications', ['priority'], unique=False)
    op.create_index(op.f('ix_notifications_created_at'), 'notifications', ['created_at'], unique=False)
    op.create_index(op.f('ix_notifications_scheduled_at'), 'notifications', ['scheduled_at'], unique=False)
    op.create_index(op.f('ix_notifications_sent_at'), 'notifications', ['sent_at'], unique=False)
    # Create notification_templates table
    op.create_table('notification_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),  # NULL tenant_id: system-wide template
        sa.Column('template_key', sa.String(100), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('category', sa.String(50), nullable=False),
        sa.Column('type', sa.Enum('EMAIL', 'WHATSAPP', 'PUSH', 'SMS', name='notificationtype'), nullable=False),
        sa.Column('subject_template', sa.String(255), nullable=True),
        sa.Column('body_template', sa.Text(), nullable=False),
        sa.Column('html_template', sa.Text(), nullable=True),
        sa.Column('language', sa.String(2), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_system', sa.Boolean(), nullable=True),
        sa.Column('default_priority', sa.Enum('LOW', 'NORMAL', 'HIGH', 'URGENT', name='notificationpriority'), nullable=True),
        sa.Column('required_variables', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_notification_templates_tenant_id'), 'notification_templates', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_notification_templates_name'), 'notification_templates', ['name'], unique=False)
    op.create_index(op.f('ix_notification_templates_category'), 'notification_templates', ['category'], unique=False)
    op.create_index(op.f('ix_notification_templates_type'), 'notification_templates', ['type'], unique=False)
    # Create notification_preferences table
    op.create_table('notification_preferences',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),  # one row per user (unique constraint below)
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('email_enabled', sa.Boolean(), nullable=True),
        sa.Column('email_alerts', sa.Boolean(), nullable=True),
        sa.Column('email_marketing', sa.Boolean(), nullable=True),
        sa.Column('email_reports', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_enabled', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_alerts', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_reports', sa.Boolean(), nullable=True),
        sa.Column('push_enabled', sa.Boolean(), nullable=True),
        sa.Column('push_alerts', sa.Boolean(), nullable=True),
        sa.Column('push_reports', sa.Boolean(), nullable=True),
        sa.Column('quiet_hours_start', sa.String(5), nullable=True),  # presumably "HH:MM" — verify against app code
        sa.Column('quiet_hours_end', sa.String(5), nullable=True),
        sa.Column('timezone', sa.String(50), nullable=True),
        sa.Column('digest_frequency', sa.String(20), nullable=True),
        sa.Column('max_emails_per_day', sa.Integer(), nullable=True),
        sa.Column('language', sa.String(2), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id')
    )
    op.create_index(op.f('ix_notification_preferences_user_id'), 'notification_preferences', ['user_id'], unique=False)
    op.create_index(op.f('ix_notification_preferences_tenant_id'), 'notification_preferences', ['tenant_id'], unique=False)
    # Create notification_logs table
    op.create_table('notification_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('notification_id', sa.UUID(), nullable=False),  # NOTE(review): no FK to notifications — confirm intentional
        sa.Column('attempt_number', sa.Integer(), nullable=False),
        sa.Column('status', sa.Enum('PENDING', 'SENT', 'DELIVERED', 'FAILED', 'CANCELLED', name='notificationstatus'), nullable=False),
        sa.Column('provider', sa.String(50), nullable=True),
        sa.Column('provider_message_id', sa.String(255), nullable=True),
        sa.Column('provider_response', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('attempted_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('response_time_ms', sa.Integer(), nullable=True),
        sa.Column('error_code', sa.String(50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('log_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notification_logs_notification_id'), 'notification_logs', ['notification_id'], unique=False)
    op.create_index(op.f('ix_notification_logs_attempted_at'), 'notification_logs', ['attempted_at'], unique=False)
    op.create_index(op.f('ix_notification_logs_provider'), 'notification_logs', ['provider'], unique=False)
    op.create_index(op.f('ix_notification_logs_status'), 'notification_logs', ['status'], unique=False)
    # Create email_templates table
    op.create_table('email_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('template_key', sa.String(100), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('subject', sa.String(255), nullable=False),
        sa.Column('html_body', sa.Text(), nullable=False),
        sa.Column('text_body', sa.Text(), nullable=True),
        sa.Column('from_email', sa.String(255), nullable=True),
        sa.Column('from_name', sa.String(255), nullable=True),
        sa.Column('reply_to', sa.String(255), nullable=True),
        sa.Column('variables', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('sample_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('language', sa.String(2), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_system', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_email_templates_tenant_id'), 'email_templates', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_email_templates_template_key'), 'email_templates', ['template_key'], unique=False)
    op.create_index(op.f('ix_email_templates_name'), 'email_templates', ['name'], unique=False)
    # Create whatsapp_templates table
    op.create_table('whatsapp_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('template_key', sa.String(100), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('whatsapp_template_name', sa.String(255), nullable=False),
        sa.Column('whatsapp_template_id', sa.String(255), nullable=True),
        sa.Column('language_code', sa.String(10), nullable=True),
        sa.Column('header_text', sa.String(60), nullable=True),
        sa.Column('body_text', sa.Text(), nullable=False),
        sa.Column('footer_text', sa.String(60), nullable=True),
        sa.Column('parameter_count', sa.Integer(), nullable=True),
        sa.Column('parameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('approval_status', sa.String(20), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_whatsapp_templates_tenant_id'), 'whatsapp_templates', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_whatsapp_templates_template_key'), 'whatsapp_templates', ['template_key'], unique=False)
    op.create_index(op.f('ix_whatsapp_templates_name'), 'whatsapp_templates', ['name'], unique=False)
    op.create_index(op.f('ix_whatsapp_templates_approval_status'), 'whatsapp_templates', ['approval_status'], unique=False)
def downgrade() -> None:
    """Reverse upgrade(): drop every table in reverse creation order, then
    drop the PostgreSQL enum types that the sa.Enum columns created.
    """
    # Drop whatsapp_templates table
    op.drop_index(op.f('ix_whatsapp_templates_approval_status'), table_name='whatsapp_templates')
    op.drop_index(op.f('ix_whatsapp_templates_name'), table_name='whatsapp_templates')
    op.drop_index(op.f('ix_whatsapp_templates_template_key'), table_name='whatsapp_templates')
    op.drop_index(op.f('ix_whatsapp_templates_tenant_id'), table_name='whatsapp_templates')
    op.drop_table('whatsapp_templates')
    # Drop email_templates table
    op.drop_index(op.f('ix_email_templates_name'), table_name='email_templates')
    op.drop_index(op.f('ix_email_templates_template_key'), table_name='email_templates')
    op.drop_index(op.f('ix_email_templates_tenant_id'), table_name='email_templates')
    op.drop_table('email_templates')
    # Drop notification_logs table
    op.drop_index(op.f('ix_notification_logs_status'), table_name='notification_logs')
    op.drop_index(op.f('ix_notification_logs_provider'), table_name='notification_logs')
    op.drop_index(op.f('ix_notification_logs_attempted_at'), table_name='notification_logs')
    op.drop_index(op.f('ix_notification_logs_notification_id'), table_name='notification_logs')
    op.drop_table('notification_logs')
    # Drop notification_preferences table
    op.drop_index(op.f('ix_notification_preferences_tenant_id'), table_name='notification_preferences')
    op.drop_index(op.f('ix_notification_preferences_user_id'), table_name='notification_preferences')
    op.drop_table('notification_preferences')
    # Drop notification_templates table
    op.drop_index(op.f('ix_notification_templates_type'), table_name='notification_templates')
    op.drop_index(op.f('ix_notification_templates_category'), table_name='notification_templates')
    op.drop_index(op.f('ix_notification_templates_name'), table_name='notification_templates')
    op.drop_index(op.f('ix_notification_templates_tenant_id'), table_name='notification_templates')
    op.drop_table('notification_templates')
    # Drop notifications table
    op.drop_index(op.f('ix_notifications_sent_at'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_scheduled_at'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_created_at'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_priority'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_status'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_type'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_recipient_id'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_sender_id'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_tenant_id'), table_name='notifications')
    op.drop_table('notifications')
    # Drop enums
    # checkfirst=True makes each drop a no-op if the type is already gone,
    # so a partially-applied downgrade can be re-run safely.
    notification_priority_enum = sa.Enum(name='notificationpriority')
    notification_priority_enum.drop(op.get_bind(), checkfirst=True)
    notification_status_enum = sa.Enum(name='notificationstatus')
    notification_status_enum.drop(op.get_bind(), checkfirst=True)
    notification_type_enum = sa.Enum(name='notificationtype')
    notification_type_enum.drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,184 @@
"""initial_schema_20251001_1119
Revision ID: 51fa7b09a051
Revises:
Create Date: 2025-10-01 11:19:26.135733+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '51fa7b09a051'                             # this migration's ID
down_revision: Union[str, None] = None                     # base revision: nothing to downgrade to
branch_labels: Union[str, Sequence[str], None] = None      # no named branches
depends_on: Union[str, Sequence[str], None] = None         # no cross-branch dependencies
def upgrade() -> None:
    """Create the notification-service schema (Alembic autogenerated).

    Tables: email_templates, notification_logs, notification_preferences,
    notification_templates, notifications, whatsapp_templates.

    NOTE(review): the sa.Enum columns implicitly create the PostgreSQL types
    notificationstatus, notificationtype and notificationpriority.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('email_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),  # NULL tenant_id: system-wide template
        sa.Column('template_key', sa.String(length=100), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('subject', sa.String(length=255), nullable=False),
        sa.Column('html_body', sa.Text(), nullable=False),
        sa.Column('text_body', sa.Text(), nullable=True),
        sa.Column('from_email', sa.String(length=255), nullable=True),
        sa.Column('from_name', sa.String(length=255), nullable=True),
        sa.Column('reply_to', sa.String(length=255), nullable=True),
        sa.Column('variables', sa.JSON(), nullable=True),
        sa.Column('sample_data', sa.JSON(), nullable=True),
        sa.Column('language', sa.String(length=2), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_system', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_email_templates_tenant_id'), 'email_templates', ['tenant_id'], unique=False)
    op.create_table('notification_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('notification_id', sa.UUID(), nullable=False),  # NOTE(review): no FK to notifications — confirm intentional
        sa.Column('attempt_number', sa.Integer(), nullable=False),
        sa.Column('status', sa.Enum('PENDING', 'SENT', 'DELIVERED', 'FAILED', 'CANCELLED', name='notificationstatus'), nullable=False),
        sa.Column('provider', sa.String(length=50), nullable=True),
        sa.Column('provider_message_id', sa.String(length=255), nullable=True),
        sa.Column('provider_response', sa.JSON(), nullable=True),
        sa.Column('attempted_at', sa.DateTime(), nullable=True),
        sa.Column('response_time_ms', sa.Integer(), nullable=True),
        sa.Column('error_code', sa.String(length=50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('log_metadata', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notification_logs_notification_id'), 'notification_logs', ['notification_id'], unique=False)
    op.create_table('notification_preferences',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),  # one row per user (unique index below)
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('email_enabled', sa.Boolean(), nullable=True),
        sa.Column('email_alerts', sa.Boolean(), nullable=True),
        sa.Column('email_marketing', sa.Boolean(), nullable=True),
        sa.Column('email_reports', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_enabled', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_alerts', sa.Boolean(), nullable=True),
        sa.Column('whatsapp_reports', sa.Boolean(), nullable=True),
        sa.Column('push_enabled', sa.Boolean(), nullable=True),
        sa.Column('push_alerts', sa.Boolean(), nullable=True),
        sa.Column('push_reports', sa.Boolean(), nullable=True),
        sa.Column('quiet_hours_start', sa.String(length=5), nullable=True),  # presumably "HH:MM" — verify against app code
        sa.Column('quiet_hours_end', sa.String(length=5), nullable=True),
        sa.Column('timezone', sa.String(length=50), nullable=True),
        sa.Column('digest_frequency', sa.String(length=20), nullable=True),
        sa.Column('max_emails_per_day', sa.Integer(), nullable=True),
        sa.Column('language', sa.String(length=2), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notification_preferences_tenant_id'), 'notification_preferences', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_notification_preferences_user_id'), 'notification_preferences', ['user_id'], unique=True)
    op.create_table('notification_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('template_key', sa.String(length=100), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('category', sa.String(length=50), nullable=False),
        sa.Column('type', sa.Enum('EMAIL', 'WHATSAPP', 'PUSH', 'SMS', name='notificationtype'), nullable=False),
        sa.Column('subject_template', sa.String(length=255), nullable=True),
        sa.Column('body_template', sa.Text(), nullable=False),
        sa.Column('html_template', sa.Text(), nullable=True),
        sa.Column('language', sa.String(length=2), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_system', sa.Boolean(), nullable=True),
        sa.Column('default_priority', sa.Enum('LOW', 'NORMAL', 'HIGH', 'URGENT', name='notificationpriority'), nullable=True),
        sa.Column('required_variables', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_notification_templates_tenant_id'), 'notification_templates', ['tenant_id'], unique=False)
    op.create_table('notifications',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('sender_id', sa.UUID(), nullable=False),
        sa.Column('recipient_id', sa.UUID(), nullable=True),  # NULL when broadcast
        sa.Column('type', sa.Enum('EMAIL', 'WHATSAPP', 'PUSH', 'SMS', name='notificationtype'), nullable=False),
        sa.Column('status', sa.Enum('PENDING', 'SENT', 'DELIVERED', 'FAILED', 'CANCELLED', name='notificationstatus'), nullable=True),
        sa.Column('priority', sa.Enum('LOW', 'NORMAL', 'HIGH', 'URGENT', name='notificationpriority'), nullable=True),
        sa.Column('subject', sa.String(length=255), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('html_content', sa.Text(), nullable=True),
        sa.Column('template_id', sa.String(length=100), nullable=True),
        sa.Column('template_data', sa.JSON(), nullable=True),
        sa.Column('recipient_email', sa.String(length=255), nullable=True),
        sa.Column('recipient_phone', sa.String(length=20), nullable=True),
        sa.Column('delivery_channel', sa.String(length=50), nullable=True),
        sa.Column('scheduled_at', sa.DateTime(), nullable=True),
        sa.Column('sent_at', sa.DateTime(), nullable=True),
        sa.Column('delivered_at', sa.DateTime(), nullable=True),
        sa.Column('log_metadata', sa.JSON(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('retry_count', sa.Integer(), nullable=True),
        sa.Column('max_retries', sa.Integer(), nullable=True),
        sa.Column('broadcast', sa.Boolean(), nullable=True),
        sa.Column('read', sa.Boolean(), nullable=True),
        sa.Column('read_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notifications_created_at'), 'notifications', ['created_at'], unique=False)
    op.create_index(op.f('ix_notifications_status'), 'notifications', ['status'], unique=False)
    op.create_index(op.f('ix_notifications_tenant_id'), 'notifications', ['tenant_id'], unique=False)
    op.create_table('whatsapp_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('template_key', sa.String(length=100), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('whatsapp_template_name', sa.String(length=255), nullable=False),
        sa.Column('whatsapp_template_id', sa.String(length=255), nullable=True),
        sa.Column('language_code', sa.String(length=10), nullable=True),
        sa.Column('header_text', sa.String(length=60), nullable=True),
        sa.Column('body_text', sa.Text(), nullable=False),
        sa.Column('footer_text', sa.String(length=60), nullable=True),
        sa.Column('parameter_count', sa.Integer(), nullable=True),
        sa.Column('parameters', sa.JSON(), nullable=True),
        sa.Column('approval_status', sa.String(length=20), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('template_key')
    )
    op.create_index(op.f('ix_whatsapp_templates_tenant_id'), 'whatsapp_templates', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Reverse upgrade(): drop every table in reverse creation order, then
    drop the PostgreSQL enum types that upgrade() created implicitly.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_whatsapp_templates_tenant_id'), table_name='whatsapp_templates')
    op.drop_table('whatsapp_templates')
    op.drop_index(op.f('ix_notifications_tenant_id'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_status'), table_name='notifications')
    op.drop_index(op.f('ix_notifications_created_at'), table_name='notifications')
    op.drop_table('notifications')
    op.drop_index(op.f('ix_notification_templates_tenant_id'), table_name='notification_templates')
    op.drop_table('notification_templates')
    op.drop_index(op.f('ix_notification_preferences_user_id'), table_name='notification_preferences')
    op.drop_index(op.f('ix_notification_preferences_tenant_id'), table_name='notification_preferences')
    op.drop_table('notification_preferences')
    op.drop_index(op.f('ix_notification_logs_notification_id'), table_name='notification_logs')
    op.drop_table('notification_logs')
    op.drop_index(op.f('ix_email_templates_tenant_id'), table_name='email_templates')
    op.drop_table('email_templates')
    # Bug fix: upgrade() implicitly creates the PostgreSQL types
    # notificationtype, notificationstatus and notificationpriority via its
    # sa.Enum() columns, but the autogenerated downgrade never dropped them.
    # Without these drops, a downgrade followed by upgrade fails with
    # DuplicateObject ("type ... already exists"). checkfirst=True makes each
    # drop a no-op when the type is absent, so re-running downgrade is safe.
    bind = op.get_bind()
    sa.Enum(name='notificationpriority').drop(bind, checkfirst=True)
    sa.Enum(name='notificationstatus').drop(bind, checkfirst=True)
    sa.Enum(name='notificationtype').drop(bind, checkfirst=True)
    # ### end Alembic commands ###

View File

@@ -9,13 +9,55 @@ from .customer import Customer, CustomerContact
from .order import CustomerOrder, OrderItem, OrderStatusHistory
from .procurement import ProcurementPlan, ProcurementRequirement
# Import enums
from .enums import (
CustomerType,
DeliveryMethod,
PaymentTerms,
PaymentMethod,
PaymentStatus,
CustomerSegment,
SalesChannel,
BusinessModel,
OrderType,
OrderSource,
OrderStatus,
DeliveryStatus,
ProcurementPlanType,
ProcurementStrategy,
PlanStatus,
PriorityLevel,
RequirementStatus,
RiskLevel,
)
# List all models for easier access
__all__ = [
# Models
"Customer",
"CustomerContact",
"CustomerContact",
"CustomerOrder",
"OrderItem",
"OrderStatusHistory",
"ProcurementPlan",
"ProcurementRequirement"
"ProcurementRequirement",
# Enums
"CustomerType",
"DeliveryMethod",
"PaymentTerms",
"PaymentMethod",
"PaymentStatus",
"CustomerSegment",
"SalesChannel",
"BusinessModel",
"OrderType",
"OrderSource",
"OrderStatus",
"DeliveryStatus",
"ProcurementPlanType",
"ProcurementStrategy",
"PlanStatus",
"PriorityLevel",
"RequirementStatus",
"RiskLevel",
]

View File

@@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.core.database import Base
from shared.database.base import Base
class Customer(Base):

View File

@@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.core.database import Base
from shared.database.base import Base
class CustomerOrder(Base):

View File

@@ -14,7 +14,7 @@ from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.core.database import Base
from shared.database.base import Base
class ProcurementPlan(Base):

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for orders service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('ORDERS_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,396 +0,0 @@
"""Initial schema for orders service
Revision ID: 001
Revises:
Create Date: 2025-09-30 18:00:00.000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial orders-service schema.

    Tables are created in foreign-key dependency order:
    customers -> customer_contacts / customer_orders -> order_items /
    order_status_history, then the standalone procurement_plans ->
    procurement_requirements pair. Each table's lookup indexes are
    created immediately after the table itself.
    """
    # Create customers table (root table; referenced by contacts and orders)
    op.create_table('customers',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('customer_code', sa.String(50), nullable=False),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('business_name', sa.String(200), nullable=True),
        sa.Column('customer_type', sa.String(50), nullable=True),
        sa.Column('email', sa.String(255), nullable=True),
        sa.Column('phone', sa.String(50), nullable=True),
        sa.Column('address_line1', sa.String(255), nullable=True),
        sa.Column('address_line2', sa.String(255), nullable=True),
        sa.Column('city', sa.String(100), nullable=True),
        sa.Column('state', sa.String(100), nullable=True),
        sa.Column('postal_code', sa.String(20), nullable=True),
        sa.Column('country', sa.String(100), nullable=True),
        sa.Column('tax_id', sa.String(50), nullable=True),
        sa.Column('business_license', sa.String(100), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('preferred_delivery_method', sa.String(50), nullable=True),
        sa.Column('payment_terms', sa.String(50), nullable=True),
        sa.Column('credit_limit', sa.Numeric(10, 2), nullable=True),
        sa.Column('discount_percentage', sa.Numeric(5, 2), nullable=True),
        sa.Column('customer_segment', sa.String(50), nullable=True),
        sa.Column('priority_level', sa.String(20), nullable=True),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        sa.Column('delivery_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('product_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # Denormalized purchase statistics, presumably maintained by the service — TODO confirm
        sa.Column('total_orders', sa.Integer(), nullable=True),
        sa.Column('total_spent', sa.Numeric(12, 2), nullable=True),
        sa.Column('average_order_value', sa.Numeric(10, 2), nullable=True),
        sa.Column('last_order_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customers_tenant_id'), 'customers', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_customers_customer_code'), 'customers', ['customer_code'], unique=False)
    op.create_index(op.f('ix_customers_name'), 'customers', ['name'], unique=False)
    op.create_index(op.f('ix_customers_email'), 'customers', ['email'], unique=False)
    op.create_index(op.f('ix_customers_phone'), 'customers', ['phone'], unique=False)

    # Create customer_contacts table (child of customers; rows die with the customer)
    op.create_table('customer_contacts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('customer_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('title', sa.String(100), nullable=True),
        # NOTE(review): String(10) looks short next to the other String(100) fields — confirm intended width
        sa.Column('department', sa.String(10), nullable=True),
        sa.Column('email', sa.String(255), nullable=True),
        sa.Column('phone', sa.String(50), nullable=True),
        sa.Column('mobile', sa.String(50), nullable=True),
        sa.Column('is_primary', sa.Boolean(), nullable=True),
        sa.Column('contact_for_orders', sa.Boolean(), nullable=True),
        sa.Column('contact_for_delivery', sa.Boolean(), nullable=True),
        sa.Column('contact_for_billing', sa.Boolean(), nullable=True),
        sa.Column('contact_for_support', sa.Boolean(), nullable=True),
        sa.Column('preferred_contact_method', sa.String(50), nullable=True),
        sa.Column('contact_time_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customer_contacts_customer_id'), 'customer_contacts', ['customer_id'], unique=False)
    op.create_index(op.f('ix_customer_contacts_name'), 'customer_contacts', ['name'], unique=False)
    op.create_index(op.f('ix_customer_contacts_email'), 'customer_contacts', ['email'], unique=False)

    # Create customer_orders table (child of customers; no ON DELETE action,
    # so customers with orders cannot be deleted)
    op.create_table('customer_orders',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('order_number', sa.String(50), nullable=False),
        sa.Column('customer_id', sa.UUID(), nullable=False),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('order_type', sa.String(50), nullable=True),
        sa.Column('priority', sa.String(20), nullable=True),
        sa.Column('order_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('requested_delivery_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('confirmed_delivery_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_delivery_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('delivery_method', sa.String(50), nullable=True),
        sa.Column('delivery_address', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('delivery_instructions', sa.Text(), nullable=True),
        sa.Column('delivery_window_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('delivery_window_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('subtotal', sa.Numeric(10, 2), nullable=True),
        sa.Column('discount_amount', sa.Numeric(10, 2), nullable=True),
        sa.Column('discount_percentage', sa.Numeric(5, 2), nullable=True),
        sa.Column('tax_amount', sa.Numeric(10, 2), nullable=True),
        sa.Column('delivery_fee', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_amount', sa.Numeric(10, 2), nullable=True),
        sa.Column('payment_status', sa.String(50), nullable=True),
        sa.Column('payment_method', sa.String(50), nullable=True),
        sa.Column('payment_terms', sa.String(50), nullable=True),
        sa.Column('payment_due_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        sa.Column('custom_requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('allergen_warnings', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('business_model', sa.String(50), nullable=True),
        sa.Column('estimated_business_model', sa.String(50), nullable=True),
        sa.Column('order_source', sa.String(50), nullable=True),
        sa.Column('sales_channel', sa.String(50), nullable=True),
        sa.Column('order_origin', sa.String(100), nullable=True),
        sa.Column('production_batch_id', sa.UUID(), nullable=True),
        sa.Column('fulfillment_location', sa.String(100), nullable=True),
        sa.Column('estimated_preparation_time', sa.Integer(), nullable=True),
        sa.Column('actual_preparation_time', sa.Integer(), nullable=True),
        sa.Column('customer_notified_confirmed', sa.Boolean(), nullable=True),
        sa.Column('customer_notified_ready', sa.Boolean(), nullable=True),
        sa.Column('customer_notified_delivered', sa.Boolean(), nullable=True),
        sa.Column('communication_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('quality_score', sa.Numeric(3, 1), nullable=True),
        sa.Column('customer_rating', sa.Integer(), nullable=True),
        sa.Column('customer_feedback', sa.Text(), nullable=True),
        sa.Column('cancellation_reason', sa.String(200), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_by', sa.UUID(), nullable=True),
        sa.Column('refund_amount', sa.Numeric(10, 2), nullable=True),
        sa.Column('refund_processed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.Column('order_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('order_number')
    )
    op.create_index(op.f('ix_customer_orders_tenant_id'), 'customer_orders', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_customer_orders_customer_id'), 'customer_orders', ['customer_id'], unique=False)
    op.create_index(op.f('ix_customer_orders_order_number'), 'customer_orders', ['order_number'], unique=False)
    op.create_index(op.f('ix_customer_orders_status'), 'customer_orders', ['status'], unique=False)
    op.create_index(op.f('ix_customer_orders_order_date'), 'customer_orders', ['order_date'], unique=False)

    # Create order_items table (lines of customer_orders; rows die with the order)
    op.create_table('order_items',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('order_id', sa.UUID(), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(200), nullable=False),
        sa.Column('product_sku', sa.String(100), nullable=True),
        sa.Column('product_category', sa.String(100), nullable=True),
        sa.Column('quantity', sa.Numeric(10, 3), nullable=True),
        sa.Column('unit_of_measure', sa.String(50), nullable=True),
        sa.Column('weight', sa.Numeric(10, 3), nullable=True),
        sa.Column('unit_price', sa.Numeric(10, 2), nullable=True),
        sa.Column('line_discount', sa.Numeric(10, 2), nullable=True),
        sa.Column('line_total', sa.Numeric(10, 2), nullable=True),
        sa.Column('product_specifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('customization_details', sa.Text(), nullable=True),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('production_requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('estimated_production_time', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('production_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('production_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('quality_checked', sa.Boolean(), nullable=True),
        sa.Column('quality_score', sa.Numeric(3, 1), nullable=True),
        sa.Column('ingredient_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('labor_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('overhead_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('margin', sa.Numeric(10, 2), nullable=True),
        sa.Column('reserved_inventory', sa.Boolean(), nullable=True),
        sa.Column('inventory_allocated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('customer_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['customer_orders.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_order_items_order_id'), 'order_items', ['order_id'], unique=False)
    op.create_index(op.f('ix_order_items_product_id'), 'order_items', ['product_id'], unique=False)
    op.create_index(op.f('ix_order_items_product_name'), 'order_items', ['product_name'], unique=False)

    # Create order_status_history table (audit trail of status transitions per order)
    op.create_table('order_status_history',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('order_id', sa.UUID(), nullable=False),
        sa.Column('from_status', sa.String(50), nullable=True),
        sa.Column('to_status', sa.String(50), nullable=False),
        sa.Column('change_reason', sa.String(200), nullable=True),
        sa.Column('event_type', sa.String(50), nullable=True),
        sa.Column('event_description', sa.Text(), nullable=True),
        sa.Column('event_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('changed_by', sa.UUID(), nullable=True),
        sa.Column('change_source', sa.String(50), nullable=True),
        sa.Column('changed_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('customer_notified', sa.Boolean(), nullable=True),
        sa.Column('notification_method', sa.String(50), nullable=True),
        sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['customer_orders.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_order_status_history_order_id'), 'order_status_history', ['order_id'], unique=False)
    op.create_index(op.f('ix_order_status_history_to_status'), 'order_status_history', ['to_status'], unique=False)
    op.create_index(op.f('ix_order_status_history_event_type'), 'order_status_history', ['event_type'], unique=False)
    op.create_index(op.f('ix_order_status_history_changed_at'), 'order_status_history', ['changed_at'], unique=False)

    # Create procurement_plans table (standalone root of the procurement pair)
    op.create_table('procurement_plans',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan_number', sa.String(50), nullable=False),
        sa.Column('plan_date', sa.Date(), nullable=True),
        sa.Column('plan_period_start', sa.Date(), nullable=True),
        sa.Column('plan_period_end', sa.Date(), nullable=True),
        sa.Column('planning_horizon_days', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('plan_type', sa.String(50), nullable=True),
        sa.Column('priority', sa.String(20), nullable=True),
        sa.Column('business_model', sa.String(50), nullable=True),
        sa.Column('procurement_strategy', sa.String(50), nullable=True),
        sa.Column('total_requirements', sa.Integer(), nullable=True),
        sa.Column('total_estimated_cost', sa.Numeric(12, 2), nullable=True),
        sa.Column('total_approved_cost', sa.Numeric(12, 2), nullable=True),
        sa.Column('cost_variance', sa.Numeric(12, 2), nullable=True),
        sa.Column('total_demand_orders', sa.Integer(), nullable=True),
        sa.Column('total_demand_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('total_production_requirements', sa.Numeric(12, 3), nullable=True),
        sa.Column('safety_stock_buffer', sa.Numeric(5, 2), nullable=True),
        sa.Column('primary_suppliers_count', sa.Integer(), nullable=True),
        sa.Column('backup_suppliers_count', sa.Integer(), nullable=True),
        sa.Column('supplier_diversification_score', sa.Numeric(3, 1), nullable=True),
        sa.Column('supply_risk_level', sa.String(20), nullable=True),
        sa.Column('demand_forecast_confidence', sa.Numeric(3, 1), nullable=True),
        sa.Column('seasonality_adjustment', sa.Numeric(5, 2), nullable=True),
        sa.Column('fulfillment_rate', sa.Numeric(5, 2), nullable=True),
        sa.Column('on_time_delivery_rate', sa.Numeric(5, 2), nullable=True),
        sa.Column('cost_accuracy', sa.Numeric(5, 2), nullable=True),
        sa.Column('quality_score', sa.Numeric(3, 1), nullable=True),
        sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('production_schedules', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('inventory_snapshots', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('stakeholder_notifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('approval_workflow', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('special_requirements', sa.Text(), nullable=True),
        sa.Column('seasonal_adjustments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('emergency_provisions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('erp_reference', sa.String(100), nullable=True),
        sa.Column('supplier_portal_reference', sa.String(100), nullable=True),
        sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('approved_by', sa.UUID(), nullable=True),
        sa.Column('execution_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('execution_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.Column('plan_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('plan_number')
    )
    op.create_index(op.f('ix_procurement_plans_tenant_id'), 'procurement_plans', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_procurement_plans_plan_number'), 'procurement_plans', ['plan_number'], unique=False)
    op.create_index(op.f('ix_procurement_plans_status'), 'procurement_plans', ['status'], unique=False)
    op.create_index(op.f('ix_procurement_plans_plan_date'), 'procurement_plans', ['plan_date'], unique=False)

    # Create procurement_requirements table (child of procurement_plans;
    # rows die with the plan)
    op.create_table('procurement_requirements',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('plan_id', sa.UUID(), nullable=False),
        sa.Column('requirement_number', sa.String(50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(200), nullable=False),
        sa.Column('product_sku', sa.String(100), nullable=True),
        sa.Column('product_category', sa.String(100), nullable=True),
        sa.Column('product_type', sa.String(50), nullable=True),
        sa.Column('required_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('unit_of_measure', sa.String(50), nullable=True),
        sa.Column('safety_stock_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('total_quantity_needed', sa.Numeric(12, 3), nullable=True),
        sa.Column('current_stock_level', sa.Numeric(12, 3), nullable=True),
        sa.Column('reserved_stock', sa.Numeric(12, 3), nullable=True),
        sa.Column('available_stock', sa.Numeric(12, 3), nullable=True),
        sa.Column('net_requirement', sa.Numeric(12, 3), nullable=True),
        sa.Column('order_demand', sa.Numeric(12, 3), nullable=True),
        sa.Column('production_demand', sa.Numeric(12, 3), nullable=True),
        sa.Column('forecast_demand', sa.Numeric(12, 3), nullable=True),
        sa.Column('buffer_demand', sa.Numeric(12, 3), nullable=True),
        sa.Column('preferred_supplier_id', sa.UUID(), nullable=True),
        sa.Column('backup_supplier_id', sa.UUID(), nullable=True),
        sa.Column('supplier_name', sa.String(200), nullable=True),
        sa.Column('supplier_lead_time_days', sa.Integer(), nullable=True),
        sa.Column('minimum_order_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('estimated_unit_cost', sa.Numeric(10, 4), nullable=True),
        sa.Column('estimated_total_cost', sa.Numeric(12, 2), nullable=True),
        sa.Column('last_purchase_cost', sa.Numeric(10, 4), nullable=True),
        sa.Column('cost_variance', sa.Numeric(10, 2), nullable=True),
        sa.Column('required_by_date', sa.Date(), nullable=True),
        sa.Column('lead_time_buffer_days', sa.Integer(), nullable=True),
        sa.Column('suggested_order_date', sa.Date(), nullable=True),
        sa.Column('latest_order_date', sa.Date(), nullable=True),
        sa.Column('quality_specifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('special_requirements', sa.Text(), nullable=True),
        sa.Column('storage_requirements', sa.String(200), nullable=True),
        sa.Column('shelf_life_days', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('priority', sa.String(20), nullable=True),
        sa.Column('risk_level', sa.String(20), nullable=True),
        sa.Column('purchase_order_id', sa.UUID(), nullable=True),
        sa.Column('purchase_order_number', sa.String(50), nullable=True),
        sa.Column('ordered_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('ordered_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expected_delivery_date', sa.Date(), nullable=True),
        sa.Column('actual_delivery_date', sa.Date(), nullable=True),
        sa.Column('received_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('delivery_status', sa.String(50), nullable=True),
        sa.Column('fulfillment_rate', sa.Numeric(5, 2), nullable=True),
        sa.Column('on_time_delivery', sa.Boolean(), nullable=True),
        sa.Column('quality_rating', sa.Numeric(3, 1), nullable=True),
        sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('source_production_batches', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('demand_analysis', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('approved_quantity', sa.Numeric(12, 3), nullable=True),
        sa.Column('approved_cost', sa.Numeric(12, 2), nullable=True),
        sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('approved_by', sa.UUID(), nullable=True),
        sa.Column('procurement_notes', sa.Text(), nullable=True),
        sa.Column('supplier_communication', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('requirement_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['plan_id'], ['procurement_plans.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_procurement_requirements_plan_id'), 'procurement_requirements', ['plan_id'], unique=False)
    op.create_index(op.f('ix_procurement_requirements_requirement_number'), 'procurement_requirements', ['requirement_number'], unique=False)
    op.create_index(op.f('ix_procurement_requirements_product_id'), 'procurement_requirements', ['product_id'], unique=False)
    op.create_index(op.f('ix_procurement_requirements_status'), 'procurement_requirements', ['status'], unique=False)
    op.create_index(op.f('ix_procurement_requirements_required_by_date'), 'procurement_requirements', ['required_by_date'], unique=False)
def downgrade() -> None:
    """Tear down the initial orders-service schema.

    Tables are dropped in reverse creation order so that foreign-key
    dependents are removed before the tables they reference; each
    table's indexes are dropped before the table itself.
    """
    # (table, [index names in drop order]) — reverse of the upgrade() order.
    teardown_order = [
        ('procurement_requirements', [
            'ix_procurement_requirements_required_by_date',
            'ix_procurement_requirements_status',
            'ix_procurement_requirements_product_id',
            'ix_procurement_requirements_requirement_number',
            'ix_procurement_requirements_plan_id',
        ]),
        ('procurement_plans', [
            'ix_procurement_plans_plan_date',
            'ix_procurement_plans_status',
            'ix_procurement_plans_plan_number',
            'ix_procurement_plans_tenant_id',
        ]),
        ('order_status_history', [
            'ix_order_status_history_changed_at',
            'ix_order_status_history_event_type',
            'ix_order_status_history_to_status',
            'ix_order_status_history_order_id',
        ]),
        ('order_items', [
            'ix_order_items_product_name',
            'ix_order_items_product_id',
            'ix_order_items_order_id',
        ]),
        ('customer_orders', [
            'ix_customer_orders_order_date',
            'ix_customer_orders_status',
            'ix_customer_orders_order_number',
            'ix_customer_orders_customer_id',
            'ix_customer_orders_tenant_id',
        ]),
        ('customer_contacts', [
            'ix_customer_contacts_email',
            'ix_customer_contacts_name',
            'ix_customer_contacts_customer_id',
        ]),
        ('customers', [
            'ix_customers_phone',
            'ix_customers_email',
            'ix_customers_name',
            'ix_customers_customer_code',
            'ix_customers_tenant_id',
        ]),
    ]
    for table_name, index_names in teardown_order:
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table_name)
        op.drop_table(table_name)

View File

@@ -0,0 +1,355 @@
"""initial_schema_20251001_1118
Revision ID: 07e130577d3f
Revises:
Create Date: 2025-10-01 11:18:52.812809+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '07e130577d3f'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial customer / order / procurement schema.

    Tables are created in foreign-key dependency order: the parent tables
    (``customers``, ``procurement_plans``) first, then their child tables
    (``customer_contacts``, ``customer_orders``, ``procurement_requirements``,
    ``order_items``, ``order_status_history``).  Do not reorder these
    statements — each FK target must exist before the referencing table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- customers: master customer record (multi-tenant via tenant_id) ---
    op.create_table('customers',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('customer_code', sa.String(length=50), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('business_name', sa.String(length=200), nullable=True),
        sa.Column('customer_type', sa.String(length=50), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('phone', sa.String(length=50), nullable=True),
        sa.Column('address_line1', sa.String(length=255), nullable=True),
        sa.Column('address_line2', sa.String(length=255), nullable=True),
        sa.Column('city', sa.String(length=100), nullable=True),
        sa.Column('state', sa.String(length=100), nullable=True),
        sa.Column('postal_code', sa.String(length=20), nullable=True),
        sa.Column('country', sa.String(length=100), nullable=False),
        sa.Column('tax_id', sa.String(length=50), nullable=True),
        sa.Column('business_license', sa.String(length=100), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('preferred_delivery_method', sa.String(length=50), nullable=False),
        sa.Column('payment_terms', sa.String(length=50), nullable=False),
        sa.Column('credit_limit', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('discount_percentage', sa.Numeric(precision=5, scale=2), nullable=False),
        sa.Column('customer_segment', sa.String(length=50), nullable=False),
        sa.Column('priority_level', sa.String(length=20), nullable=False),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        sa.Column('delivery_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('product_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # Denormalized order statistics (presumably maintained by the service
        # on order events — confirm against application code).
        sa.Column('total_orders', sa.Integer(), nullable=False),
        sa.Column('total_spent', sa.Numeric(precision=12, scale=2), nullable=False),
        sa.Column('average_order_value', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('last_order_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # customer_code is indexed non-unique (uniqueness, if any, is per tenant
    # and enforced at the application layer — TODO confirm).
    op.create_index(op.f('ix_customers_customer_code'), 'customers', ['customer_code'], unique=False)
    op.create_index(op.f('ix_customers_tenant_id'), 'customers', ['tenant_id'], unique=False)
    # --- procurement_plans: a planning-period procurement plan header ---
    op.create_table('procurement_plans',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan_number', sa.String(length=50), nullable=False),
        sa.Column('plan_date', sa.Date(), nullable=False),
        sa.Column('plan_period_start', sa.Date(), nullable=False),
        sa.Column('plan_period_end', sa.Date(), nullable=False),
        sa.Column('planning_horizon_days', sa.Integer(), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('plan_type', sa.String(length=50), nullable=False),
        sa.Column('priority', sa.String(length=20), nullable=False),
        sa.Column('business_model', sa.String(length=50), nullable=True),
        sa.Column('procurement_strategy', sa.String(length=50), nullable=False),
        # Aggregate cost / demand figures for the whole plan.
        sa.Column('total_requirements', sa.Integer(), nullable=False),
        sa.Column('total_estimated_cost', sa.Numeric(precision=12, scale=2), nullable=False),
        sa.Column('total_approved_cost', sa.Numeric(precision=12, scale=2), nullable=False),
        sa.Column('cost_variance', sa.Numeric(precision=12, scale=2), nullable=False),
        sa.Column('total_demand_orders', sa.Integer(), nullable=False),
        sa.Column('total_demand_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('total_production_requirements', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('safety_stock_buffer', sa.Numeric(precision=5, scale=2), nullable=False),
        # Supplier-risk metrics.
        sa.Column('primary_suppliers_count', sa.Integer(), nullable=False),
        sa.Column('backup_suppliers_count', sa.Integer(), nullable=False),
        sa.Column('supplier_diversification_score', sa.Numeric(precision=3, scale=1), nullable=True),
        sa.Column('supply_risk_level', sa.String(length=20), nullable=False),
        sa.Column('demand_forecast_confidence', sa.Numeric(precision=3, scale=1), nullable=True),
        sa.Column('seasonality_adjustment', sa.Numeric(precision=5, scale=2), nullable=False),
        # Approval / execution lifecycle timestamps.
        sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('approved_by', sa.UUID(), nullable=True),
        sa.Column('execution_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('execution_completed_at', sa.DateTime(timezone=True), nullable=True),
        # Post-execution performance KPIs (null until measured).
        sa.Column('fulfillment_rate', sa.Numeric(precision=5, scale=2), nullable=True),
        sa.Column('on_time_delivery_rate', sa.Numeric(precision=5, scale=2), nullable=True),
        sa.Column('cost_accuracy', sa.Numeric(precision=5, scale=2), nullable=True),
        sa.Column('quality_score', sa.Numeric(precision=3, scale=1), nullable=True),
        # Traceability / workflow payloads stored as JSONB documents.
        sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('production_schedules', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('inventory_snapshots', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('stakeholder_notifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('approval_workflow', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('special_requirements', sa.Text(), nullable=True),
        sa.Column('seasonal_adjustments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('emergency_provisions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # External-system references.
        sa.Column('erp_reference', sa.String(length=100), nullable=True),
        sa.Column('supplier_portal_reference', sa.String(length=100), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.Column('plan_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_procurement_plans_plan_date'), 'procurement_plans', ['plan_date'], unique=False)
    # plan_number is globally unique (unlike customer_code above).
    op.create_index(op.f('ix_procurement_plans_plan_number'), 'procurement_plans', ['plan_number'], unique=True)
    op.create_index(op.f('ix_procurement_plans_status'), 'procurement_plans', ['status'], unique=False)
    op.create_index(op.f('ix_procurement_plans_tenant_id'), 'procurement_plans', ['tenant_id'], unique=False)
    # --- customer_contacts: per-customer contact people (cascade-deleted) ---
    op.create_table('customer_contacts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('customer_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('title', sa.String(length=100), nullable=True),
        sa.Column('department', sa.String(length=100), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('phone', sa.String(length=50), nullable=True),
        sa.Column('mobile', sa.String(length=50), nullable=True),
        # Role flags: which workflows this contact should be reached for.
        sa.Column('is_primary', sa.Boolean(), nullable=False),
        sa.Column('contact_for_orders', sa.Boolean(), nullable=False),
        sa.Column('contact_for_delivery', sa.Boolean(), nullable=False),
        sa.Column('contact_for_billing', sa.Boolean(), nullable=False),
        sa.Column('contact_for_support', sa.Boolean(), nullable=False),
        sa.Column('preferred_contact_method', sa.String(length=50), nullable=False),
        sa.Column('contact_time_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        # Contacts are removed together with their customer.
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # --- customer_orders: order header (no CASCADE: orders must not vanish
    # silently when a customer row is deleted) ---
    op.create_table('customer_orders',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('order_number', sa.String(length=50), nullable=False),
        sa.Column('customer_id', sa.UUID(), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('order_type', sa.String(length=50), nullable=False),
        sa.Column('priority', sa.String(length=20), nullable=False),
        sa.Column('order_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        # Delivery scheduling: requested -> confirmed -> actual.
        sa.Column('requested_delivery_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('confirmed_delivery_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_delivery_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('delivery_method', sa.String(length=50), nullable=False),
        sa.Column('delivery_address', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('delivery_instructions', sa.Text(), nullable=True),
        sa.Column('delivery_window_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('delivery_window_end', sa.DateTime(timezone=True), nullable=True),
        # Monetary breakdown; total_amount is the customer-facing total.
        sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('discount_percentage', sa.Numeric(precision=5, scale=2), nullable=False),
        sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('delivery_fee', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('payment_status', sa.String(length=50), nullable=False),
        sa.Column('payment_method', sa.String(length=50), nullable=True),
        sa.Column('payment_terms', sa.String(length=50), nullable=False),
        sa.Column('payment_due_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        sa.Column('custom_requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('allergen_warnings', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('business_model', sa.String(length=50), nullable=True),
        sa.Column('estimated_business_model', sa.String(length=50), nullable=True),
        sa.Column('order_source', sa.String(length=50), nullable=False),
        sa.Column('sales_channel', sa.String(length=50), nullable=False),
        sa.Column('order_origin', sa.String(length=100), nullable=True),
        # Fulfillment / production tracking.
        sa.Column('production_batch_id', sa.UUID(), nullable=True),
        sa.Column('fulfillment_location', sa.String(length=100), nullable=True),
        sa.Column('estimated_preparation_time', sa.Integer(), nullable=True),
        sa.Column('actual_preparation_time', sa.Integer(), nullable=True),
        # Customer-notification flags per lifecycle milestone.
        sa.Column('customer_notified_confirmed', sa.Boolean(), nullable=False),
        sa.Column('customer_notified_ready', sa.Boolean(), nullable=False),
        sa.Column('customer_notified_delivered', sa.Boolean(), nullable=False),
        sa.Column('communication_preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('quality_score', sa.Numeric(precision=3, scale=1), nullable=True),
        sa.Column('customer_rating', sa.Integer(), nullable=True),
        sa.Column('customer_feedback', sa.Text(), nullable=True),
        # Cancellation / refund audit fields.
        sa.Column('cancellation_reason', sa.String(length=200), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_by', sa.UUID(), nullable=True),
        sa.Column('refund_amount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('refund_processed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.Column('updated_by', sa.UUID(), nullable=True),
        sa.Column('order_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customer_orders_customer_id'), 'customer_orders', ['customer_id'], unique=False)
    op.create_index(op.f('ix_customer_orders_order_number'), 'customer_orders', ['order_number'], unique=True)
    op.create_index(op.f('ix_customer_orders_status'), 'customer_orders', ['status'], unique=False)
    op.create_index(op.f('ix_customer_orders_tenant_id'), 'customer_orders', ['tenant_id'], unique=False)
    # --- procurement_requirements: one line item of a procurement plan ---
    op.create_table('procurement_requirements',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('plan_id', sa.UUID(), nullable=False),
        sa.Column('requirement_number', sa.String(length=50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(length=200), nullable=False),
        sa.Column('product_sku', sa.String(length=100), nullable=True),
        sa.Column('product_category', sa.String(length=100), nullable=True),
        sa.Column('product_type', sa.String(length=50), nullable=False),
        # Quantity math: required + safety stock = total needed;
        # total needed - available stock = net requirement.
        sa.Column('required_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('unit_of_measure', sa.String(length=50), nullable=False),
        sa.Column('safety_stock_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('total_quantity_needed', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('current_stock_level', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('reserved_stock', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('available_stock', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('net_requirement', sa.Numeric(precision=12, scale=3), nullable=False),
        # Demand decomposition by source.
        sa.Column('order_demand', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('production_demand', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('forecast_demand', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('buffer_demand', sa.Numeric(precision=12, scale=3), nullable=False),
        # Supplier selection and costing.
        sa.Column('preferred_supplier_id', sa.UUID(), nullable=True),
        sa.Column('backup_supplier_id', sa.UUID(), nullable=True),
        sa.Column('supplier_name', sa.String(length=200), nullable=True),
        sa.Column('supplier_lead_time_days', sa.Integer(), nullable=True),
        sa.Column('minimum_order_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
        sa.Column('estimated_unit_cost', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('estimated_total_cost', sa.Numeric(precision=12, scale=2), nullable=True),
        sa.Column('last_purchase_cost', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('cost_variance', sa.Numeric(precision=10, scale=2), nullable=False),
        # Ordering window: suggested/latest order dates derived from lead time.
        sa.Column('required_by_date', sa.Date(), nullable=False),
        sa.Column('lead_time_buffer_days', sa.Integer(), nullable=False),
        sa.Column('suggested_order_date', sa.Date(), nullable=False),
        sa.Column('latest_order_date', sa.Date(), nullable=False),
        sa.Column('quality_specifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('special_requirements', sa.Text(), nullable=True),
        sa.Column('storage_requirements', sa.String(length=200), nullable=True),
        sa.Column('shelf_life_days', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('priority', sa.String(length=20), nullable=False),
        sa.Column('risk_level', sa.String(length=20), nullable=False),
        # Purchase-order fulfillment tracking.
        sa.Column('purchase_order_id', sa.UUID(), nullable=True),
        sa.Column('purchase_order_number', sa.String(length=50), nullable=True),
        sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('ordered_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expected_delivery_date', sa.Date(), nullable=True),
        sa.Column('actual_delivery_date', sa.Date(), nullable=True),
        sa.Column('received_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
        sa.Column('delivery_status', sa.String(length=50), nullable=False),
        sa.Column('fulfillment_rate', sa.Numeric(precision=5, scale=2), nullable=True),
        sa.Column('on_time_delivery', sa.Boolean(), nullable=True),
        sa.Column('quality_rating', sa.Numeric(precision=3, scale=1), nullable=True),
        # Traceability back to demand sources, plus approval audit trail.
        sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('source_production_batches', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('demand_analysis', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('approved_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
        sa.Column('approved_cost', sa.Numeric(precision=12, scale=2), nullable=True),
        sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('approved_by', sa.UUID(), nullable=True),
        sa.Column('procurement_notes', sa.Text(), nullable=True),
        sa.Column('supplier_communication', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('requirement_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # Requirements are removed together with their plan.
        sa.ForeignKeyConstraint(['plan_id'], ['procurement_plans.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_procurement_requirements_product_id'), 'procurement_requirements', ['product_id'], unique=False)
    # requirement_number is non-unique (presumably unique only within a plan
    # — TODO confirm against the model definition).
    op.create_index(op.f('ix_procurement_requirements_requirement_number'), 'procurement_requirements', ['requirement_number'], unique=False)
    # --- order_items: order line items (cascade-deleted with their order) ---
    op.create_table('order_items',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('order_id', sa.UUID(), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        # Product details denormalized onto the line for historical accuracy.
        sa.Column('product_name', sa.String(length=200), nullable=False),
        sa.Column('product_sku', sa.String(length=100), nullable=True),
        sa.Column('product_category', sa.String(length=100), nullable=True),
        sa.Column('quantity', sa.Numeric(precision=10, scale=3), nullable=False),
        sa.Column('unit_of_measure', sa.String(length=50), nullable=False),
        sa.Column('weight', sa.Numeric(precision=10, scale=3), nullable=True),
        sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('line_discount', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('line_total', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('product_specifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('customization_details', sa.Text(), nullable=True),
        sa.Column('special_instructions', sa.Text(), nullable=True),
        # Production linkage and per-line lifecycle.
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('production_requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('estimated_production_time', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('production_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('production_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('quality_checked', sa.Boolean(), nullable=False),
        sa.Column('quality_score', sa.Numeric(precision=3, scale=1), nullable=True),
        # Cost breakdown and margin per line.
        sa.Column('ingredient_cost', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('labor_cost', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('overhead_cost', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('margin', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('reserved_inventory', sa.Boolean(), nullable=False),
        sa.Column('inventory_allocated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('customer_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['customer_orders.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_order_items_product_id'), 'order_items', ['product_id'], unique=False)
    # --- order_status_history: append-only audit of order status changes ---
    op.create_table('order_status_history',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('order_id', sa.UUID(), nullable=False),
        # from_status is nullable: the first transition has no prior status.
        sa.Column('from_status', sa.String(length=50), nullable=True),
        sa.Column('to_status', sa.String(length=50), nullable=False),
        sa.Column('change_reason', sa.String(length=200), nullable=True),
        sa.Column('event_type', sa.String(length=50), nullable=False),
        sa.Column('event_description', sa.Text(), nullable=True),
        sa.Column('event_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('changed_by', sa.UUID(), nullable=True),
        sa.Column('change_source', sa.String(length=50), nullable=False),
        sa.Column('changed_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('customer_notified', sa.Boolean(), nullable=False),
        sa.Column('notification_method', sa.String(length=50), nullable=True),
        sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['customer_orders.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the initial schema, exactly reversing :func:`upgrade`.

    Child tables are dropped before their FK parents (history/items/
    requirements before orders/plans, contacts/orders before customers);
    do not reorder.  Every index created in ``upgrade`` is dropped here.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('order_status_history')
    op.drop_index(op.f('ix_order_items_product_id'), table_name='order_items')
    op.drop_table('order_items')
    op.drop_index(op.f('ix_procurement_requirements_requirement_number'), table_name='procurement_requirements')
    op.drop_index(op.f('ix_procurement_requirements_product_id'), table_name='procurement_requirements')
    op.drop_table('procurement_requirements')
    op.drop_index(op.f('ix_customer_orders_tenant_id'), table_name='customer_orders')
    op.drop_index(op.f('ix_customer_orders_status'), table_name='customer_orders')
    op.drop_index(op.f('ix_customer_orders_order_number'), table_name='customer_orders')
    op.drop_index(op.f('ix_customer_orders_customer_id'), table_name='customer_orders')
    op.drop_table('customer_orders')
    op.drop_table('customer_contacts')
    op.drop_index(op.f('ix_procurement_plans_tenant_id'), table_name='procurement_plans')
    op.drop_index(op.f('ix_procurement_plans_status'), table_name='procurement_plans')
    op.drop_index(op.f('ix_procurement_plans_plan_number'), table_name='procurement_plans')
    op.drop_index(op.f('ix_procurement_plans_plan_date'), table_name='procurement_plans')
    op.drop_table('procurement_plans')
    op.drop_index(op.f('ix_customers_tenant_id'), table_name='customers')
    op.drop_index(op.f('ix_customers_customer_code'), table_name='customers')
    op.drop_table('customers')
    # ### end Alembic commands ###

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for pos service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('POS_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,434 +0,0 @@
"""Initial schema for POS service
Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial schema for the POS service.

    Builds six tables — pos_configurations, pos_transactions,
    pos_transaction_items, pos_sync_logs, pos_webhooks, pos_webhook_logs —
    together with their autogenerated (ix_*) and hand-named (idx_*)
    indexes.  pos_configurations is created first because the other
    tables reference it via foreign keys.
    """
    # Create pos_configurations table
    # NOTE(review): despite its name, this table's columns describe
    # traffic/mobility measurements (traffic_volume, congestion_level,
    # measurement_point_*, pedestrian_count) rather than POS
    # configuration — looks copy-pasted from another service's
    # migration; confirm against app.models.
    op.create_table('pos_configurations',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('location_id', sa.String(100), nullable=False),
        sa.Column('city', sa.String(50), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('traffic_volume', sa.Integer(), nullable=True),
        sa.Column('congestion_level', sa.String(20), nullable=True),
        sa.Column('average_speed', sa.Float(), nullable=True),
        sa.Column('occupation_percentage', sa.Float(), nullable=True),
        sa.Column('load_percentage', sa.Float(), nullable=True),
        sa.Column('pedestrian_count', sa.Integer(), nullable=True),
        sa.Column('measurement_point_id', sa.String(100), nullable=True),
        sa.Column('measurement_point_name', sa.String(500), nullable=True),
        sa.Column('measurement_point_type', sa.String(50), nullable=True),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('district', sa.String(100), nullable=True),
        sa.Column('zone', sa.String(100), nullable=True),
        sa.Column('source', sa.String(50), nullable=False),
        sa.Column('data_quality_score', sa.Float(), nullable=True),
        sa.Column('is_synthetic', sa.Boolean(), nullable=True),
        sa.Column('has_pedestrian_inference', sa.Boolean(), nullable=True),
        sa.Column('city_specific_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('raw_data', sa.Text(), nullable=True),
        # Timestamps default to UTC now() on the server side.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Single-column lookups plus composite indexes for the common
    # (location/city/tenant, date) query patterns.
    op.create_index(op.f('ix_pos_configurations_tenant_id'), 'pos_configurations', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_configurations_location_id'), 'pos_configurations', ['location_id'], unique=False)
    op.create_index(op.f('ix_pos_configurations_city'), 'pos_configurations', ['city'], unique=False)
    op.create_index(op.f('ix_pos_configurations_date'), 'pos_configurations', ['date'], unique=False)
    op.create_index('idx_pos_location_date', 'pos_configurations', ['location_id', 'date'], unique=False)
    op.create_index('idx_pos_city_date', 'pos_configurations', ['city', 'date'], unique=False)
    op.create_index('idx_pos_tenant_date', 'pos_configurations', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_pos_city_location', 'pos_configurations', ['city', 'location_id'], unique=False)
    op.create_index('idx_pos_measurement_point', 'pos_configurations', ['city', 'measurement_point_id'], unique=False)
    op.create_index('idx_pos_district_date', 'pos_configurations', ['city', 'district', 'date'], unique=False)
    op.create_index('idx_pos_training', 'pos_configurations', ['tenant_id', 'city', 'date', 'is_synthetic'], unique=False)
    op.create_index('idx_pos_quality', 'pos_configurations', ['city', 'data_quality_score', 'date'], unique=False)

    # Create pos_transactions table
    # One row per POS transaction pulled from an external system; tracks
    # amounts, sync state into the sales service, and dedup linkage.
    op.create_table('pos_transactions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('pos_config_id', sa.UUID(), nullable=False),
        sa.Column('external_transaction_id', sa.String(255), nullable=False),
        sa.Column('external_order_id', sa.String(255), nullable=True),
        sa.Column('pos_system', sa.String(50), nullable=False),
        sa.Column('transaction_type', sa.String(50), nullable=False),
        sa.Column('status', sa.String(50), nullable=False),
        sa.Column('subtotal', sa.Numeric(10, 2), nullable=False),
        sa.Column('tax_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('tip_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('total_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('currency', sa.String(3), nullable=False),
        sa.Column('payment_method', sa.String(50), nullable=True),
        sa.Column('payment_status', sa.String(50), nullable=True),
        sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('pos_created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('pos_updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('location_id', sa.String(100), nullable=True),
        sa.Column('location_name', sa.String(255), nullable=True),
        sa.Column('staff_id', sa.String(100), nullable=True),
        sa.Column('staff_name', sa.String(255), nullable=True),
        sa.Column('customer_id', sa.String(100), nullable=True),
        sa.Column('customer_email', sa.String(255), nullable=True),
        sa.Column('customer_phone', sa.String(50), nullable=True),
        sa.Column('order_type', sa.String(50), nullable=True),
        sa.Column('table_number', sa.String(20), nullable=True),
        sa.Column('receipt_number', sa.String(100), nullable=True),
        # Sync bookkeeping for pushing this transaction into sales records.
        sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
        sa.Column('sales_record_id', sa.UUID(), nullable=True),
        sa.Column('sync_attempted_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sync_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sync_error', sa.Text(), nullable=True),
        sa.Column('sync_retry_count', sa.Integer(), nullable=False),
        sa.Column('raw_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('is_processed', sa.Boolean(), nullable=False),
        sa.Column('processing_error', sa.Text(), nullable=True),
        # Duplicate detection: duplicate_of points at the canonical row.
        sa.Column('is_duplicate', sa.Boolean(), nullable=False),
        sa.Column('duplicate_of', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pos_transactions_tenant_id'), 'pos_transactions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_config_id'), 'pos_transactions', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_transaction_id'), 'pos_transactions', ['external_transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_order_id'), 'pos_transactions', ['external_order_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_system'), 'pos_transactions', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_transactions_transaction_type'), 'pos_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_pos_transactions_status'), 'pos_transactions', ['status'], unique=False)
    op.create_index(op.f('ix_pos_transactions_transaction_date'), 'pos_transactions', ['transaction_date'], unique=False)
    op.create_index(op.f('ix_pos_transactions_location_id'), 'pos_transactions', ['location_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_customer_id'), 'pos_transactions', ['customer_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_sales_record_id'), 'pos_transactions', ['sales_record_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_is_synced_to_sales'), 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index(op.f('ix_pos_transactions_is_processed'), 'pos_transactions', ['is_processed'], unique=False)
    op.create_index(op.f('ix_pos_transactions_is_duplicate'), 'pos_transactions', ['is_duplicate'], unique=False)
    op.create_index(op.f('ix_pos_transactions_duplicate_of'), 'pos_transactions', ['duplicate_of'], unique=False)
    # NOTE(review): several idx_* indexes below duplicate the coverage of
    # the single-column ix_* indexes above (e.g. sync_status, processed,
    # location, customer) — possibly redundant; verify intent.
    op.create_index('idx_pos_transaction_tenant_date', 'pos_transactions', ['tenant_id', 'transaction_date'], unique=False)
    op.create_index('idx_pos_transaction_external_id', 'pos_transactions', ['pos_system', 'external_transaction_id'], unique=False)
    op.create_index('idx_pos_transaction_sync_status', 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_transaction_processed', 'pos_transactions', ['is_processed'], unique=False)
    op.create_index('idx_pos_transaction_location', 'pos_transactions', ['location_id'], unique=False)
    op.create_index('idx_pos_transaction_customer', 'pos_transactions', ['customer_id'], unique=False)

    # Create pos_transaction_items table
    # Line items of a transaction; cascade-deleted with their parent.
    op.create_table('pos_transaction_items',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('transaction_id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('external_item_id', sa.String(255), nullable=True),
        sa.Column('sku', sa.String(100), nullable=True),
        sa.Column('product_name', sa.String(255), nullable=False),
        sa.Column('product_category', sa.String(100), nullable=True),
        sa.Column('product_subcategory', sa.String(100), nullable=True),
        sa.Column('quantity', sa.Numeric(10, 3), nullable=False),
        sa.Column('unit_price', sa.Numeric(10, 2), nullable=False),
        sa.Column('total_price', sa.Numeric(10, 2), nullable=False),
        sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('tax_amount', sa.Numeric(10, 2), nullable=False),
        sa.Column('modifiers', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        # Mapping into the inventory service's product catalog.
        sa.Column('inventory_product_id', sa.UUID(), nullable=True),
        sa.Column('is_mapped_to_inventory', sa.Boolean(), nullable=False),
        sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
        sa.Column('sync_error', sa.Text(), nullable=True),
        sa.Column('raw_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['transaction_id'], ['pos_transactions.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pos_transaction_items_transaction_id'), 'pos_transaction_items', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_tenant_id'), 'pos_transaction_items', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_external_item_id'), 'pos_transaction_items', ['external_item_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_sku'), 'pos_transaction_items', ['sku'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_product_name'), 'pos_transaction_items', ['product_name'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_product_category'), 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_inventory_product_id'), 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_is_mapped_to_inventory'), 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_is_synced_to_sales'), 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_item_transaction', 'pos_transaction_items', ['transaction_id'], unique=False)
    op.create_index('idx_pos_item_product', 'pos_transaction_items', ['product_name'], unique=False)
    op.create_index('idx_pos_item_category', 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index('idx_pos_item_sku', 'pos_transaction_items', ['sku'], unique=False)
    op.create_index('idx_pos_item_inventory', 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index('idx_pos_item_sync', 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_item_mapped', 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)

    # Create pos_sync_logs table
    # Audit/metrics log for each sync run against a POS provider,
    # including progress, retries, API usage and resource counters.
    op.create_table('pos_sync_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('pos_config_id', sa.UUID(), nullable=False),
        sa.Column('sync_type', sa.String(50), nullable=False),
        sa.Column('sync_direction', sa.String(20), nullable=False),
        sa.Column('data_type', sa.String(50), nullable=False),
        sa.Column('pos_system', sa.String(50), nullable=False),
        sa.Column('status', sa.String(50), nullable=False),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('duration_seconds', sa.Numeric(10, 3), nullable=True),
        sa.Column('sync_from_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sync_to_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('records_requested', sa.Integer(), nullable=False),
        sa.Column('records_processed', sa.Integer(), nullable=False),
        sa.Column('records_created', sa.Integer(), nullable=False),
        sa.Column('records_updated', sa.Integer(), nullable=False),
        sa.Column('records_skipped', sa.Integer(), nullable=False),
        sa.Column('records_failed', sa.Integer(), nullable=False),
        sa.Column('api_calls_made', sa.Integer(), nullable=False),
        sa.Column('api_rate_limit_hits', sa.Integer(), nullable=False),
        sa.Column('total_api_time_ms', sa.Integer(), nullable=False),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_code', sa.String(100), nullable=True),
        sa.Column('error_details', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('retry_attempt', sa.Integer(), nullable=False),
        sa.Column('max_retries', sa.Integer(), nullable=False),
        # Self-reference for retries spawned from a parent sync run.
        sa.Column('parent_sync_id', sa.UUID(), nullable=True),
        sa.Column('sync_configuration', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('current_page', sa.Integer(), nullable=True),
        sa.Column('total_pages', sa.Integer(), nullable=True),
        sa.Column('current_batch', sa.Integer(), nullable=True),
        sa.Column('total_batches', sa.Integer(), nullable=True),
        sa.Column('progress_percentage', sa.Numeric(5, 2), nullable=True),
        sa.Column('validation_errors', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('data_quality_score', sa.Numeric(5, 2), nullable=True),
        sa.Column('memory_usage_mb', sa.Numeric(10, 2), nullable=True),
        sa.Column('cpu_usage_percentage', sa.Numeric(5, 2), nullable=True),
        sa.Column('network_bytes_received', sa.Integer(), nullable=True),
        sa.Column('network_bytes_sent', sa.Integer(), nullable=True),
        sa.Column('revenue_synced', sa.Numeric(12, 2), nullable=True),
        sa.Column('transactions_synced', sa.Integer(), nullable=False),
        sa.Column('triggered_by', sa.String(50), nullable=True),
        sa.Column('triggered_by_user_id', sa.UUID(), nullable=True),
        sa.Column('trigger_details', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('external_batch_id', sa.String(255), nullable=True),
        sa.Column('webhook_log_id', sa.UUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pos_sync_logs_tenant_id'), 'pos_sync_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_config_id'), 'pos_sync_logs', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_sync_type'), 'pos_sync_logs', ['sync_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_data_type'), 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_system'), 'pos_sync_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_status'), 'pos_sync_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_started_at'), 'pos_sync_logs', ['started_at'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_completed_at'), 'pos_sync_logs', ['completed_at'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_parent_sync_id'), 'pos_sync_logs', ['parent_sync_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_webhook_log_id'), 'pos_sync_logs', ['webhook_log_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_external_batch_id'), 'pos_sync_logs', ['external_batch_id'], unique=False)
    op.create_index('idx_sync_log_tenant_started', 'pos_sync_logs', ['tenant_id', 'started_at'], unique=False)
    op.create_index('idx_sync_log_pos_system_type', 'pos_sync_logs', ['pos_system', 'sync_type'], unique=False)
    op.create_index('idx_sync_log_data_type', 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index('idx_sync_log_trigger', 'pos_sync_logs', ['triggered_by'], unique=False)

    # Create pos_webhooks table
    # Registered outbound webhook endpoints per POS configuration.
    # NOTE(review): created_at/updated_at here are naive DateTime with a
    # now() default, unlike the timezone-aware UTC defaults elsewhere —
    # confirm this inconsistency is intentional.
    op.create_table('pos_webhooks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('pos_config_id', sa.UUID(), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False),
        sa.Column('target_url', sa.String(500), nullable=False),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pos_webhooks_tenant_id'), 'pos_webhooks', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_webhooks_pos_config_id'), 'pos_webhooks', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_webhooks_event_type'), 'pos_webhooks', ['event_type'], unique=False)
    op.create_index(op.f('ix_pos_webhooks_status'), 'pos_webhooks', ['status'], unique=False)

    # Create pos_webhook_logs table
    # Inbound webhook deliveries: raw request capture, signature check
    # result, processing lifecycle, and dedup linkage.  No FK to
    # pos_configurations — rows are keyed only by tenant/pos_system.
    op.create_table('pos_webhook_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=True),
        sa.Column('pos_system', sa.String(50), nullable=False),
        sa.Column('webhook_type', sa.String(100), nullable=False),
        sa.Column('method', sa.String(10), nullable=False),
        sa.Column('url_path', sa.String(500), nullable=False),
        sa.Column('query_params', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('headers', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('raw_payload', sa.Text(), nullable=False),
        sa.Column('payload_size', sa.Integer(), nullable=False),
        sa.Column('content_type', sa.String(100), nullable=True),
        sa.Column('signature', sa.String(500), nullable=True),
        sa.Column('is_signature_valid', sa.Boolean(), nullable=True),
        sa.Column('source_ip', sa.String(45), nullable=True),
        sa.Column('status', sa.String(50), nullable=False),
        sa.Column('processing_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('processing_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('processing_duration_ms', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_code', sa.String(50), nullable=True),
        sa.Column('retry_count', sa.Integer(), nullable=False),
        sa.Column('max_retries', sa.Integer(), nullable=False),
        sa.Column('response_status_code', sa.Integer(), nullable=True),
        sa.Column('response_body', sa.Text(), nullable=True),
        sa.Column('response_sent_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('event_id', sa.String(255), nullable=True),
        sa.Column('event_timestamp', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sequence_number', sa.Integer(), nullable=True),
        sa.Column('transaction_id', sa.String(255), nullable=True),
        sa.Column('order_id', sa.String(255), nullable=True),
        sa.Column('customer_id', sa.String(255), nullable=True),
        sa.Column('created_transaction_id', sa.UUID(), nullable=True),
        sa.Column('updated_transaction_id', sa.UUID(), nullable=True),
        sa.Column('is_duplicate', sa.Boolean(), nullable=False),
        sa.Column('duplicate_of', sa.UUID(), nullable=True),
        sa.Column('priority', sa.String(20), nullable=False),
        sa.Column('user_agent', sa.String(500), nullable=True),
        sa.Column('forwarded_for', sa.String(200), nullable=True),
        sa.Column('request_id', sa.String(100), nullable=True),
        sa.Column('received_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_pos_webhook_logs_tenant_id'), 'pos_webhook_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_pos_system'), 'pos_webhook_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_webhook_type'), 'pos_webhook_logs', ['webhook_type'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_status'), 'pos_webhook_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_event_id'), 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_received_at'), 'pos_webhook_logs', ['received_at'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_transaction_id'), 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_order_id'), 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_is_duplicate'), 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_duplicate_of'), 'pos_webhook_logs', ['duplicate_of'], unique=False)
    op.create_index('idx_webhook_pos_system_type', 'pos_webhook_logs', ['pos_system', 'webhook_type'], unique=False)
    op.create_index('idx_webhook_event_id', 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index('idx_webhook_tenant_received', 'pos_webhook_logs', ['tenant_id', 'received_at'], unique=False)
    op.create_index('idx_webhook_transaction_id', 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index('idx_webhook_order_id', 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index('idx_webhook_duplicate', 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index('idx_webhook_priority', 'pos_webhook_logs', ['priority'], unique=False)
    op.create_index('idx_webhook_retry', 'pos_webhook_logs', ['retry_count'], unique=False)
    op.create_index('idx_webhook_signature_valid', 'pos_webhook_logs', ['is_signature_valid'], unique=False)
def downgrade() -> None:
    """Tear the POS schema back down.

    For each table — children first, pos_configurations last so foreign
    keys never dangle — drop its hand-named indexes, then its
    autogenerated (op.f) indexes, then the table itself.
    """
    # (table, hand-named idx_* indexes, autogenerated ix_* index names)
    teardown_plan = [
        (
            'pos_webhook_logs',
            ['idx_webhook_signature_valid', 'idx_webhook_retry',
             'idx_webhook_priority', 'idx_webhook_duplicate',
             'idx_webhook_order_id', 'idx_webhook_transaction_id',
             'idx_webhook_tenant_received', 'idx_webhook_event_id',
             'idx_webhook_pos_system_type'],
            ['ix_pos_webhook_logs_duplicate_of', 'ix_pos_webhook_logs_is_duplicate',
             'ix_pos_webhook_logs_order_id', 'ix_pos_webhook_logs_transaction_id',
             'ix_pos_webhook_logs_received_at', 'ix_pos_webhook_logs_event_id',
             'ix_pos_webhook_logs_status', 'ix_pos_webhook_logs_webhook_type',
             'ix_pos_webhook_logs_pos_system', 'ix_pos_webhook_logs_tenant_id'],
        ),
        (
            'pos_webhooks',
            [],
            ['ix_pos_webhooks_status', 'ix_pos_webhooks_event_type',
             'ix_pos_webhooks_pos_config_id', 'ix_pos_webhooks_tenant_id'],
        ),
        (
            'pos_sync_logs',
            ['idx_sync_log_trigger', 'idx_sync_log_data_type',
             'idx_sync_log_pos_system_type', 'idx_sync_log_tenant_started'],
            ['ix_pos_sync_logs_external_batch_id', 'ix_pos_sync_logs_webhook_log_id',
             'ix_pos_sync_logs_parent_sync_id', 'ix_pos_sync_logs_completed_at',
             'ix_pos_sync_logs_started_at', 'ix_pos_sync_logs_status',
             'ix_pos_sync_logs_pos_system', 'ix_pos_sync_logs_data_type',
             'ix_pos_sync_logs_sync_type', 'ix_pos_sync_logs_pos_config_id',
             'ix_pos_sync_logs_tenant_id'],
        ),
        (
            'pos_transaction_items',
            ['idx_pos_item_mapped', 'idx_pos_item_sync', 'idx_pos_item_inventory',
             'idx_pos_item_sku', 'idx_pos_item_category', 'idx_pos_item_product',
             'idx_pos_item_transaction'],
            ['ix_pos_transaction_items_is_synced_to_sales',
             'ix_pos_transaction_items_is_mapped_to_inventory',
             'ix_pos_transaction_items_inventory_product_id',
             'ix_pos_transaction_items_product_category',
             'ix_pos_transaction_items_product_name',
             'ix_pos_transaction_items_sku',
             'ix_pos_transaction_items_external_item_id',
             'ix_pos_transaction_items_tenant_id',
             'ix_pos_transaction_items_transaction_id'],
        ),
        (
            'pos_transactions',
            ['idx_pos_transaction_customer', 'idx_pos_transaction_location',
             'idx_pos_transaction_processed', 'idx_pos_transaction_sync_status',
             'idx_pos_transaction_external_id', 'idx_pos_transaction_tenant_date'],
            ['ix_pos_transactions_duplicate_of', 'ix_pos_transactions_is_duplicate',
             'ix_pos_transactions_is_processed', 'ix_pos_transactions_is_synced_to_sales',
             'ix_pos_transactions_sales_record_id', 'ix_pos_transactions_customer_id',
             'ix_pos_transactions_location_id', 'ix_pos_transactions_transaction_date',
             'ix_pos_transactions_status', 'ix_pos_transactions_transaction_type',
             'ix_pos_transactions_pos_system', 'ix_pos_transactions_external_order_id',
             'ix_pos_transactions_external_transaction_id',
             'ix_pos_transactions_pos_config_id', 'ix_pos_transactions_tenant_id'],
        ),
        (
            'pos_configurations',
            ['idx_pos_quality', 'idx_pos_training', 'idx_pos_district_date',
             'idx_pos_measurement_point', 'idx_pos_city_location',
             'idx_pos_tenant_date', 'idx_pos_city_date', 'idx_pos_location_date'],
            ['ix_pos_configurations_date', 'ix_pos_configurations_city',
             'ix_pos_configurations_location_id', 'ix_pos_configurations_tenant_id'],
        ),
    ]
    for table, custom_indexes, auto_indexes in teardown_plan:
        for index_name in custom_indexes:
            op.drop_index(index_name, table_name=table)
        for index_name in auto_indexes:
            op.drop_index(op.f(index_name), table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,392 @@
"""initial_schema_20251001_1118
Revision ID: 36bd79501798
Revises:
Create Date: 2025-10-01 11:18:18.854624+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '36bd79501798'                          # id of this migration
down_revision: Union[str, None] = None                  # root of the migration chain
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial POS-integration schema.

    Tables are created in dependency order: pos_configurations,
    pos_sync_logs, pos_webhook_logs, pos_transactions (FK to
    pos_configurations), pos_transaction_items (FK to pos_transactions).
    Indexes for each table are created immediately after the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- pos_configurations: one row per tenant/POS-provider connection ---
    op.create_table('pos_configurations',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('provider_name', sa.String(length=100), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('is_connected', sa.Boolean(), nullable=False),
    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
    sa.Column('webhook_url', sa.String(length=500), nullable=True),
    sa.Column('webhook_secret', sa.String(length=255), nullable=True),
    sa.Column('environment', sa.String(length=20), nullable=False),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('merchant_id', sa.String(length=100), nullable=True),
    sa.Column('sync_enabled', sa.Boolean(), nullable=False),
    # NOTE(review): interval stored as String(10), not Integer — presumably
    # intentional on the model side; confirm before "fixing" the type here.
    sa.Column('sync_interval_minutes', sa.String(length=10), nullable=False),
    sa.Column('auto_sync_products', sa.Boolean(), nullable=False),
    sa.Column('auto_sync_transactions', sa.Boolean(), nullable=False),
    sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_successful_sync_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_sync_status', sa.String(length=50), nullable=True),
    sa.Column('last_sync_message', sa.Text(), nullable=True),
    sa.Column('provider_settings', sa.JSON(), nullable=True),
    sa.Column('last_health_check_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('health_status', sa.String(length=50), nullable=False),
    sa.Column('health_message', sa.Text(), nullable=True),
    # Timestamps default to the DB clock (server_default now()), not the app clock.
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_config_active', 'pos_configurations', ['is_active'], unique=False)
    op.create_index('idx_pos_config_connected', 'pos_configurations', ['is_connected'], unique=False)
    op.create_index('idx_pos_config_created_at', 'pos_configurations', ['created_at'], unique=False)
    op.create_index('idx_pos_config_health_status', 'pos_configurations', ['health_status'], unique=False)
    op.create_index('idx_pos_config_sync_enabled', 'pos_configurations', ['sync_enabled'], unique=False)
    op.create_index('idx_pos_config_tenant_pos_system', 'pos_configurations', ['tenant_id', 'pos_system'], unique=False)
    op.create_index(op.f('ix_pos_configurations_id'), 'pos_configurations', ['id'], unique=False)
    op.create_index(op.f('ix_pos_configurations_tenant_id'), 'pos_configurations', ['tenant_id'], unique=False)
    # --- pos_sync_logs: audit/progress record for each sync run ---
    # NOTE(review): pos_config_id / webhook_log_id carry no FK constraints here
    # (only indexes) — matches the autogenerated model; confirm that is intended.
    op.create_table('pos_sync_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_config_id', sa.UUID(), nullable=False),
    sa.Column('sync_type', sa.String(length=50), nullable=False),
    sa.Column('sync_direction', sa.String(length=20), nullable=False),
    sa.Column('data_type', sa.String(length=50), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('duration_seconds', sa.Numeric(precision=10, scale=3), nullable=True),
    sa.Column('sync_from_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_to_date', sa.DateTime(timezone=True), nullable=True),
    # Record counters for the run (requested vs. actually processed, etc.).
    sa.Column('records_requested', sa.Integer(), nullable=False),
    sa.Column('records_processed', sa.Integer(), nullable=False),
    sa.Column('records_created', sa.Integer(), nullable=False),
    sa.Column('records_updated', sa.Integer(), nullable=False),
    sa.Column('records_skipped', sa.Integer(), nullable=False),
    sa.Column('records_failed', sa.Integer(), nullable=False),
    sa.Column('api_calls_made', sa.Integer(), nullable=False),
    sa.Column('api_rate_limit_hits', sa.Integer(), nullable=False),
    sa.Column('total_api_time_ms', sa.Integer(), nullable=False),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('error_code', sa.String(length=100), nullable=True),
    sa.Column('error_details', sa.JSON(), nullable=True),
    sa.Column('retry_attempt', sa.Integer(), nullable=False),
    sa.Column('max_retries', sa.Integer(), nullable=False),
    sa.Column('parent_sync_id', sa.UUID(), nullable=True),
    sa.Column('sync_configuration', sa.JSON(), nullable=True),
    # Pagination/batching progress for long-running syncs.
    sa.Column('current_page', sa.Integer(), nullable=True),
    sa.Column('total_pages', sa.Integer(), nullable=True),
    sa.Column('current_batch', sa.Integer(), nullable=True),
    sa.Column('total_batches', sa.Integer(), nullable=True),
    sa.Column('progress_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('validation_errors', sa.JSON(), nullable=True),
    sa.Column('data_quality_score', sa.Numeric(precision=5, scale=2), nullable=True),
    # Resource-usage telemetry captured during the run.
    sa.Column('memory_usage_mb', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('cpu_usage_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('network_bytes_received', sa.Integer(), nullable=True),
    sa.Column('network_bytes_sent', sa.Integer(), nullable=True),
    sa.Column('revenue_synced', sa.Numeric(precision=12, scale=2), nullable=True),
    sa.Column('transactions_synced', sa.Integer(), nullable=False),
    sa.Column('triggered_by', sa.String(length=50), nullable=True),
    sa.Column('triggered_by_user_id', sa.UUID(), nullable=True),
    sa.Column('trigger_details', sa.JSON(), nullable=True),
    sa.Column('external_batch_id', sa.String(length=255), nullable=True),
    sa.Column('webhook_log_id', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('tags', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_sync_log_completed', 'pos_sync_logs', ['completed_at'], unique=False)
    op.create_index('idx_sync_log_data_type', 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index('idx_sync_log_duration', 'pos_sync_logs', ['duration_seconds'], unique=False)
    op.create_index('idx_sync_log_external_batch', 'pos_sync_logs', ['external_batch_id'], unique=False)
    op.create_index('idx_sync_log_parent', 'pos_sync_logs', ['parent_sync_id'], unique=False)
    op.create_index('idx_sync_log_pos_system_type', 'pos_sync_logs', ['pos_system', 'sync_type'], unique=False)
    op.create_index('idx_sync_log_retry', 'pos_sync_logs', ['retry_attempt'], unique=False)
    op.create_index('idx_sync_log_status', 'pos_sync_logs', ['status'], unique=False)
    op.create_index('idx_sync_log_tenant_started', 'pos_sync_logs', ['tenant_id', 'started_at'], unique=False)
    op.create_index('idx_sync_log_trigger', 'pos_sync_logs', ['triggered_by'], unique=False)
    op.create_index('idx_sync_log_webhook', 'pos_sync_logs', ['webhook_log_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_data_type'), 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_id'), 'pos_sync_logs', ['id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_config_id'), 'pos_sync_logs', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_system'), 'pos_sync_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_started_at'), 'pos_sync_logs', ['started_at'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_status'), 'pos_sync_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_sync_type'), 'pos_sync_logs', ['sync_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_tenant_id'), 'pos_sync_logs', ['tenant_id'], unique=False)
    # --- pos_webhook_logs: raw inbound webhook requests and their processing state ---
    # tenant_id is nullable: the tenant may not be resolvable until the payload is parsed.
    op.create_table('pos_webhook_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('webhook_type', sa.String(length=100), nullable=False),
    sa.Column('method', sa.String(length=10), nullable=False),
    sa.Column('url_path', sa.String(length=500), nullable=False),
    sa.Column('query_params', sa.JSON(), nullable=True),
    sa.Column('headers', sa.JSON(), nullable=True),
    sa.Column('raw_payload', sa.Text(), nullable=False),
    sa.Column('payload_size', sa.Integer(), nullable=False),
    sa.Column('content_type', sa.String(length=100), nullable=True),
    sa.Column('signature', sa.String(length=500), nullable=True),
    sa.Column('is_signature_valid', sa.Boolean(), nullable=True),
    # 45 chars fits an IPv6 address in textual form.
    sa.Column('source_ip', sa.String(length=45), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('processing_started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('processing_completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('processing_duration_ms', sa.Integer(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('error_code', sa.String(length=50), nullable=True),
    sa.Column('retry_count', sa.Integer(), nullable=False),
    sa.Column('max_retries', sa.Integer(), nullable=False),
    sa.Column('response_status_code', sa.Integer(), nullable=True),
    sa.Column('response_body', sa.Text(), nullable=True),
    sa.Column('response_sent_at', sa.DateTime(timezone=True), nullable=True),
    # Provider-side event identity, used for ordering and de-duplication.
    sa.Column('event_id', sa.String(length=255), nullable=True),
    sa.Column('event_timestamp', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sequence_number', sa.Integer(), nullable=True),
    sa.Column('transaction_id', sa.String(length=255), nullable=True),
    sa.Column('order_id', sa.String(length=255), nullable=True),
    sa.Column('customer_id', sa.String(length=255), nullable=True),
    sa.Column('created_transaction_id', sa.UUID(), nullable=True),
    sa.Column('updated_transaction_id', sa.UUID(), nullable=True),
    sa.Column('is_duplicate', sa.Boolean(), nullable=False),
    sa.Column('duplicate_of', sa.UUID(), nullable=True),
    sa.Column('priority', sa.String(length=20), nullable=False),
    sa.Column('user_agent', sa.String(length=500), nullable=True),
    sa.Column('forwarded_for', sa.String(length=200), nullable=True),
    sa.Column('request_id', sa.String(length=100), nullable=True),
    sa.Column('received_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_webhook_duplicate', 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index('idx_webhook_event_id', 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index('idx_webhook_order_id', 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index('idx_webhook_pos_system_type', 'pos_webhook_logs', ['pos_system', 'webhook_type'], unique=False)
    op.create_index('idx_webhook_priority', 'pos_webhook_logs', ['priority'], unique=False)
    op.create_index('idx_webhook_received_at', 'pos_webhook_logs', ['received_at'], unique=False)
    op.create_index('idx_webhook_retry', 'pos_webhook_logs', ['retry_count'], unique=False)
    op.create_index('idx_webhook_signature_valid', 'pos_webhook_logs', ['is_signature_valid'], unique=False)
    op.create_index('idx_webhook_status', 'pos_webhook_logs', ['status'], unique=False)
    op.create_index('idx_webhook_tenant_received', 'pos_webhook_logs', ['tenant_id', 'received_at'], unique=False)
    op.create_index('idx_webhook_transaction_id', 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_event_id'), 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_id'), 'pos_webhook_logs', ['id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_is_duplicate'), 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_order_id'), 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_pos_system'), 'pos_webhook_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_received_at'), 'pos_webhook_logs', ['received_at'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_status'), 'pos_webhook_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_tenant_id'), 'pos_webhook_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_transaction_id'), 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_webhook_type'), 'pos_webhook_logs', ['webhook_type'], unique=False)
    # --- pos_transactions: normalized transactions pulled from the POS provider ---
    # Created after pos_configurations because of the pos_config_id FK below.
    op.create_table('pos_transactions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_config_id', sa.UUID(), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('external_transaction_id', sa.String(length=255), nullable=False),
    sa.Column('external_order_id', sa.String(length=255), nullable=True),
    sa.Column('transaction_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    # Monetary breakdown; total_amount is stored, not derived by the DB.
    sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tip_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('currency', sa.String(length=3), nullable=False),
    sa.Column('payment_method', sa.String(length=50), nullable=True),
    sa.Column('payment_status', sa.String(length=50), nullable=True),
    sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('pos_created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('pos_updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('location_name', sa.String(length=255), nullable=True),
    sa.Column('staff_id', sa.String(length=100), nullable=True),
    sa.Column('staff_name', sa.String(length=255), nullable=True),
    sa.Column('customer_id', sa.String(length=100), nullable=True),
    sa.Column('customer_email', sa.String(length=255), nullable=True),
    sa.Column('customer_phone', sa.String(length=50), nullable=True),
    sa.Column('order_type', sa.String(length=50), nullable=True),
    sa.Column('table_number', sa.String(length=20), nullable=True),
    sa.Column('receipt_number', sa.String(length=100), nullable=True),
    # Downstream sync state into the sales service.
    sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
    sa.Column('sales_record_id', sa.UUID(), nullable=True),
    sa.Column('sync_attempted_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_error', sa.Text(), nullable=True),
    sa.Column('sync_retry_count', sa.Integer(), nullable=False),
    sa.Column('raw_data', sa.JSON(), nullable=True),
    sa.Column('is_processed', sa.Boolean(), nullable=False),
    sa.Column('processing_error', sa.Text(), nullable=True),
    sa.Column('is_duplicate', sa.Boolean(), nullable=False),
    sa.Column('duplicate_of', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_transaction_customer', 'pos_transactions', ['customer_id'], unique=False)
    op.create_index('idx_pos_transaction_duplicate', 'pos_transactions', ['is_duplicate'], unique=False)
    # NOTE(review): (pos_system, external_transaction_id) is indexed but NOT
    # unique, so duplicate detection is handled in application code.
    op.create_index('idx_pos_transaction_external_id', 'pos_transactions', ['pos_system', 'external_transaction_id'], unique=False)
    op.create_index('idx_pos_transaction_location', 'pos_transactions', ['location_id'], unique=False)
    op.create_index('idx_pos_transaction_processed', 'pos_transactions', ['is_processed'], unique=False)
    op.create_index('idx_pos_transaction_status', 'pos_transactions', ['status'], unique=False)
    op.create_index('idx_pos_transaction_sync_status', 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_transaction_tenant_date', 'pos_transactions', ['tenant_id', 'transaction_date'], unique=False)
    op.create_index('idx_pos_transaction_type', 'pos_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_order_id'), 'pos_transactions', ['external_order_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_transaction_id'), 'pos_transactions', ['external_transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_id'), 'pos_transactions', ['id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_is_synced_to_sales'), 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_config_id'), 'pos_transactions', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_system'), 'pos_transactions', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_transactions_sales_record_id'), 'pos_transactions', ['sales_record_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_tenant_id'), 'pos_transactions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_transaction_date'), 'pos_transactions', ['transaction_date'], unique=False)
    # --- pos_transaction_items: line items, FK to pos_transactions ---
    op.create_table('pos_transaction_items',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('transaction_id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('external_item_id', sa.String(length=255), nullable=True),
    sa.Column('sku', sa.String(length=100), nullable=True),
    sa.Column('product_name', sa.String(length=255), nullable=False),
    sa.Column('product_category', sa.String(length=100), nullable=True),
    sa.Column('product_subcategory', sa.String(length=100), nullable=True),
    # quantity allows fractional units (scale=3), e.g. weight-based items.
    sa.Column('quantity', sa.Numeric(precision=10, scale=3), nullable=False),
    sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('total_price', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('modifiers', sa.JSON(), nullable=True),
    sa.Column('inventory_product_id', sa.UUID(), nullable=True),
    sa.Column('is_mapped_to_inventory', sa.Boolean(), nullable=False),
    sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
    sa.Column('sync_error', sa.Text(), nullable=True),
    sa.Column('raw_data', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['transaction_id'], ['pos_transactions.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_item_category', 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index('idx_pos_item_inventory', 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index('idx_pos_item_mapped', 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)
    op.create_index('idx_pos_item_product', 'pos_transaction_items', ['product_name'], unique=False)
    op.create_index('idx_pos_item_sku', 'pos_transaction_items', ['sku'], unique=False)
    op.create_index('idx_pos_item_sync', 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_item_transaction', 'pos_transaction_items', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_id'), 'pos_transaction_items', ['id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_inventory_product_id'), 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_product_category'), 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_sku'), 'pos_transaction_items', ['sku'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_tenant_id'), 'pos_transaction_items', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_transaction_id'), 'pos_transaction_items', ['transaction_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the initial POS-integration schema.

    Mirror image of upgrade(): indexes are dropped before their table, and
    tables are dropped in reverse dependency order (pos_transaction_items
    first, pos_configurations last) so FK constraints never block a drop.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # pos_transaction_items (child of pos_transactions) goes first.
    op.drop_index(op.f('ix_pos_transaction_items_transaction_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_tenant_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_sku'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_product_category'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_inventory_product_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_id'), table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_transaction', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_sync', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_sku', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_product', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_mapped', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_inventory', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_category', table_name='pos_transaction_items')
    op.drop_table('pos_transaction_items')
    # pos_transactions (child of pos_configurations).
    op.drop_index(op.f('ix_pos_transactions_transaction_date'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_tenant_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_sales_record_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_pos_system'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_pos_config_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_is_synced_to_sales'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_external_transaction_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_external_order_id'), table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_type', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_tenant_date', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_sync_status', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_status', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_processed', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_location', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_external_id', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_duplicate', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_customer', table_name='pos_transactions')
    op.drop_table('pos_transactions')
    # pos_webhook_logs (no FK dependants).
    op.drop_index(op.f('ix_pos_webhook_logs_webhook_type'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_transaction_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_tenant_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_status'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_received_at'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_pos_system'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_order_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_is_duplicate'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_event_id'), table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_transaction_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_tenant_received', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_status', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_signature_valid', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_retry', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_received_at', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_priority', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_pos_system_type', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_order_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_event_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_duplicate', table_name='pos_webhook_logs')
    op.drop_table('pos_webhook_logs')
    # pos_sync_logs (no FK dependants).
    op.drop_index(op.f('ix_pos_sync_logs_tenant_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_sync_type'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_status'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_started_at'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_pos_system'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_pos_config_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_data_type'), table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_webhook', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_trigger', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_tenant_started', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_status', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_retry', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_pos_system_type', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_parent', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_external_batch', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_duration', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_data_type', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_completed', table_name='pos_sync_logs')
    op.drop_table('pos_sync_logs')
    # pos_configurations last: it is the FK target of pos_transactions.
    op.drop_index(op.f('ix_pos_configurations_tenant_id'), table_name='pos_configurations')
    op.drop_index(op.f('ix_pos_configurations_id'), table_name='pos_configurations')
    op.drop_index('idx_pos_config_tenant_pos_system', table_name='pos_configurations')
    op.drop_index('idx_pos_config_sync_enabled', table_name='pos_configurations')
    op.drop_index('idx_pos_config_health_status', table_name='pos_configurations')
    op.drop_index('idx_pos_config_created_at', table_name='pos_configurations')
    op.drop_index('idx_pos_config_connected', table_name='pos_configurations')
    op.drop_index('idx_pos_config_active', table_name='pos_configurations')
    op.drop_table('pos_configurations')
    # ### end Alembic commands ###

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for production service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('PRODUCTION_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,293 +0,0 @@
"""Initial schema for production service
Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy import Enum
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial production-service schema.

    Creates six tables — production_batches, production_schedules,
    production_capacity, quality_check_templates, quality_checks and
    equipment — together with their indexes.  The PostgreSQL ENUM types
    (productionstatus, productionpriority, processstage, equipmenttype,
    equipmentstatus) are created implicitly by the ``sa.Enum`` column
    definitions and must be dropped explicitly in ``downgrade``.
    """
    # Create production_batches table (ENUMs will be created automatically)
    op.create_table('production_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_number', sa.String(50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(255), nullable=False),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        # Planned vs. actual pairs track schedule adherence per batch.
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_quantity', sa.Float(), nullable=False),
        sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_quantity', sa.Float(), nullable=True),
        sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
        sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
        # NOTE: processstage values are lowercase here; the replacement
        # migration (2fe9ab08dd7b) uses uppercase values for the same type.
        sa.Column('current_process_stage', sa.Enum('mixing', 'proofing', 'shaping', 'baking', 'cooling', 'packaging', 'finishing', name='processstage'), nullable=True),
        sa.Column('process_stage_history', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('pending_quality_checks', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('completed_quality_checks', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        # Cost breakdown (all optional; populated as the batch progresses).
        sa.Column('estimated_cost', sa.Float(), nullable=True),
        sa.Column('actual_cost', sa.Float(), nullable=True),
        sa.Column('labor_cost', sa.Float(), nullable=True),
        sa.Column('material_cost', sa.Float(), nullable=True),
        sa.Column('overhead_cost', sa.Float(), nullable=True),
        sa.Column('yield_percentage', sa.Float(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('waste_quantity', sa.Float(), nullable=True),
        sa.Column('defect_quantity', sa.Float(), nullable=True),
        sa.Column('equipment_used', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('staff_assigned', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('station_id', sa.String(50), nullable=True),
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('forecast_id', sa.UUID(), nullable=True),
        sa.Column('is_rush_order', sa.Boolean(), nullable=True),
        sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
        sa.Column('production_notes', sa.Text(), nullable=True),
        sa.Column('quality_notes', sa.Text(), nullable=True),
        sa.Column('delay_reason', sa.String(255), nullable=True),
        sa.Column('cancellation_reason', sa.String(255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('batch_number')
    )
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)
    # Create production_schedules table
    op.create_table('production_schedules',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_hours', sa.Float(), nullable=False),
        sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
        sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
        sa.Column('overtime_hours', sa.Float(), nullable=True),
        sa.Column('staff_count', sa.Integer(), nullable=False),
        sa.Column('equipment_capacity', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('station_assignments', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('total_batches_planned', sa.Integer(), nullable=True),
        sa.Column('total_batches_completed', sa.Integer(), nullable=True),
        sa.Column('total_quantity_planned', sa.Float(), nullable=True),
        sa.Column('total_quantity_produced', sa.Float(), nullable=True),
        sa.Column('is_finalized', sa.Boolean(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('utilization_percentage', sa.Float(), nullable=True),
        sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
        sa.Column('schedule_notes', sa.Text(), nullable=True),
        sa.Column('schedule_adjustments', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    # Create production_capacity table
    op.create_table('production_capacity',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('resource_type', sa.String(50), nullable=False),
        sa.Column('resource_id', sa.String(100), nullable=False),
        sa.Column('resource_name', sa.String(255), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_units', sa.Float(), nullable=False),
        sa.Column('allocated_capacity_units', sa.Float(), nullable=True),
        sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
        sa.Column('is_available', sa.Boolean(), nullable=True),
        sa.Column('is_maintenance', sa.Boolean(), nullable=True),
        sa.Column('is_reserved', sa.Boolean(), nullable=True),
        sa.Column('equipment_type', sa.String(100), nullable=True),
        sa.Column('max_batch_size', sa.Float(), nullable=True),
        sa.Column('min_batch_size', sa.Float(), nullable=True),
        sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('efficiency_rating', sa.Float(), nullable=True),
        sa.Column('maintenance_status', sa.String(50), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('restrictions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)
    # Create quality_check_templates table
    op.create_table('quality_check_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('template_code', sa.String(100), nullable=True),
        sa.Column('check_type', sa.String(50), nullable=False),
        sa.Column('category', sa.String(100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('parameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('thresholds', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('scoring_criteria', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('is_critical', sa.Boolean(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('min_value', sa.Float(), nullable=True),
        sa.Column('max_value', sa.Float(), nullable=True),
        sa.Column('target_value', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(20), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('applicable_stages', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)
    # Create quality_checks table
    op.create_table('quality_checks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_id', sa.UUID(), nullable=False),
        sa.Column('template_id', sa.UUID(), nullable=True),
        sa.Column('check_type', sa.String(50), nullable=False),
        sa.Column('process_stage', sa.Enum('mixing', 'proofing', 'shaping', 'baking', 'cooling', 'packaging', 'finishing', name='processstage'), nullable=True),
        sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('checker_id', sa.String(100), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('pass_fail', sa.Boolean(), nullable=False),
        sa.Column('defect_count', sa.Integer(), nullable=True),
        sa.Column('defect_types', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('measured_weight', sa.Float(), nullable=True),
        sa.Column('measured_temperature', sa.Float(), nullable=True),
        sa.Column('measured_moisture', sa.Float(), nullable=True),
        sa.Column('measured_dimensions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('stage_specific_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('target_weight', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('target_moisture', sa.Float(), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('within_tolerance', sa.Boolean(), nullable=True),
        sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
        sa.Column('corrective_actions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('template_results', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('criteria_scores', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('check_notes', sa.Text(), nullable=True),
        sa.Column('photos_urls', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('certificate_url', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        # Referential integrity: each check belongs to one batch, so
        # quality_checks must be dropped before production_batches.
        sa.ForeignKeyConstraint(['batch_id'], ['production_batches.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)
    # Create equipment table
    op.create_table('equipment',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('type', sa.Enum('oven', 'mixer', 'proofer', 'freezer', 'packaging', 'other', name='equipmenttype'), nullable=False),
        sa.Column('model', sa.String(100), nullable=True),
        sa.Column('serial_number', sa.String(100), nullable=True),
        sa.Column('location', sa.String(255), nullable=True),
        sa.Column('status', sa.Enum('operational', 'maintenance', 'down', 'warning', name='equipmentstatus'), nullable=True),
        sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('uptime_percentage', sa.Float(), nullable=True),
        sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
        sa.Column('power_kw', sa.Float(), nullable=True),
        sa.Column('capacity', sa.Float(), nullable=True),
        sa.Column('weight_kg', sa.Float(), nullable=True),
        sa.Column('current_temperature', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)
def downgrade() -> None:
    """Reverse the initial schema: drop every table, its indexes, and the
    implicitly created PostgreSQL ENUM types.

    Tables are removed in reverse creation order; quality_checks must go
    before production_batches because of its batch_id foreign key.
    """
    # (table, indexes-to-drop-first) pairs, in the exact drop order the
    # schema requires.
    drop_plan = (
        ('equipment', (
            'ix_equipment_tenant_id',
        )),
        ('quality_checks', (
            'ix_quality_checks_process_stage',
            'ix_quality_checks_template_id',
            'ix_quality_checks_batch_id',
            'ix_quality_checks_tenant_id',
        )),
        ('quality_check_templates', (
            'ix_quality_check_templates_template_code',
            'ix_quality_check_templates_tenant_id',
        )),
        ('production_capacity', (
            'ix_production_capacity_date',
            'ix_production_capacity_tenant_id',
        )),
        ('production_schedules', (
            'ix_production_schedules_schedule_date',
            'ix_production_schedules_tenant_id',
        )),
        ('production_batches', (
            'ix_production_batches_current_process_stage',
            'ix_production_batches_status',
            'ix_production_batches_product_id',
            'ix_production_batches_batch_number',
            'ix_production_batches_tenant_id',
        )),
    )
    for table_name, index_names in drop_plan:
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table_name)
        op.drop_table(table_name)

    # Remove the ENUM types created implicitly by upgrade(); checkfirst
    # keeps this idempotent if a type was already removed.
    for enum_name in (
        'processstage',
        'equipmenttype',
        'equipmentstatus',
        'productionpriority',
        'productionstatus',
    ):
        Enum(name=enum_name).drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,258 @@
"""initial_schema_20251001_1119
Revision ID: 2fe9ab08dd7b
Revises:
Create Date: 2025-10-01 11:19:59.233402+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2fe9ab08dd7b'
down_revision: Union[str, None] = None  # root of the migration chain: no parent
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial production-service schema (regenerated 2025-10-01).

    Creates six tables — equipment, production_batches, production_capacity,
    production_schedules, quality_check_templates, quality_checks — with
    their indexes.  The PostgreSQL ENUM types (equipmenttype,
    equipmentstatus, productionstatus, productionpriority, processstage)
    are created implicitly by the ``sa.Enum`` column definitions.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # equipment: bakery machines and their maintenance/telemetry fields.
    op.create_table('equipment',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('type', sa.Enum('OVEN', 'MIXER', 'PROOFER', 'FREEZER', 'PACKAGING', 'OTHER', name='equipmenttype'), nullable=False),
        sa.Column('model', sa.String(length=100), nullable=True),
        sa.Column('serial_number', sa.String(length=100), nullable=True),
        sa.Column('location', sa.String(length=255), nullable=True),
        sa.Column('status', sa.Enum('OPERATIONAL', 'MAINTENANCE', 'DOWN', 'WARNING', name='equipmentstatus'), nullable=False),
        sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('uptime_percentage', sa.Float(), nullable=True),
        sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
        sa.Column('power_kw', sa.Float(), nullable=True),
        sa.Column('capacity', sa.Float(), nullable=True),
        sa.Column('weight_kg', sa.Float(), nullable=True),
        sa.Column('current_temperature', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)
    # production_batches: planned vs. actual execution of one production run.
    op.create_table('production_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_number', sa.String(length=50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_quantity', sa.Float(), nullable=False),
        sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_quantity', sa.Float(), nullable=True),
        sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
        sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
        sa.Column('current_process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('process_stage_history', sa.JSON(), nullable=True),
        sa.Column('pending_quality_checks', sa.JSON(), nullable=True),
        sa.Column('completed_quality_checks', sa.JSON(), nullable=True),
        sa.Column('estimated_cost', sa.Float(), nullable=True),
        sa.Column('actual_cost', sa.Float(), nullable=True),
        sa.Column('labor_cost', sa.Float(), nullable=True),
        sa.Column('material_cost', sa.Float(), nullable=True),
        sa.Column('overhead_cost', sa.Float(), nullable=True),
        sa.Column('yield_percentage', sa.Float(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('waste_quantity', sa.Float(), nullable=True),
        sa.Column('defect_quantity', sa.Float(), nullable=True),
        sa.Column('equipment_used', sa.JSON(), nullable=True),
        sa.Column('staff_assigned', sa.JSON(), nullable=True),
        sa.Column('station_id', sa.String(length=50), nullable=True),
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('forecast_id', sa.UUID(), nullable=True),
        sa.Column('is_rush_order', sa.Boolean(), nullable=True),
        sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
        sa.Column('production_notes', sa.Text(), nullable=True),
        sa.Column('quality_notes', sa.Text(), nullable=True),
        sa.Column('delay_reason', sa.String(length=255), nullable=True),
        sa.Column('cancellation_reason', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # batch_number uniqueness is now enforced via a unique index (the old
    # schema used a table-level UniqueConstraint instead).
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=True)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    # production_capacity: per-resource time-windowed capacity accounting.
    op.create_table('production_capacity',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('resource_type', sa.String(length=50), nullable=False),
        sa.Column('resource_id', sa.String(length=100), nullable=False),
        sa.Column('resource_name', sa.String(length=255), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_units', sa.Float(), nullable=False),
        sa.Column('allocated_capacity_units', sa.Float(), nullable=False),
        sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
        sa.Column('is_available', sa.Boolean(), nullable=True),
        sa.Column('is_maintenance', sa.Boolean(), nullable=True),
        sa.Column('is_reserved', sa.Boolean(), nullable=True),
        sa.Column('equipment_type', sa.String(length=100), nullable=True),
        sa.Column('max_batch_size', sa.Float(), nullable=True),
        sa.Column('min_batch_size', sa.Float(), nullable=True),
        sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('efficiency_rating', sa.Float(), nullable=True),
        sa.Column('maintenance_status', sa.String(length=50), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('restrictions', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    # production_schedules: per-shift planning and utilization metrics.
    op.create_table('production_schedules',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_hours', sa.Float(), nullable=False),
        sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
        sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
        sa.Column('overtime_hours', sa.Float(), nullable=True),
        sa.Column('staff_count', sa.Integer(), nullable=False),
        sa.Column('equipment_capacity', sa.JSON(), nullable=True),
        sa.Column('station_assignments', sa.JSON(), nullable=True),
        sa.Column('total_batches_planned', sa.Integer(), nullable=False),
        sa.Column('total_batches_completed', sa.Integer(), nullable=True),
        sa.Column('total_quantity_planned', sa.Float(), nullable=False),
        sa.Column('total_quantity_produced', sa.Float(), nullable=True),
        sa.Column('is_finalized', sa.Boolean(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('utilization_percentage', sa.Float(), nullable=True),
        sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
        sa.Column('schedule_notes', sa.Text(), nullable=True),
        sa.Column('schedule_adjustments', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    # quality_check_templates: reusable definitions of quality checks.
    op.create_table('quality_check_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('template_code', sa.String(length=100), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        sa.Column('category', sa.String(length=100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('parameters', sa.JSON(), nullable=True),
        sa.Column('thresholds', sa.JSON(), nullable=True),
        sa.Column('scoring_criteria', sa.JSON(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('is_critical', sa.Boolean(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('min_value', sa.Float(), nullable=True),
        sa.Column('max_value', sa.Float(), nullable=True),
        sa.Column('target_value', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(length=20), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('applicable_stages', sa.JSON(), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    # quality_checks: recorded inspections for a batch.
    # NOTE(review): unlike the previous schema, batch_id here has no
    # ForeignKeyConstraint to production_batches.id — confirm this is
    # intentional and not lost during autogeneration.
    op.create_table('quality_checks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_id', sa.UUID(), nullable=False),
        sa.Column('template_id', sa.UUID(), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        sa.Column('process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('checker_id', sa.String(length=100), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('pass_fail', sa.Boolean(), nullable=False),
        sa.Column('defect_count', sa.Integer(), nullable=False),
        sa.Column('defect_types', sa.JSON(), nullable=True),
        sa.Column('measured_weight', sa.Float(), nullable=True),
        sa.Column('measured_temperature', sa.Float(), nullable=True),
        sa.Column('measured_moisture', sa.Float(), nullable=True),
        sa.Column('measured_dimensions', sa.JSON(), nullable=True),
        sa.Column('stage_specific_data', sa.JSON(), nullable=True),
        sa.Column('target_weight', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('target_moisture', sa.Float(), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('within_tolerance', sa.Boolean(), nullable=True),
        sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
        sa.Column('corrective_actions', sa.JSON(), nullable=True),
        sa.Column('template_results', sa.JSON(), nullable=True),
        sa.Column('criteria_scores', sa.JSON(), nullable=True),
        sa.Column('check_notes', sa.Text(), nullable=True),
        sa.Column('photos_urls', sa.JSON(), nullable=True),
        sa.Column('certificate_url', sa.String(length=500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop all tables and indexes created by this revision, then the ENUM types.

    Bug fix: the autogenerated version dropped the tables but left the
    PostgreSQL ENUM types (processstage, equipmenttype, equipmentstatus,
    productionpriority, productionstatus) behind.  Since upgrade() creates
    those types implicitly via sa.Enum columns, a downgrade followed by a
    fresh upgrade would fail with 'type ... already exists'.  They are now
    dropped explicitly, mirroring the previous hand-written migration.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_quality_checks_tenant_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_template_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_process_stage'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_batch_id'), table_name='quality_checks')
    op.drop_table('quality_checks')
    op.drop_index(op.f('ix_quality_check_templates_tenant_id'), table_name='quality_check_templates')
    op.drop_index(op.f('ix_quality_check_templates_template_code'), table_name='quality_check_templates')
    op.drop_table('quality_check_templates')
    op.drop_index(op.f('ix_production_schedules_tenant_id'), table_name='production_schedules')
    op.drop_index(op.f('ix_production_schedules_schedule_date'), table_name='production_schedules')
    op.drop_table('production_schedules')
    op.drop_index(op.f('ix_production_capacity_tenant_id'), table_name='production_capacity')
    op.drop_index(op.f('ix_production_capacity_date'), table_name='production_capacity')
    op.drop_table('production_capacity')
    op.drop_index(op.f('ix_production_batches_tenant_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_status'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_product_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_current_process_stage'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_batch_number'), table_name='production_batches')
    op.drop_table('production_batches')
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')
    # Drop the ENUM types created implicitly by upgrade(); checkfirst makes
    # this safe even if a type was already removed out of band.
    for enum_name in (
        'processstage',
        'equipmenttype',
        'equipmentstatus',
        'productionpriority',
        'productionstatus',
    ):
        sa.Enum(name=enum_name).drop(op.get_bind(), checkfirst=True)
    # ### end Alembic commands ###

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for recipes service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('RECIPES_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,83 +0,0 @@
"""Initial schema for recipes service
Revision ID: 0001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial recipes-service schema.

    Tables: recipes (parent), recipe_ingredients and recipe_steps
    (children referencing recipes.id), each with lookup indexes.
    """
    # Parent table: one row per recipe, scoped by tenant_id.
    op.create_table('recipes',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(255), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('category', sa.String(100), nullable=True),
    sa.Column('cuisine', sa.String(100), nullable=True),
    sa.Column('difficulty_level', sa.String(50), nullable=True),
    sa.Column('preparation_time', sa.Integer(), nullable=True),
    sa.Column('cooking_time', sa.Integer(), nullable=True),
    sa.Column('total_time', sa.Integer(), nullable=True),
    sa.Column('servings', sa.Integer(), nullable=True),
    sa.Column('calories_per_serving', sa.Integer(), nullable=True),
    sa.Column('status', sa.String(50), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Lookup indexes for the common recipe filters.
    op.create_index(op.f('ix_recipes_tenant_id'), 'recipes', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_recipes_name'), 'recipes', ['name'], unique=False)
    op.create_index(op.f('ix_recipes_category'), 'recipes', ['category'], unique=False)
    op.create_index(op.f('ix_recipes_status'), 'recipes', ['status'], unique=False)
    # Child table: ingredient lines, FK to recipes.id.
    op.create_table('recipe_ingredients',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('recipe_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_name', sa.String(255), nullable=False),
    sa.Column('quantity', sa.Float(), nullable=False),
    sa.Column('unit', sa.String(50), nullable=False),
    sa.Column('optional', sa.Boolean(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['recipe_id'], ['recipes.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_recipe_ingredients_recipe_id'), 'recipe_ingredients', ['recipe_id'], unique=False)
    op.create_index(op.f('ix_recipe_ingredients_ingredient_name'), 'recipe_ingredients', ['ingredient_name'], unique=False)
    # Child table: ordered preparation steps, FK to recipes.id.
    op.create_table('recipe_steps',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('recipe_id', sa.UUID(), nullable=False),
    sa.Column('step_number', sa.Integer(), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('duration', sa.Integer(), nullable=True),
    sa.Column('temperature', sa.Float(), nullable=True),
    sa.ForeignKeyConstraint(['recipe_id'], ['recipes.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_recipe_steps_recipe_id'), 'recipe_steps', ['recipe_id'], unique=False)
    op.create_index(op.f('ix_recipe_steps_step_number'), 'recipe_steps', ['step_number'], unique=False)
def downgrade() -> None:
    """Drop the recipes-service tables and their indexes.

    Children go first: recipe_steps and recipe_ingredients hold foreign
    keys to recipes, so the recipes table is dropped last.
    """
    teardown = [
        ('recipe_steps', [
            op.f('ix_recipe_steps_step_number'),
            op.f('ix_recipe_steps_recipe_id'),
        ]),
        ('recipe_ingredients', [
            op.f('ix_recipe_ingredients_ingredient_name'),
            op.f('ix_recipe_ingredients_recipe_id'),
        ]),
        ('recipes', [
            op.f('ix_recipes_status'),
            op.f('ix_recipes_category'),
            op.f('ix_recipes_name'),
            op.f('ix_recipes_tenant_id'),
        ]),
    ]
    for table, indexes in teardown:
        for name in indexes:
            op.drop_index(name, table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,288 @@
"""initial_schema_20251001_1118
Revision ID: 3957346a472c
Revises:
Create Date: 2025-10-01 11:18:33.794800+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '3957346a472c'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial production/recipes schema.

    Tables in creation order: production_schedules, recipes,
    production_batches (FK -> recipes), recipe_ingredients (FK -> recipes),
    production_ingredient_consumption (FK -> production_batches and
    recipe_ingredients). PostgreSQL enum types (measurementunit,
    recipestatus, productionstatus, productionpriority) are created
    implicitly by the sa.Enum columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Daily production schedules, scoped per tenant.
    op.create_table('production_schedules',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('schedule_name', sa.String(length=255), nullable=True),
    sa.Column('total_planned_batches', sa.Integer(), nullable=False),
    sa.Column('total_planned_items', sa.Float(), nullable=False),
    sa.Column('estimated_production_hours', sa.Float(), nullable=True),
    sa.Column('estimated_material_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('is_published', sa.Boolean(), nullable=True),
    sa.Column('is_completed', sa.Boolean(), nullable=True),
    sa.Column('completion_percentage', sa.Float(), nullable=True),
    sa.Column('available_staff_hours', sa.Float(), nullable=True),
    sa.Column('oven_capacity_hours', sa.Float(), nullable=True),
    sa.Column('production_capacity_limit', sa.Float(), nullable=True),
    sa.Column('schedule_notes', sa.Text(), nullable=True),
    sa.Column('preparation_instructions', sa.Text(), nullable=True),
    sa.Column('special_requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('published_by', sa.UUID(), nullable=True),
    sa.Column('published_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes for tenant-scoped schedule queries, plus
    # single-column autogen indexes (op.f names).
    op.create_index('idx_production_schedules_completed', 'production_schedules', ['tenant_id', 'is_completed', 'schedule_date'], unique=False)
    op.create_index('idx_production_schedules_published', 'production_schedules', ['tenant_id', 'is_published', 'schedule_date'], unique=False)
    op.create_index('idx_production_schedules_tenant_date', 'production_schedules', ['tenant_id', 'schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    # Recipe master data: yields, timings, costing, production hints.
    op.create_table('recipes',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('recipe_code', sa.String(length=100), nullable=True),
    sa.Column('version', sa.String(length=20), nullable=False),
    sa.Column('finished_product_id', sa.UUID(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('category', sa.String(length=100), nullable=True),
    sa.Column('cuisine_type', sa.String(length=100), nullable=True),
    sa.Column('difficulty_level', sa.Integer(), nullable=False),
    sa.Column('yield_quantity', sa.Float(), nullable=False),
    sa.Column('yield_unit', sa.Enum('GRAMS', 'KILOGRAMS', 'MILLILITERS', 'LITERS', 'CUPS', 'TABLESPOONS', 'TEASPOONS', 'UNITS', 'PIECES', 'PERCENTAGE', name='measurementunit'), nullable=False),
    sa.Column('prep_time_minutes', sa.Integer(), nullable=True),
    sa.Column('cook_time_minutes', sa.Integer(), nullable=True),
    sa.Column('total_time_minutes', sa.Integer(), nullable=True),
    sa.Column('rest_time_minutes', sa.Integer(), nullable=True),
    sa.Column('estimated_cost_per_unit', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('last_calculated_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('cost_calculation_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('target_margin_percentage', sa.Float(), nullable=True),
    sa.Column('suggested_selling_price', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('instructions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('preparation_notes', sa.Text(), nullable=True),
    sa.Column('storage_instructions', sa.Text(), nullable=True),
    sa.Column('quality_standards', sa.Text(), nullable=True),
    sa.Column('serves_count', sa.Integer(), nullable=True),
    sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('dietary_tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('batch_size_multiplier', sa.Float(), nullable=False),
    sa.Column('minimum_batch_size', sa.Float(), nullable=True),
    sa.Column('maximum_batch_size', sa.Float(), nullable=True),
    sa.Column('optimal_production_temperature', sa.Float(), nullable=True),
    sa.Column('optimal_humidity', sa.Float(), nullable=True),
    sa.Column('quality_check_points', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('quality_check_configuration', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('common_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('status', sa.Enum('DRAFT', 'ACTIVE', 'TESTING', 'ARCHIVED', 'DISCONTINUED', name='recipestatus'), nullable=False),
    sa.Column('is_seasonal', sa.Boolean(), nullable=True),
    sa.Column('season_start_month', sa.Integer(), nullable=True),
    sa.Column('season_end_month', sa.Integer(), nullable=True),
    sa.Column('is_signature_item', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('updated_by', sa.UUID(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_recipes_category', 'recipes', ['tenant_id', 'category', 'status'], unique=False)
    op.create_index('idx_recipes_seasonal', 'recipes', ['tenant_id', 'is_seasonal', 'season_start_month', 'season_end_month'], unique=False)
    op.create_index('idx_recipes_signature', 'recipes', ['tenant_id', 'is_signature_item', 'status'], unique=False)
    op.create_index('idx_recipes_status', 'recipes', ['tenant_id', 'status'], unique=False)
    op.create_index('idx_recipes_tenant_name', 'recipes', ['tenant_id', 'name'], unique=False)
    op.create_index('idx_recipes_tenant_product', 'recipes', ['tenant_id', 'finished_product_id'], unique=False)
    op.create_index(op.f('ix_recipes_category'), 'recipes', ['category'], unique=False)
    op.create_index(op.f('ix_recipes_finished_product_id'), 'recipes', ['finished_product_id'], unique=False)
    op.create_index(op.f('ix_recipes_name'), 'recipes', ['name'], unique=False)
    op.create_index(op.f('ix_recipes_recipe_code'), 'recipes', ['recipe_code'], unique=False)
    op.create_index(op.f('ix_recipes_status'), 'recipes', ['status'], unique=False)
    op.create_index(op.f('ix_recipes_tenant_id'), 'recipes', ['tenant_id'], unique=False)
    # Production batches: one run of a recipe, FK -> recipes.id.
    op.create_table('production_batches',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('recipe_id', sa.UUID(), nullable=False),
    sa.Column('batch_number', sa.String(length=100), nullable=False),
    sa.Column('production_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('planned_quantity', sa.Float(), nullable=False),
    sa.Column('actual_quantity', sa.Float(), nullable=True),
    sa.Column('yield_percentage', sa.Float(), nullable=True),
    sa.Column('batch_size_multiplier', sa.Float(), nullable=False),
    sa.Column('status', sa.Enum('PLANNED', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'CANCELLED', name='productionstatus'), nullable=False),
    sa.Column('priority', sa.Enum('LOW', 'NORMAL', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
    sa.Column('assigned_staff', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('production_notes', sa.Text(), nullable=True),
    sa.Column('quality_score', sa.Float(), nullable=True),
    sa.Column('quality_notes', sa.Text(), nullable=True),
    sa.Column('defect_rate', sa.Float(), nullable=True),
    sa.Column('rework_required', sa.Boolean(), nullable=True),
    sa.Column('planned_material_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('actual_material_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('labor_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('overhead_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('total_production_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('cost_per_unit', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('production_temperature', sa.Float(), nullable=True),
    sa.Column('production_humidity', sa.Float(), nullable=True),
    sa.Column('oven_temperature', sa.Float(), nullable=True),
    sa.Column('baking_time_minutes', sa.Integer(), nullable=True),
    sa.Column('waste_quantity', sa.Float(), nullable=False),
    sa.Column('waste_reason', sa.String(length=255), nullable=True),
    sa.Column('efficiency_percentage', sa.Float(), nullable=True),
    sa.Column('customer_order_reference', sa.String(length=100), nullable=True),
    sa.Column('pre_order_quantity', sa.Float(), nullable=True),
    sa.Column('shelf_quantity', sa.Float(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('completed_by', sa.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['recipe_id'], ['recipes.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_production_batches_batch_number', 'production_batches', ['tenant_id', 'batch_number'], unique=False)
    op.create_index('idx_production_batches_priority', 'production_batches', ['tenant_id', 'priority', 'planned_start_time'], unique=False)
    op.create_index('idx_production_batches_recipe', 'production_batches', ['recipe_id', 'production_date'], unique=False)
    op.create_index('idx_production_batches_status', 'production_batches', ['tenant_id', 'status', 'production_date'], unique=False)
    op.create_index('idx_production_batches_tenant_date', 'production_batches', ['tenant_id', 'production_date'], unique=False)
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=False)
    op.create_index(op.f('ix_production_batches_production_date'), 'production_batches', ['production_date'], unique=False)
    op.create_index(op.f('ix_production_batches_recipe_id'), 'production_batches', ['recipe_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    # Recipe ingredient lines, FK -> recipes.id; ingredient_id points at
    # the inventory service (no FK here — presumably cross-service, verify).
    op.create_table('recipe_ingredients',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('recipe_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('quantity', sa.Float(), nullable=False),
    sa.Column('unit', sa.Enum('GRAMS', 'KILOGRAMS', 'MILLILITERS', 'LITERS', 'CUPS', 'TABLESPOONS', 'TEASPOONS', 'UNITS', 'PIECES', 'PERCENTAGE', name='measurementunit'), nullable=False),
    sa.Column('quantity_in_base_unit', sa.Float(), nullable=True),
    sa.Column('alternative_quantity', sa.Float(), nullable=True),
    sa.Column('alternative_unit', sa.Enum('GRAMS', 'KILOGRAMS', 'MILLILITERS', 'LITERS', 'CUPS', 'TABLESPOONS', 'TEASPOONS', 'UNITS', 'PIECES', 'PERCENTAGE', name='measurementunit'), nullable=True),
    sa.Column('preparation_method', sa.String(length=255), nullable=True),
    sa.Column('ingredient_notes', sa.Text(), nullable=True),
    sa.Column('is_optional', sa.Boolean(), nullable=True),
    sa.Column('ingredient_order', sa.Integer(), nullable=False),
    sa.Column('ingredient_group', sa.String(length=100), nullable=True),
    sa.Column('substitution_options', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('substitution_ratio', sa.Float(), nullable=True),
    sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('cost_updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['recipe_id'], ['recipes.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_recipe_ingredients_group', 'recipe_ingredients', ['recipe_id', 'ingredient_group', 'ingredient_order'], unique=False)
    op.create_index('idx_recipe_ingredients_ingredient', 'recipe_ingredients', ['ingredient_id'], unique=False)
    op.create_index('idx_recipe_ingredients_recipe', 'recipe_ingredients', ['recipe_id', 'ingredient_order'], unique=False)
    op.create_index('idx_recipe_ingredients_tenant', 'recipe_ingredients', ['tenant_id', 'recipe_id'], unique=False)
    op.create_index(op.f('ix_recipe_ingredients_ingredient_id'), 'recipe_ingredients', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_recipe_ingredients_recipe_id'), 'recipe_ingredients', ['recipe_id'], unique=False)
    op.create_index(op.f('ix_recipe_ingredients_tenant_id'), 'recipe_ingredients', ['tenant_id'], unique=False)
    # Per-batch ingredient consumption: planned vs actual usage,
    # FK -> production_batches.id and recipe_ingredients.id.
    op.create_table('production_ingredient_consumption',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('production_batch_id', sa.UUID(), nullable=False),
    sa.Column('recipe_ingredient_id', sa.UUID(), nullable=False),
    sa.Column('ingredient_id', sa.UUID(), nullable=False),
    sa.Column('stock_id', sa.UUID(), nullable=True),
    sa.Column('planned_quantity', sa.Float(), nullable=False),
    sa.Column('actual_quantity', sa.Float(), nullable=False),
    sa.Column('unit', sa.Enum('GRAMS', 'KILOGRAMS', 'MILLILITERS', 'LITERS', 'CUPS', 'TABLESPOONS', 'TEASPOONS', 'UNITS', 'PIECES', 'PERCENTAGE', name='measurementunit'), nullable=False),
    sa.Column('variance_quantity', sa.Float(), nullable=True),
    sa.Column('variance_percentage', sa.Float(), nullable=True),
    sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('consumption_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('consumption_notes', sa.Text(), nullable=True),
    sa.Column('staff_member', sa.UUID(), nullable=True),
    sa.Column('ingredient_condition', sa.String(length=50), nullable=True),
    sa.Column('quality_impact', sa.String(length=255), nullable=True),
    sa.Column('substitution_used', sa.Boolean(), nullable=True),
    sa.Column('substitution_details', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['production_batch_id'], ['production_batches.id'], ),
    sa.ForeignKeyConstraint(['recipe_ingredient_id'], ['recipe_ingredients.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_consumption_batch', 'production_ingredient_consumption', ['production_batch_id'], unique=False)
    op.create_index('idx_consumption_ingredient', 'production_ingredient_consumption', ['ingredient_id', 'consumption_time'], unique=False)
    op.create_index('idx_consumption_recipe_ingredient', 'production_ingredient_consumption', ['recipe_ingredient_id'], unique=False)
    op.create_index('idx_consumption_stock', 'production_ingredient_consumption', ['stock_id'], unique=False)
    op.create_index('idx_consumption_tenant', 'production_ingredient_consumption', ['tenant_id', 'consumption_time'], unique=False)
    op.create_index(op.f('ix_production_ingredient_consumption_ingredient_id'), 'production_ingredient_consumption', ['ingredient_id'], unique=False)
    op.create_index(op.f('ix_production_ingredient_consumption_production_batch_id'), 'production_ingredient_consumption', ['production_batch_id'], unique=False)
    op.create_index(op.f('ix_production_ingredient_consumption_recipe_ingredient_id'), 'production_ingredient_consumption', ['recipe_ingredient_id'], unique=False)
    op.create_index(op.f('ix_production_ingredient_consumption_stock_id'), 'production_ingredient_consumption', ['stock_id'], unique=False)
    op.create_index(op.f('ix_production_ingredient_consumption_tenant_id'), 'production_ingredient_consumption', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Tear down the initial schema in reverse dependency order.

    FK-bearing tables (ingredient consumption, recipe ingredients,
    batches) are dropped before the tables they reference (recipes,
    schedules). For each table the indexes go first, then the table.
    op.f(...) wraps the autogen single-column index names; composite
    'idx_*' indexes were created with literal names.
    """
    plan = [
        ('production_ingredient_consumption', [
            op.f('ix_production_ingredient_consumption_tenant_id'),
            op.f('ix_production_ingredient_consumption_stock_id'),
            op.f('ix_production_ingredient_consumption_recipe_ingredient_id'),
            op.f('ix_production_ingredient_consumption_production_batch_id'),
            op.f('ix_production_ingredient_consumption_ingredient_id'),
            'idx_consumption_tenant',
            'idx_consumption_stock',
            'idx_consumption_recipe_ingredient',
            'idx_consumption_ingredient',
            'idx_consumption_batch',
        ]),
        ('recipe_ingredients', [
            op.f('ix_recipe_ingredients_tenant_id'),
            op.f('ix_recipe_ingredients_recipe_id'),
            op.f('ix_recipe_ingredients_ingredient_id'),
            'idx_recipe_ingredients_tenant',
            'idx_recipe_ingredients_recipe',
            'idx_recipe_ingredients_ingredient',
            'idx_recipe_ingredients_group',
        ]),
        ('production_batches', [
            op.f('ix_production_batches_tenant_id'),
            op.f('ix_production_batches_status'),
            op.f('ix_production_batches_recipe_id'),
            op.f('ix_production_batches_production_date'),
            op.f('ix_production_batches_batch_number'),
            'idx_production_batches_tenant_date',
            'idx_production_batches_status',
            'idx_production_batches_recipe',
            'idx_production_batches_priority',
            'idx_production_batches_batch_number',
        ]),
        ('recipes', [
            op.f('ix_recipes_tenant_id'),
            op.f('ix_recipes_status'),
            op.f('ix_recipes_recipe_code'),
            op.f('ix_recipes_name'),
            op.f('ix_recipes_finished_product_id'),
            op.f('ix_recipes_category'),
            'idx_recipes_tenant_product',
            'idx_recipes_tenant_name',
            'idx_recipes_status',
            'idx_recipes_signature',
            'idx_recipes_seasonal',
            'idx_recipes_category',
        ]),
        ('production_schedules', [
            op.f('ix_production_schedules_tenant_id'),
            op.f('ix_production_schedules_schedule_date'),
            'idx_production_schedules_tenant_date',
            'idx_production_schedules_published',
            'idx_production_schedules_completed',
        ]),
    ]
    for table, index_names in plan:
        for index_name in index_names:
            op.drop_index(index_name, table_name=table)
        op.drop_table(table)

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for sales service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('SALES_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    # Delegate to the async runner; asyncio.run() creates a fresh event loop,
    # executes the coroutine to completion, and closes the loop afterwards.
    asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,75 +0,0 @@
"""Initial schema for sales service
Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial sales-service schema.

    Builds two tables:
      * ``sales_transactions`` — one row per payment/refund event.
      * ``sales_reports``      — pre-aggregated reporting rows per period.

    Each table is indexed on ``tenant_id`` plus its common filter columns.
    """
    # NOTE(review): monetary columns use sa.Float(); Numeric(precision, scale)
    # would avoid binary floating-point rounding — confirm before relying on
    # these values for accounting.
    op.create_table('sales_transactions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        # Optional links to order/customer records owned by other services.
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('customer_id', sa.UUID(), nullable=True),
        sa.Column('transaction_type', sa.String(50), nullable=False),
        sa.Column('payment_method', sa.String(50), nullable=True),
        sa.Column('total_amount', sa.Float(), nullable=False),
        sa.Column('tax_amount', sa.Float(), nullable=True),
        sa.Column('discount_amount', sa.Float(), nullable=True),
        # ISO-4217 style 3-letter code (e.g. "EUR") — presumed; verify callers.
        sa.Column('currency', sa.String(3), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('transaction_date', sa.DateTime(), nullable=False),
        # created_at is set by the database; updated_at is left to the app.
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Single-column indexes covering the usual lookup/filter paths.
    op.create_index(op.f('ix_sales_transactions_tenant_id'), 'sales_transactions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_order_id'), 'sales_transactions', ['order_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_customer_id'), 'sales_transactions', ['customer_id'], unique=False)
    op.create_index(op.f('ix_sales_transactions_transaction_type'), 'sales_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_sales_transactions_status'), 'sales_transactions', ['status'], unique=False)
    op.create_index(op.f('ix_sales_transactions_transaction_date'), 'sales_transactions', ['transaction_date'], unique=False)
    op.create_table('sales_reports',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('report_type', sa.String(100), nullable=False),
        sa.Column('report_date', sa.Date(), nullable=False),
        # Inclusive period the aggregates were computed over — presumed; confirm.
        sa.Column('period_start', sa.Date(), nullable=False),
        sa.Column('period_end', sa.Date(), nullable=False),
        sa.Column('total_sales', sa.Float(), nullable=False),
        sa.Column('total_transactions', sa.Integer(), nullable=False),
        sa.Column('average_transaction_value', sa.Float(), nullable=True),
        # Free-form JSON payloads; schema is owned by the application layer.
        sa.Column('top_products', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('metrics', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_sales_reports_tenant_id'), 'sales_reports', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_sales_reports_report_type'), 'sales_reports', ['report_type'], unique=False)
    op.create_index(op.f('ix_sales_reports_report_date'), 'sales_reports', ['report_date'], unique=False)
def downgrade() -> None:
    """Remove the sales-service schema, exactly reversing :func:`upgrade`.

    Indexes are dropped before their parent table, and tables are removed in
    the opposite order of creation.
    """
    for index_name in (
        'ix_sales_reports_report_date',
        'ix_sales_reports_report_type',
        'ix_sales_reports_tenant_id',
    ):
        op.drop_index(op.f(index_name), table_name='sales_reports')
    op.drop_table('sales_reports')

    for index_name in (
        'ix_sales_transactions_transaction_date',
        'ix_sales_transactions_status',
        'ix_sales_transactions_transaction_type',
        'ix_sales_transactions_customer_id',
        'ix_sales_transactions_order_id',
        'ix_sales_transactions_tenant_id',
    ):
        op.drop_index(op.f(index_name), table_name='sales_transactions')
    op.drop_table('sales_transactions')

View File

@@ -0,0 +1,103 @@
"""initial_schema_20251001_1118
Revision ID: a0ed92525634
Revises:
Create Date: 2025-10-01 11:18:26.606970+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'a0ed92525634'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the sales-data schema: raw sales rows plus CSV/import job tracking."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('sales_data',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        # FK-by-convention to the inventory service's product id (no DB-level FK).
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('quantity_sold', sa.Integer(), nullable=False),
        # Numeric(10, 2) keeps exact decimal money values.
        sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('revenue', sa.Numeric(precision=10, scale=2), nullable=False),
        sa.Column('cost_of_goods', sa.Numeric(precision=10, scale=2), nullable=True),
        # scale=2 with precision=5 — presumably a percentage; confirm with callers.
        sa.Column('discount_applied', sa.Numeric(precision=5, scale=2), nullable=True),
        sa.Column('location_id', sa.String(length=100), nullable=True),
        sa.Column('sales_channel', sa.String(length=50), nullable=True),
        # Where the row came from (e.g. manual entry vs import) — presumed; verify.
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.Column('is_validated', sa.Boolean(), nullable=True),
        sa.Column('validation_notes', sa.Text(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        # Contextual features, likely used by forecasting downstream.
        sa.Column('weather_condition', sa.String(length=50), nullable=True),
        sa.Column('is_holiday', sa.Boolean(), nullable=True),
        sa.Column('is_weekend', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes for the common tenant-scoped query patterns.
    op.create_index('idx_sales_channel_date', 'sales_data', ['sales_channel', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_date_range', 'sales_data', ['date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_inventory_product', 'sales_data', ['inventory_product_id', 'tenant_id'], unique=False)
    op.create_index('idx_sales_product_date', 'sales_data', ['inventory_product_id', 'date', 'tenant_id'], unique=False)
    op.create_index('idx_sales_source_validated', 'sales_data', ['source', 'is_validated', 'tenant_id'], unique=False)
    op.create_index('idx_sales_tenant_date', 'sales_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_sales_tenant_location', 'sales_data', ['tenant_id', 'location_id'], unique=False)
    # Auto-named (op.f) single-column indexes mirroring the model's index=True flags.
    op.create_index(op.f('ix_sales_data_date'), 'sales_data', ['date'], unique=False)
    op.create_index(op.f('ix_sales_data_inventory_product_id'), 'sales_data', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_sales_data_location_id'), 'sales_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_sales_data_tenant_id'), 'sales_data', ['tenant_id'], unique=False)
    op.create_table('sales_import_jobs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('filename', sa.String(length=255), nullable=False),
        sa.Column('file_size', sa.Integer(), nullable=True),
        sa.Column('import_type', sa.String(length=50), nullable=False),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('progress_percentage', sa.Float(), nullable=True),
        # Row-level accounting for progress reporting and error summaries.
        sa.Column('total_rows', sa.Integer(), nullable=True),
        sa.Column('processed_rows', sa.Integer(), nullable=True),
        sa.Column('successful_imports', sa.Integer(), nullable=True),
        sa.Column('failed_imports', sa.Integer(), nullable=True),
        sa.Column('duplicate_rows', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('validation_errors', sa.Text(), nullable=True),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_import_jobs_status_date', 'sales_import_jobs', ['status', 'created_at'], unique=False)
    op.create_index('idx_import_jobs_tenant_status', 'sales_import_jobs', ['tenant_id', 'status', 'created_at'], unique=False)
    op.create_index(op.f('ix_sales_import_jobs_tenant_id'), 'sales_import_jobs', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the import-job and sales-data tables, reversing :func:`upgrade`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # sales_import_jobs: remove indexes first, then the table.
    op.drop_index(op.f('ix_sales_import_jobs_tenant_id'), table_name='sales_import_jobs')
    for index_name in ('idx_import_jobs_tenant_status', 'idx_import_jobs_status_date'):
        op.drop_index(index_name, table_name='sales_import_jobs')
    op.drop_table('sales_import_jobs')

    # sales_data: auto-named (op.f) indexes first, then explicitly named ones.
    for index_name in (
        'ix_sales_data_tenant_id',
        'ix_sales_data_location_id',
        'ix_sales_data_inventory_product_id',
        'ix_sales_data_date',
    ):
        op.drop_index(op.f(index_name), table_name='sales_data')
    for index_name in (
        'idx_sales_tenant_location',
        'idx_sales_tenant_date',
        'idx_sales_source_validated',
        'idx_sales_product_date',
        'idx_sales_inventory_product',
        'idx_sales_date_range',
        'idx_sales_channel_date',
    ):
        op.drop_index(index_name, table_name='sales_data')
    op.drop_table('sales_data')
    # ### end Alembic commands ###

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for suppliers service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('SUPPLIERS_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    # Delegate to the async runner; asyncio.run() creates a fresh event loop,
    # executes the coroutine to completion, and closes the loop afterwards.
    asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,720 +0,0 @@
"""Initial schema for suppliers service
Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create suppliers table
op.create_table('suppliers',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('supplier_code', sa.String(50), nullable=True),
sa.Column('tax_id', sa.String(50), nullable=True),
sa.Column('registration_number', sa.String(100), nullable=True),
sa.Column('supplier_type', sa.Enum('ingredients', 'packaging', 'equipment', 'services', 'utilities', 'multi', name='suppliertype'), nullable=False),
sa.Column('status', sa.Enum('active', 'inactive', 'pending_approval', 'suspended', 'blacklisted', name='supplierstatus'), nullable=False),
sa.Column('contact_person', sa.String(200), nullable=True),
sa.Column('email', sa.String(254), nullable=True),
sa.Column('phone', sa.String(30), nullable=True),
sa.Column('mobile', sa.String(30), nullable=True),
sa.Column('website', sa.String(255), nullable=True),
sa.Column('address_line1', sa.String(255), nullable=True),
sa.Column('address_line2', sa.String(255), nullable=True),
sa.Column('city', sa.String(100), nullable=True),
sa.Column('state_province', sa.String(100), nullable=True),
sa.Column('postal_code', sa.String(20), nullable=True),
sa.Column('country', sa.String(100), nullable=True),
sa.Column('payment_terms', sa.Enum('cod', 'net_15', 'net_30', 'net_45', 'net_60', 'prepaid', 'credit_terms', name='paymentterms'), nullable=False),
sa.Column('credit_limit', sa.Numeric(12, 2), nullable=True),
sa.Column('currency', sa.String(3), nullable=False),
sa.Column('standard_lead_time', sa.Integer, nullable=False),
sa.Column('minimum_order_amount', sa.Numeric(10, 2), nullable=True),
sa.Column('delivery_area', sa.String(255), nullable=True),
sa.Column('quality_rating', sa.Float, nullable=True),
sa.Column('delivery_rating', sa.Float, nullable=True),
sa.Column('total_orders', sa.Integer, nullable=False),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text, nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('certifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('business_hours', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('specializations', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_suppliers_tenant_id'), 'suppliers', ['tenant_id'], unique=False)
op.create_index(op.f('ix_suppliers_name'), 'suppliers', ['name'], unique=False)
op.create_index(op.f('ix_suppliers_supplier_code'), 'suppliers', ['supplier_code'], unique=False)
op.create_index(op.f('ix_suppliers_email'), 'suppliers', ['email'], unique=False)
op.create_index(op.f('ix_suppliers_status'), 'suppliers', ['status'], unique=False)
op.create_index('ix_suppliers_tenant_name', 'suppliers', ['tenant_id', 'name'], unique=False)
op.create_index('ix_suppliers_tenant_status', 'suppliers', ['tenant_id', 'status'], unique=False)
op.create_index('ix_suppliers_tenant_type', 'suppliers', ['tenant_id', 'supplier_type'], unique=False)
op.create_index('ix_suppliers_quality_rating', 'suppliers', ['quality_rating'], unique=False)
# Create supplier_price_lists table
op.create_table('supplier_price_lists',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('product_code', sa.String(100), nullable=True),
sa.Column('unit_price', sa.Numeric(10, 4), nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('minimum_order_quantity', sa.Integer, nullable=True),
sa.Column('price_per_unit', sa.Numeric(10, 4), nullable=False),
sa.Column('tier_pricing', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean, nullable=False),
sa.Column('brand', sa.String(100), nullable=True),
sa.Column('packaging_size', sa.String(50), nullable=True),
sa.Column('origin_country', sa.String(100), nullable=True),
sa.Column('shelf_life_days', sa.Integer, nullable=True),
sa.Column('storage_requirements', sa.Text, nullable=True),
sa.Column('quality_specs', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('allergens', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_price_lists_tenant_id'), 'supplier_price_lists', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_price_lists_supplier_id'), 'supplier_price_lists', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_price_lists_inventory_product_id'), 'supplier_price_lists', ['inventory_product_id'], unique=False)
op.create_index('ix_price_lists_tenant_supplier', 'supplier_price_lists', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_price_lists_inventory_product', 'supplier_price_lists', ['inventory_product_id'], unique=False)
op.create_index('ix_price_lists_active', 'supplier_price_lists', ['is_active'], unique=False)
op.create_index('ix_price_lists_effective_date', 'supplier_price_lists', ['effective_date'], unique=False)
# Create purchase_orders table
op.create_table('purchase_orders',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('po_number', sa.String(50), nullable=False),
sa.Column('reference_number', sa.String(100), nullable=True),
sa.Column('status', sa.Enum('draft', 'pending_approval', 'approved', 'sent_to_supplier', 'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed', name='purchaseorderstatus'), nullable=False),
sa.Column('priority', sa.String(20), nullable=False),
sa.Column('order_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('required_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('subtotal', sa.Numeric(12, 2), nullable=False),
sa.Column('tax_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('shipping_cost', sa.Numeric(10, 2), nullable=False),
sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('currency', sa.String(3), nullable=False),
sa.Column('delivery_address', sa.Text, nullable=True),
sa.Column('delivery_instructions', sa.Text, nullable=True),
sa.Column('delivery_contact', sa.String(200), nullable=True),
sa.Column('delivery_phone', sa.String(30), nullable=True),
sa.Column('requires_approval', sa.Boolean, nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text, nullable=True),
sa.Column('sent_to_supplier_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_confirmation_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_reference', sa.String(100), nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('internal_notes', sa.Text, nullable=True),
sa.Column('terms_and_conditions', sa.Text, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_purchase_orders_tenant_id'), 'purchase_orders', ['tenant_id'], unique=False)
op.create_index(op.f('ix_purchase_orders_supplier_id'), 'purchase_orders', ['supplier_id'], unique=False)
op.create_index(op.f('ix_purchase_orders_po_number'), 'purchase_orders', ['po_number'], unique=False)
op.create_index(op.f('ix_purchase_orders_status'), 'purchase_orders', ['status'], unique=False)
op.create_index('ix_purchase_orders_tenant_supplier', 'purchase_orders', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_purchase_orders_tenant_status', 'purchase_orders', ['tenant_id', 'status'], unique=False)
op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'], unique=False)
op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'], unique=False)
# Create purchase_order_items table
op.create_table('purchase_order_items',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=False),
sa.Column('price_list_item_id', sa.UUID(), nullable=True),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('product_code', sa.String(10), nullable=True),
sa.Column('ordered_quantity', sa.Integer, nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('unit_price', sa.Numeric(10, 4), nullable=False),
sa.Column('line_total', sa.Numeric(12, 2), nullable=False),
sa.Column('received_quantity', sa.Integer, nullable=False),
sa.Column('remaining_quantity', sa.Integer, nullable=False),
sa.Column('quality_requirements', sa.Text, nullable=True),
sa.Column('item_notes', sa.Text, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.ForeignKeyConstraint(['price_list_item_id'], ['supplier_price_lists.id'], ),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_purchase_order_items_tenant_id'), 'purchase_order_items', ['tenant_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_purchase_order_id'), 'purchase_order_items', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_price_list_item_id'), 'purchase_order_items', ['price_list_item_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_inventory_product_id'), 'purchase_order_items', ['inventory_product_id'], unique=False)
op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'], unique=False)
op.create_index('ix_po_items_inventory_product', 'purchase_order_items', ['inventory_product_id'], unique=False)
# Create deliveries table
op.create_table('deliveries',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('delivery_number', sa.String(50), nullable=False),
sa.Column('supplier_delivery_note', sa.String(10), nullable=True),
sa.Column('status', sa.Enum('scheduled', 'in_transit', 'out_for_delivery', 'delivered', 'partially_delivered', 'failed_delivery', 'returned', name='deliverystatus'), nullable=False),
sa.Column('scheduled_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('actual_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('delivery_address', sa.Text, nullable=True),
sa.Column('delivery_contact', sa.String(20), nullable=True),
sa.Column('delivery_phone', sa.String(30), nullable=True),
sa.Column('carrier_name', sa.String(200), nullable=True),
sa.Column('tracking_number', sa.String(100), nullable=True),
sa.Column('inspection_passed', sa.Boolean, nullable=True),
sa.Column('inspection_notes', sa.Text, nullable=True),
sa.Column('quality_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('received_by', sa.UUID(), nullable=True),
sa.Column('received_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('photos', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_deliveries_tenant_id'), 'deliveries', ['tenant_id'], unique=False)
op.create_index(op.f('ix_deliveries_purchase_order_id'), 'deliveries', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_deliveries_supplier_id'), 'deliveries', ['supplier_id'], unique=False)
op.create_index(op.f('ix_deliveries_delivery_number'), 'deliveries', ['delivery_number'], unique=False)
op.create_index(op.f('ix_deliveries_status'), 'deliveries', ['status'], unique=False)
op.create_index('ix_deliveries_tenant_status', 'deliveries', ['tenant_id', 'status'], unique=False)
op.create_index('ix_deliveries_scheduled_date', 'deliveries', ['scheduled_date'], unique=False)
# Create delivery_items table
op.create_table('delivery_items',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('delivery_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_item_id', sa.UUID(), nullable=False),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('ordered_quantity', sa.Integer, nullable=False),
sa.Column('delivered_quantity', sa.Integer, nullable=False),
sa.Column('accepted_quantity', sa.Integer, nullable=False),
sa.Column('rejected_quantity', sa.Integer, nullable=False),
sa.Column('batch_lot_number', sa.String(100), nullable=True),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('quality_grade', sa.String(20), nullable=True),
sa.Column('quality_issues', sa.Text, nullable=True),
sa.Column('rejection_reason', sa.Text, nullable=True),
sa.Column('item_notes', sa.Text, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'], ),
sa.ForeignKeyConstraint(['purchase_order_item_id'], ['purchase_order_items.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_delivery_items_tenant_id'), 'delivery_items', ['tenant_id'], unique=False)
op.create_index(op.f('ix_delivery_items_delivery_id'), 'delivery_items', ['delivery_id'], unique=False)
op.create_index(op.f('ix_delivery_items_purchase_order_item_id'), 'delivery_items', ['purchase_order_item_id'], unique=False)
op.create_index(op.f('ix_delivery_items_inventory_product_id'), 'delivery_items', ['inventory_product_id'], unique=False)
op.create_index('ix_delivery_items_tenant_delivery', 'delivery_items', ['tenant_id', 'delivery_id'], unique=False)
op.create_index('ix_delivery_items_inventory_product', 'delivery_items', ['inventory_product_id'], unique=False)
# Create supplier_quality_reviews table
op.create_table('supplier_quality_reviews',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('delivery_id', sa.UUID(), nullable=True),
sa.Column('review_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('review_type', sa.String(50), nullable=False),
sa.Column('quality_rating', sa.Enum('excellent', 'good', 'average', 'poor', 'very_poor', name='qualityrating'), nullable=False),
sa.Column('delivery_rating', sa.Enum('excellent', 'good', 'average', 'poor', 'very_poor', name='deliveryrating'), nullable=False),
sa.Column('communication_rating', sa.Integer, nullable=False),
sa.Column('overall_rating', sa.Float, nullable=False),
sa.Column('quality_comments', sa.Text, nullable=True),
sa.Column('delivery_comments', sa.Text, nullable=True),
sa.Column('communication_comments', sa.Text, nullable=True),
sa.Column('improvement_suggestions', sa.Text, nullable=True),
sa.Column('quality_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('corrective_actions', sa.Text, nullable=True),
sa.Column('follow_up_required', sa.Boolean, nullable=False),
sa.Column('follow_up_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_final', sa.Boolean, nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('reviewed_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'], ),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_quality_reviews_tenant_id'), 'supplier_quality_reviews', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_supplier_id'), 'supplier_quality_reviews', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_purchase_order_id'), 'supplier_quality_reviews', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_delivery_id'), 'supplier_quality_reviews', ['delivery_id'], unique=False)
op.create_index('ix_quality_reviews_tenant_supplier', 'supplier_quality_reviews', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_quality_reviews_date', 'supplier_quality_reviews', ['review_date'], unique=False)
op.create_index('ix_quality_reviews_overall_rating', 'supplier_quality_reviews', ['overall_rating'], unique=False)
# Create supplier_invoices table
op.create_table('supplier_invoices',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('invoice_number', sa.String(50), nullable=False),
sa.Column('supplier_invoice_number', sa.String(10), nullable=False),
sa.Column('status', sa.Enum('pending', 'approved', 'paid', 'overdue', 'disputed', 'cancelled', name='invoicestatus'), nullable=False),
sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('due_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('subtotal', sa.Numeric(12, 2), nullable=False),
sa.Column('tax_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('shipping_cost', sa.Numeric(10, 2), nullable=False),
sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('currency', sa.String(3), nullable=False),
sa.Column('paid_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('payment_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('payment_reference', sa.String(10), nullable=True),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text, nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('invoice_document_url', sa.String(500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_invoices_tenant_id'), 'supplier_invoices', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_supplier_id'), 'supplier_invoices', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_purchase_order_id'), 'supplier_invoices', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_invoice_number'), 'supplier_invoices', ['invoice_number'], unique=False)
op.create_index(op.f('ix_supplier_invoices_status'), 'supplier_invoices', ['status'], unique=False)
op.create_index('ix_invoices_tenant_supplier', 'supplier_invoices', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_invoices_tenant_status', 'supplier_invoices', ['tenant_id', 'status'], unique=False)
op.create_index('ix_invoices_due_date', 'supplier_invoices', ['due_date'], unique=False)
op.create_index('ix_invoices_invoice_number', 'supplier_invoices', ['invoice_number'], unique=False)
# Create supplier_performance_metrics table
op.create_table('supplier_performance_metrics',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('metric_type', sa.Enum('delivery_performance', 'quality_score', 'price_competitiveness', 'communication_rating', 'order_accuracy', 'response_time', 'compliance_score', 'financial_stability', name='performancemetrictype'), nullable=False),
sa.Column('period', sa.Enum('daily', 'weekly', 'monthly', 'quarterly', 'yearly', name='performanceperiod'), nullable=False),
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('metric_value', sa.Float, nullable=False),
sa.Column('target_value', sa.Float, nullable=True),
sa.Column('previous_value', sa.Float, nullable=True),
sa.Column('total_orders', sa.Integer, nullable=False),
sa.Column('total_deliveries', sa.Integer, nullable=False),
sa.Column('on_time_deliveries', sa.Integer, nullable=False),
sa.Column('late_deliveries', sa.Integer, nullable=False),
sa.Column('quality_issues', sa.Integer, nullable=False),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('metrics_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('trend_direction', sa.String(20), nullable=True),
sa.Column('trend_percentage', sa.Float, nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('external_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('calculated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('calculated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_performance_metrics_tenant_id'), 'supplier_performance_metrics', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_supplier_id'), 'supplier_performance_metrics', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_metric_type'), 'supplier_performance_metrics', ['metric_type'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period'), 'supplier_performance_metrics', ['period'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period_start'), 'supplier_performance_metrics', ['period_start'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period_end'), 'supplier_performance_metrics', ['period_end'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_metric_value'), 'supplier_performance_metrics', ['metric_value'], unique=False)
op.create_index('ix_performance_metrics_tenant_supplier', 'supplier_performance_metrics', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_performance_metrics_type_period', 'supplier_performance_metrics', ['metric_type', 'period'], unique=False)
op.create_index('ix_performance_metrics_period_dates', 'supplier_performance_metrics', ['period_start', 'period_end'], unique=False)
op.create_index('ix_performance_metrics_value', 'supplier_performance_metrics', ['metric_value'], unique=False)
# Create supplier_alerts table
op.create_table('supplier_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('alert_type', sa.Enum('poor_quality', 'late_delivery', 'price_increase', 'low_performance', 'contract_expiry', 'compliance_issue', 'financial_risk', 'communication_issue', 'capacity_constraint', 'certification_expiry', name='alerttype'), nullable=False),
sa.Column('severity', sa.Enum('critical', 'high', 'medium', 'low', 'info', name='alertseverity'), nullable=False),
sa.Column('status', sa.Enum('active', 'acknowledged', 'in_progress', 'resolved', 'dismissed', name='alertstatus'), nullable=False),
sa.Column('title', sa.String(255), nullable=False),
sa.Column('message', sa.Text, nullable=False),
sa.Column('description', sa.Text, nullable=True),
sa.Column('trigger_value', sa.Float, nullable=True),
sa.Column('threshold_value', sa.Float, nullable=True),
sa.Column('metric_type', sa.Enum('delivery_performance', 'quality_score', 'price_competitiveness', 'communication_rating', 'order_accuracy', 'response_time', 'compliance_score', 'financial_stability', name='performancemetrictype'), nullable=True),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('delivery_id', sa.UUID(), nullable=True),
sa.Column('performance_metric_id', sa.UUID(), nullable=True),
sa.Column('triggered_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('acknowledged_by', sa.UUID(), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolved_by', sa.UUID(), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('resolution_notes', sa.Text, nullable=True),
sa.Column('auto_resolve', sa.Boolean, nullable=False),
sa.Column('auto_resolve_condition', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('escalated', sa.Boolean, nullable=False),
sa.Column('escalated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('escalated_to', sa.UUID(), nullable=True),
sa.Column('notification_sent', sa.Boolean, nullable=False),
sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('priority_score', sa.Integer, nullable=False),
sa.Column('business_impact', sa.String(50), nullable=True),
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['performance_metric_id'], ['supplier_performance_metrics.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_alerts_tenant_id'), 'supplier_alerts', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_supplier_id'), 'supplier_alerts', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_alert_type'), 'supplier_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_supplier_alerts_severity'), 'supplier_alerts', ['severity'], unique=False)
op.create_index(op.f('ix_supplier_alerts_status'), 'supplier_alerts', ['status'], unique=False)
op.create_index(op.f('ix_supplier_alerts_metric_type'), 'supplier_alerts', ['metric_type'], unique=False)
op.create_index(op.f('ix_supplier_alerts_purchase_order_id'), 'supplier_alerts', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_delivery_id'), 'supplier_alerts', ['delivery_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_performance_metric_id'), 'supplier_alerts', ['performance_metric_id'], unique=False)
op.create_index('ix_supplier_alerts_tenant_supplier', 'supplier_alerts', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_supplier_alerts_type_severity', 'supplier_alerts', ['alert_type', 'severity'], unique=False)
op.create_index('ix_supplier_alerts_status_triggered', 'supplier_alerts', ['status', 'triggered_at'], unique=False)
op.create_index('ix_supplier_alerts_priority', 'supplier_alerts', ['priority_score'], unique=False)
# Create supplier_scorecards table
op.create_table('supplier_scorecards',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('scorecard_name', sa.String(255), nullable=False),
sa.Column('period', sa.Enum('daily', 'weekly', 'monthly', 'quarterly', 'yearly', name='performanceperiod'), nullable=False),
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('overall_score', sa.Float, nullable=False),
sa.Column('quality_score', sa.Float, nullable=False),
sa.Column('delivery_score', sa.Float, nullable=False),
sa.Column('cost_score', sa.Float, nullable=False),
sa.Column('service_score', sa.Float, nullable=False),
sa.Column('overall_rank', sa.Integer, nullable=True),
sa.Column('category_rank', sa.Integer, nullable=True),
sa.Column('total_suppliers_evaluated', sa.Integer, nullable=True),
sa.Column('on_time_delivery_rate', sa.Float, nullable=False),
sa.Column('quality_rejection_rate', sa.Float, nullable=False),
sa.Column('order_accuracy_rate', sa.Float, nullable=False),
sa.Column('response_time_hours', sa.Float, nullable=False),
sa.Column('cost_variance_percentage', sa.Float, nullable=False),
sa.Column('total_orders_processed', sa.Integer, nullable=False),
sa.Column('total_amount_processed', sa.Numeric(12, 2), nullable=False),
sa.Column('average_order_value', sa.Numeric(10, 2), nullable=False),
sa.Column('cost_savings_achieved', sa.Numeric(10, 2), nullable=False),
sa.Column('score_trend', sa.String(20), nullable=True),
sa.Column('score_change_percentage', sa.Float, nullable=True),
sa.Column('strengths', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('improvement_areas', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('is_final', sa.Boolean, nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('attachments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('generated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('generated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_scorecards_tenant_id'), 'supplier_scorecards', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_supplier_id'), 'supplier_scorecards', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_scorecard_name'), 'supplier_scorecards', ['scorecard_name'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period'), 'supplier_scorecards', ['period'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period_start'), 'supplier_scorecards', ['period_start'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period_end'), 'supplier_scorecards', ['period_end'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_overall_score'), 'supplier_scorecards', ['overall_score'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_is_final'), 'supplier_scorecards', ['is_final'], unique=False)
op.create_index('ix_scorecards_tenant_supplier', 'supplier_scorecards', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_scorecards_period_dates', 'supplier_scorecards', ['period_start', 'period_end'], unique=False)
op.create_index('ix_scorecards_overall_score', 'supplier_scorecards', ['overall_score'], unique=False)
op.create_index('ix_scorecards_period', 'supplier_scorecards', ['period'], unique=False)
op.create_index('ix_scorecards_final', 'supplier_scorecards', ['is_final'], unique=False)
# Create supplier_benchmarks table
op.create_table('supplier_benchmarks',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('benchmark_name', sa.String(255), nullable=False),
sa.Column('benchmark_type', sa.String(50), nullable=False),
sa.Column('supplier_category', sa.String(100), nullable=True),
sa.Column('metric_type', sa.Enum('delivery_performance', 'quality_score', 'price_competitiveness', 'communication_rating', 'order_accuracy', 'response_time', 'compliance_score', 'financial_stability', name='performancemetrictype'), nullable=False),
sa.Column('excellent_threshold', sa.Float, nullable=False),
sa.Column('good_threshold', sa.Float, nullable=False),
sa.Column('acceptable_threshold', sa.Float, nullable=False),
sa.Column('poor_threshold', sa.Float, nullable=False),
sa.Column('data_source', sa.String(255), nullable=True),
sa.Column('sample_size', sa.Integer, nullable=True),
sa.Column('confidence_level', sa.Float, nullable=True),
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean, nullable=False),
sa.Column('description', sa.Text, nullable=True),
sa.Column('methodology', sa.Text, nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_benchmarks_tenant_id'), 'supplier_benchmarks', ['tenant_id'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_benchmark_name'), 'supplier_benchmarks', ['benchmark_name'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_benchmark_type'), 'supplier_benchmarks', ['benchmark_type'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_supplier_category'), 'supplier_benchmarks', ['supplier_category'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_metric_type'), 'supplier_benchmarks', ['metric_type'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_is_active'), 'supplier_benchmarks', ['is_active'], unique=False)
op.create_index('ix_benchmarks_tenant_type', 'supplier_benchmarks', ['tenant_id', 'benchmark_type'], unique=False)
op.create_index('ix_benchmarks_metric_type', 'supplier_benchmarks', ['metric_type'], unique=False)
op.create_index('ix_benchmarks_category', 'supplier_benchmarks', ['supplier_category'], unique=False)
op.create_index('ix_benchmarks_active', 'supplier_benchmarks', ['is_active'], unique=False)
# Create alert_rules table
op.create_table('alert_rules',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('rule_name', sa.String(255), nullable=False),
sa.Column('rule_description', sa.Text, nullable=True),
sa.Column('is_active', sa.Boolean, nullable=False),
sa.Column('alert_type', sa.Enum('poor_quality', 'late_delivery', 'price_increase', 'low_performance', 'contract_expiry', 'compliance_issue', 'financial_risk', 'communication_issue', 'capacity_constraint', 'certification_expiry', name='alerttype'), nullable=False),
sa.Column('severity', sa.Enum('critical', 'high', 'medium', 'low', 'info', name='alertseverity'), nullable=False),
sa.Column('metric_type', sa.Enum('delivery_performance', 'quality_score', 'price_competitiveness', 'communication_rating', 'order_accuracy', 'response_time', 'compliance_score', 'financial_stability', name='performancemetrictype'), nullable=True),
sa.Column('trigger_condition', sa.String(50), nullable=False),
sa.Column('threshold_value', sa.Float, nullable=False),
sa.Column('consecutive_violations', sa.Integer, nullable=False),
sa.Column('supplier_categories', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('supplier_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('exclude_suppliers', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('evaluation_period', sa.Enum('daily', 'weekly', 'monthly', 'quarterly', 'yearly', name='performanceperiod'), nullable=False),
sa.Column('time_window_hours', sa.Integer, nullable=True),
sa.Column('business_hours_only', sa.Boolean, nullable=False),
sa.Column('auto_resolve', sa.Boolean, nullable=False),
sa.Column('auto_resolve_threshold', sa.Float, nullable=True),
sa.Column('auto_resolve_duration_hours', sa.Integer, nullable=True),
sa.Column('notification_enabled', sa.Boolean, nullable=False),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('escalation_minutes', sa.Integer, nullable=True),
sa.Column('escalation_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('auto_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('priority', sa.Integer, nullable=False),
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('last_triggered', sa.DateTime(timezone=True), nullable=True),
sa.Column('trigger_count', sa.Integer, nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_alert_rules_tenant_id'), 'alert_rules', ['tenant_id'], unique=False)
op.create_index(op.f('ix_alert_rules_rule_name'), 'alert_rules', ['rule_name'], unique=False)
op.create_index(op.f('ix_alert_rules_is_active'), 'alert_rules', ['is_active'], unique=False)
op.create_index(op.f('ix_alert_rules_alert_type'), 'alert_rules', ['alert_type'], unique=False)
op.create_index(op.f('ix_alert_rules_severity'), 'alert_rules', ['severity'], unique=False)
op.create_index(op.f('ix_alert_rules_metric_type'), 'alert_rules', ['metric_type'], unique=False)
op.create_index(op.f('ix_alert_rules_priority'), 'alert_rules', ['priority'], unique=False)
op.create_index('ix_alert_rules_tenant_active', 'alert_rules', ['tenant_id', 'is_active'], unique=False)
op.create_index('ix_alert_rules_type_severity', 'alert_rules', ['alert_type', 'severity'], unique=False)
op.create_index('ix_alert_rules_metric_type', 'alert_rules', ['metric_type'], unique=False)
op.create_index('ix_alert_rules_priority', 'alert_rules', ['priority'], unique=False)
def downgrade() -> None:
    """Drop all supplier-management tables (reverse order of creation).

    Indexes are dropped explicitly before each table. Fix: the original
    revision dropped 'ix_alert_rules_priority' and 'ix_alert_rules_metric_type'
    twice (once via op.f() and once as a literal — op.f() on a fully spelled
    name resolves to the same identifier), which made the second DROP INDEX
    fail on PostgreSQL. Each index name is now dropped exactly once.

    NOTE(review): the Postgres ENUM types created by these tables
    (alerttype, alertseverity, performancemetrictype, ...) are not dropped
    here; confirm whether a separate cleanup is expected.
    """
    # --- alert_rules: each index exactly once, reverse order of creation ---
    op.drop_index('ix_alert_rules_priority', table_name='alert_rules')
    op.drop_index('ix_alert_rules_metric_type', table_name='alert_rules')
    op.drop_index('ix_alert_rules_type_severity', table_name='alert_rules')
    op.drop_index('ix_alert_rules_tenant_active', table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_severity'), table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_alert_type'), table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_is_active'), table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_rule_name'), table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_tenant_id'), table_name='alert_rules')
    op.drop_table('alert_rules')
    # --- supplier_benchmarks ---
    op.drop_index('ix_benchmarks_active', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_category', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_metric_type', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_tenant_type', table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_is_active'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_metric_type'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_supplier_category'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_benchmark_type'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_benchmark_name'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_tenant_id'), table_name='supplier_benchmarks')
    op.drop_table('supplier_benchmarks')
    # --- supplier_scorecards ---
    op.drop_index('ix_scorecards_final', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_period', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_overall_score', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_period_dates', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_tenant_supplier', table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_is_final'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_overall_score'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period_end'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period_start'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_scorecard_name'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_supplier_id'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_tenant_id'), table_name='supplier_scorecards')
    op.drop_table('supplier_scorecards')
    # --- supplier_alerts ---
    op.drop_index('ix_supplier_alerts_priority', table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_status_triggered', table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_type_severity', table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_tenant_supplier', table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_performance_metric_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_delivery_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_purchase_order_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_metric_type'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_status'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_severity'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_alert_type'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_supplier_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_tenant_id'), table_name='supplier_alerts')
    op.drop_table('supplier_alerts')
    # --- supplier_performance_metrics ---
    op.drop_index('ix_performance_metrics_value', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_period_dates', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_type_period', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_tenant_supplier', table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_metric_value'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period_end'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period_start'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_metric_type'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_supplier_id'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_tenant_id'), table_name='supplier_performance_metrics')
    op.drop_table('supplier_performance_metrics')
    # --- supplier_invoices ---
    op.drop_index('ix_invoices_invoice_number', table_name='supplier_invoices')
    op.drop_index('ix_invoices_due_date', table_name='supplier_invoices')
    op.drop_index('ix_invoices_tenant_status', table_name='supplier_invoices')
    op.drop_index('ix_invoices_tenant_supplier', table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_status'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_invoice_number'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_purchase_order_id'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_supplier_id'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_tenant_id'), table_name='supplier_invoices')
    op.drop_table('supplier_invoices')
    # --- supplier_quality_reviews ---
    op.drop_index('ix_quality_reviews_overall_rating', table_name='supplier_quality_reviews')
    op.drop_index('ix_quality_reviews_date', table_name='supplier_quality_reviews')
    op.drop_index('ix_quality_reviews_tenant_supplier', table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_delivery_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_purchase_order_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_supplier_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_tenant_id'), table_name='supplier_quality_reviews')
    op.drop_table('supplier_quality_reviews')
    # --- delivery_items ---
    op.drop_index('ix_delivery_items_inventory_product', table_name='delivery_items')
    op.drop_index('ix_delivery_items_tenant_delivery', table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_inventory_product_id'), table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_purchase_order_item_id'), table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_delivery_id'), table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_tenant_id'), table_name='delivery_items')
    op.drop_table('delivery_items')
    # --- deliveries ---
    op.drop_index('ix_deliveries_scheduled_date', table_name='deliveries')
    op.drop_index('ix_deliveries_tenant_status', table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_status'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_delivery_number'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_supplier_id'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_purchase_order_id'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_tenant_id'), table_name='deliveries')
    op.drop_table('deliveries')
    # --- purchase_order_items ---
    op.drop_index('ix_po_items_inventory_product', table_name='purchase_order_items')
    op.drop_index('ix_po_items_tenant_po', table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_inventory_product_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_price_list_item_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_purchase_order_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_tenant_id'), table_name='purchase_order_items')
    op.drop_table('purchase_order_items')
    # --- purchase_orders ---
    op.drop_index('ix_purchase_orders_delivery_date', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_order_date', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_tenant_status', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_tenant_supplier', table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_status'), table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_po_number'), table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_supplier_id'), table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_tenant_id'), table_name='purchase_orders')
    op.drop_table('purchase_orders')
    # --- supplier_price_lists ---
    op.drop_index('ix_price_lists_effective_date', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_active', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_inventory_product', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_tenant_supplier', table_name='supplier_price_lists')
    op.drop_index(op.f('ix_supplier_price_lists_inventory_product_id'), table_name='supplier_price_lists')
    op.drop_index(op.f('ix_supplier_price_lists_supplier_id'), table_name='supplier_price_lists')
    op.drop_index(op.f('ix_supplier_price_lists_tenant_id'), table_name='supplier_price_lists')
    op.drop_table('supplier_price_lists')
    # --- suppliers (dropped last: every other table FKs into it) ---
    op.drop_index('ix_suppliers_quality_rating', table_name='suppliers')
    op.drop_index('ix_suppliers_tenant_type', table_name='suppliers')
    op.drop_index('ix_suppliers_tenant_status', table_name='suppliers')
    op.drop_index('ix_suppliers_tenant_name', table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_status'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_email'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_supplier_code'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_name'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_tenant_id'), table_name='suppliers')
    op.drop_table('suppliers')

View File

@@ -0,0 +1,665 @@
"""initial_schema_20251001_1119
Revision ID: 38cf0f06a3f3
Revises:
Create Date: 2025-10-01 11:19:09.823424+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# Revision identifiers, used by Alembic to build the migration dependency graph.
revision: str = '38cf0f06a3f3'  # unique id of this migration script
# None means this is the initial (root) migration — nothing to revert to below it.
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None  # no named branch labels
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependencies
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('alert_rules',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('rule_name', sa.String(length=255), nullable=False),
sa.Column('rule_description', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('alert_type', sa.Enum('POOR_QUALITY', 'LATE_DELIVERY', 'PRICE_INCREASE', 'LOW_PERFORMANCE', 'CONTRACT_EXPIRY', 'COMPLIANCE_ISSUE', 'FINANCIAL_RISK', 'COMMUNICATION_ISSUE', 'CAPACITY_CONSTRAINT', 'CERTIFICATION_EXPIRY', name='alerttype'), nullable=False),
sa.Column('severity', sa.Enum('CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO', name='alertseverity'), nullable=False),
sa.Column('metric_type', sa.Enum('DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS', 'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME', 'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY', name='performancemetrictype'), nullable=True),
sa.Column('trigger_condition', sa.String(length=50), nullable=False),
sa.Column('threshold_value', sa.Float(), nullable=False),
sa.Column('consecutive_violations', sa.Integer(), nullable=False),
sa.Column('supplier_categories', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('supplier_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('exclude_suppliers', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('evaluation_period', sa.Enum('DAILY', 'WEEKLY', 'MONTHLY', 'QUARTERLY', 'YEARLY', name='performanceperiod'), nullable=False),
sa.Column('time_window_hours', sa.Integer(), nullable=True),
sa.Column('business_hours_only', sa.Boolean(), nullable=False),
sa.Column('auto_resolve', sa.Boolean(), nullable=False),
sa.Column('auto_resolve_threshold', sa.Float(), nullable=True),
sa.Column('auto_resolve_duration_hours', sa.Integer(), nullable=True),
sa.Column('notification_enabled', sa.Boolean(), nullable=False),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('escalation_minutes', sa.Integer(), nullable=True),
sa.Column('escalation_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('auto_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('priority', sa.Integer(), nullable=False),
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('last_triggered', sa.DateTime(timezone=True), nullable=True),
sa.Column('trigger_count', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_alert_rules_alert_type'), 'alert_rules', ['alert_type'], unique=False)
op.create_index(op.f('ix_alert_rules_metric_type'), 'alert_rules', ['metric_type'], unique=False)
op.create_index('ix_alert_rules_priority', 'alert_rules', ['priority'], unique=False)
op.create_index('ix_alert_rules_tenant_active', 'alert_rules', ['tenant_id', 'is_active'], unique=False)
op.create_index(op.f('ix_alert_rules_tenant_id'), 'alert_rules', ['tenant_id'], unique=False)
op.create_index('ix_alert_rules_type_severity', 'alert_rules', ['alert_type', 'severity'], unique=False)
op.create_table('supplier_benchmarks',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('benchmark_name', sa.String(length=255), nullable=False),
sa.Column('benchmark_type', sa.String(length=50), nullable=False),
sa.Column('supplier_category', sa.String(length=100), nullable=True),
sa.Column('metric_type', sa.Enum('DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS', 'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME', 'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY', name='performancemetrictype'), nullable=False),
sa.Column('excellent_threshold', sa.Float(), nullable=False),
sa.Column('good_threshold', sa.Float(), nullable=False),
sa.Column('acceptable_threshold', sa.Float(), nullable=False),
sa.Column('poor_threshold', sa.Float(), nullable=False),
sa.Column('data_source', sa.String(length=255), nullable=True),
sa.Column('sample_size', sa.Integer(), nullable=True),
sa.Column('confidence_level', sa.Float(), nullable=True),
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('methodology', sa.Text(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_benchmarks_active', 'supplier_benchmarks', ['is_active'], unique=False)
op.create_index('ix_benchmarks_category', 'supplier_benchmarks', ['supplier_category'], unique=False)
op.create_index('ix_benchmarks_metric_type', 'supplier_benchmarks', ['metric_type'], unique=False)
op.create_index('ix_benchmarks_tenant_type', 'supplier_benchmarks', ['tenant_id', 'benchmark_type'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_benchmark_type'), 'supplier_benchmarks', ['benchmark_type'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_metric_type'), 'supplier_benchmarks', ['metric_type'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_supplier_category'), 'supplier_benchmarks', ['supplier_category'], unique=False)
op.create_index(op.f('ix_supplier_benchmarks_tenant_id'), 'supplier_benchmarks', ['tenant_id'], unique=False)
op.create_table('suppliers',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('supplier_code', sa.String(length=50), nullable=True),
sa.Column('tax_id', sa.String(length=50), nullable=True),
sa.Column('registration_number', sa.String(length=100), nullable=True),
sa.Column('supplier_type', sa.Enum('ingredients', 'packaging', 'equipment', 'services', 'utilities', 'multi', name='suppliertype'), nullable=False),
sa.Column('status', sa.Enum('active', 'inactive', 'pending_approval', 'suspended', 'blacklisted', name='supplierstatus'), nullable=False),
sa.Column('contact_person', sa.String(length=200), nullable=True),
sa.Column('email', sa.String(length=254), nullable=True),
sa.Column('phone', sa.String(length=30), nullable=True),
sa.Column('mobile', sa.String(length=30), nullable=True),
sa.Column('website', sa.String(length=255), nullable=True),
sa.Column('address_line1', sa.String(length=255), nullable=True),
sa.Column('address_line2', sa.String(length=255), nullable=True),
sa.Column('city', sa.String(length=100), nullable=True),
sa.Column('state_province', sa.String(length=100), nullable=True),
sa.Column('postal_code', sa.String(length=20), nullable=True),
sa.Column('country', sa.String(length=100), nullable=True),
sa.Column('payment_terms', sa.Enum('cod', 'net_15', 'net_30', 'net_45', 'net_60', 'prepaid', 'credit_terms', name='paymentterms'), nullable=False),
sa.Column('credit_limit', sa.Numeric(precision=12, scale=2), nullable=True),
sa.Column('currency', sa.String(length=3), nullable=False),
sa.Column('standard_lead_time', sa.Integer(), nullable=False),
sa.Column('minimum_order_amount', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('delivery_area', sa.String(length=255), nullable=True),
sa.Column('quality_rating', sa.Float(), nullable=True),
sa.Column('delivery_rating', sa.Float(), nullable=True),
sa.Column('total_orders', sa.Integer(), nullable=False),
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('certifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('business_hours', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('specializations', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_suppliers_name'), 'suppliers', ['name'], unique=False)
op.create_index('ix_suppliers_quality_rating', 'suppliers', ['quality_rating'], unique=False)
op.create_index(op.f('ix_suppliers_status'), 'suppliers', ['status'], unique=False)
op.create_index(op.f('ix_suppliers_supplier_code'), 'suppliers', ['supplier_code'], unique=False)
op.create_index(op.f('ix_suppliers_supplier_type'), 'suppliers', ['supplier_type'], unique=False)
op.create_index(op.f('ix_suppliers_tenant_id'), 'suppliers', ['tenant_id'], unique=False)
op.create_index('ix_suppliers_tenant_name', 'suppliers', ['tenant_id', 'name'], unique=False)
op.create_index('ix_suppliers_tenant_status', 'suppliers', ['tenant_id', 'status'], unique=False)
op.create_index('ix_suppliers_tenant_type', 'suppliers', ['tenant_id', 'supplier_type'], unique=False)
op.create_table('purchase_orders',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('po_number', sa.String(length=50), nullable=False),
sa.Column('reference_number', sa.String(length=100), nullable=True),
sa.Column('status', sa.Enum('draft', 'pending_approval', 'approved', 'sent_to_supplier', 'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed', name='purchaseorderstatus'), nullable=False),
sa.Column('priority', sa.String(length=20), nullable=False),
sa.Column('order_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('required_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('subtotal', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('tax_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('shipping_cost', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('currency', sa.String(length=3), nullable=False),
sa.Column('delivery_address', sa.Text(), nullable=True),
sa.Column('delivery_instructions', sa.Text(), nullable=True),
sa.Column('delivery_contact', sa.String(length=200), nullable=True),
sa.Column('delivery_phone', sa.String(length=30), nullable=True),
sa.Column('requires_approval', sa.Boolean(), nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('sent_to_supplier_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_confirmation_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_reference', sa.String(length=100), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('internal_notes', sa.Text(), nullable=True),
sa.Column('terms_and_conditions', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'], unique=False)
op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'], unique=False)
op.create_index('ix_purchase_orders_po_number', 'purchase_orders', ['po_number'], unique=False)
op.create_index(op.f('ix_purchase_orders_status'), 'purchase_orders', ['status'], unique=False)
op.create_index(op.f('ix_purchase_orders_supplier_id'), 'purchase_orders', ['supplier_id'], unique=False)
op.create_index(op.f('ix_purchase_orders_tenant_id'), 'purchase_orders', ['tenant_id'], unique=False)
op.create_index('ix_purchase_orders_tenant_status', 'purchase_orders', ['tenant_id', 'status'], unique=False)
op.create_index('ix_purchase_orders_tenant_supplier', 'purchase_orders', ['tenant_id', 'supplier_id'], unique=False)
op.create_table('supplier_performance_metrics',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('metric_type', sa.Enum('DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS', 'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME', 'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY', name='performancemetrictype'), nullable=False),
sa.Column('period', sa.Enum('DAILY', 'WEEKLY', 'MONTHLY', 'QUARTERLY', 'YEARLY', name='performanceperiod'), nullable=False),
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('metric_value', sa.Float(), nullable=False),
sa.Column('target_value', sa.Float(), nullable=True),
sa.Column('previous_value', sa.Float(), nullable=True),
sa.Column('total_orders', sa.Integer(), nullable=False),
sa.Column('total_deliveries', sa.Integer(), nullable=False),
sa.Column('on_time_deliveries', sa.Integer(), nullable=False),
sa.Column('late_deliveries', sa.Integer(), nullable=False),
sa.Column('quality_issues', sa.Integer(), nullable=False),
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('metrics_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('trend_direction', sa.String(length=20), nullable=True),
sa.Column('trend_percentage', sa.Float(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('external_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('calculated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('calculated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_performance_metrics_period_dates', 'supplier_performance_metrics', ['period_start', 'period_end'], unique=False)
op.create_index('ix_performance_metrics_tenant_supplier', 'supplier_performance_metrics', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_performance_metrics_type_period', 'supplier_performance_metrics', ['metric_type', 'period'], unique=False)
op.create_index('ix_performance_metrics_value', 'supplier_performance_metrics', ['metric_value'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_metric_type'), 'supplier_performance_metrics', ['metric_type'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period'), 'supplier_performance_metrics', ['period'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period_end'), 'supplier_performance_metrics', ['period_end'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_period_start'), 'supplier_performance_metrics', ['period_start'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_supplier_id'), 'supplier_performance_metrics', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_performance_metrics_tenant_id'), 'supplier_performance_metrics', ['tenant_id'], unique=False)
op.create_table('supplier_price_lists',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('product_code', sa.String(length=100), nullable=True),
sa.Column('unit_price', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('unit_of_measure', sa.String(length=20), nullable=False),
sa.Column('minimum_order_quantity', sa.Integer(), nullable=True),
sa.Column('price_per_unit', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('tier_pricing', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('brand', sa.String(length=100), nullable=True),
sa.Column('packaging_size', sa.String(length=50), nullable=True),
sa.Column('origin_country', sa.String(length=100), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('storage_requirements', sa.Text(), nullable=True),
sa.Column('quality_specs', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('allergens', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.Column('updated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_price_lists_active', 'supplier_price_lists', ['is_active'], unique=False)
op.create_index('ix_price_lists_effective_date', 'supplier_price_lists', ['effective_date'], unique=False)
op.create_index('ix_price_lists_inventory_product', 'supplier_price_lists', ['inventory_product_id'], unique=False)
op.create_index('ix_price_lists_tenant_supplier', 'supplier_price_lists', ['tenant_id', 'supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_price_lists_inventory_product_id'), 'supplier_price_lists', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_supplier_price_lists_supplier_id'), 'supplier_price_lists', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_price_lists_tenant_id'), 'supplier_price_lists', ['tenant_id'], unique=False)
op.create_table('supplier_scorecards',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('scorecard_name', sa.String(length=255), nullable=False),
sa.Column('period', sa.Enum('DAILY', 'WEEKLY', 'MONTHLY', 'QUARTERLY', 'YEARLY', name='performanceperiod'), nullable=False),
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
sa.Column('overall_score', sa.Float(), nullable=False),
sa.Column('quality_score', sa.Float(), nullable=False),
sa.Column('delivery_score', sa.Float(), nullable=False),
sa.Column('cost_score', sa.Float(), nullable=False),
sa.Column('service_score', sa.Float(), nullable=False),
sa.Column('overall_rank', sa.Integer(), nullable=True),
sa.Column('category_rank', sa.Integer(), nullable=True),
sa.Column('total_suppliers_evaluated', sa.Integer(), nullable=True),
sa.Column('on_time_delivery_rate', sa.Float(), nullable=False),
sa.Column('quality_rejection_rate', sa.Float(), nullable=False),
sa.Column('order_accuracy_rate', sa.Float(), nullable=False),
sa.Column('response_time_hours', sa.Float(), nullable=False),
sa.Column('cost_variance_percentage', sa.Float(), nullable=False),
sa.Column('total_orders_processed', sa.Integer(), nullable=False),
sa.Column('total_amount_processed', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('average_order_value', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('cost_savings_achieved', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('score_trend', sa.String(length=20), nullable=True),
sa.Column('score_change_percentage', sa.Float(), nullable=True),
sa.Column('strengths', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('improvement_areas', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('is_final', sa.Boolean(), nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('attachments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('generated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('generated_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_scorecards_final', 'supplier_scorecards', ['is_final'], unique=False)
op.create_index('ix_scorecards_overall_score', 'supplier_scorecards', ['overall_score'], unique=False)
op.create_index('ix_scorecards_period', 'supplier_scorecards', ['period'], unique=False)
op.create_index('ix_scorecards_period_dates', 'supplier_scorecards', ['period_start', 'period_end'], unique=False)
op.create_index('ix_scorecards_tenant_supplier', 'supplier_scorecards', ['tenant_id', 'supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period'), 'supplier_scorecards', ['period'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period_end'), 'supplier_scorecards', ['period_end'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_period_start'), 'supplier_scorecards', ['period_start'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_supplier_id'), 'supplier_scorecards', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_scorecards_tenant_id'), 'supplier_scorecards', ['tenant_id'], unique=False)
op.create_table('deliveries',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('delivery_number', sa.String(length=50), nullable=False),
sa.Column('supplier_delivery_note', sa.String(length=100), nullable=True),
sa.Column('status', sa.Enum('scheduled', 'in_transit', 'out_for_delivery', 'delivered', 'partially_delivered', 'failed_delivery', 'returned', name='deliverystatus'), nullable=False),
sa.Column('scheduled_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('actual_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('delivery_address', sa.Text(), nullable=True),
sa.Column('delivery_contact', sa.String(length=200), nullable=True),
sa.Column('delivery_phone', sa.String(length=30), nullable=True),
sa.Column('carrier_name', sa.String(length=200), nullable=True),
sa.Column('tracking_number', sa.String(length=100), nullable=True),
sa.Column('inspection_passed', sa.Boolean(), nullable=True),
sa.Column('inspection_notes', sa.Text(), nullable=True),
sa.Column('quality_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('received_by', sa.UUID(), nullable=True),
sa.Column('received_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('photos', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_deliveries_delivery_number', 'deliveries', ['delivery_number'], unique=False)
op.create_index(op.f('ix_deliveries_purchase_order_id'), 'deliveries', ['purchase_order_id'], unique=False)
op.create_index('ix_deliveries_scheduled_date', 'deliveries', ['scheduled_date'], unique=False)
op.create_index(op.f('ix_deliveries_status'), 'deliveries', ['status'], unique=False)
op.create_index(op.f('ix_deliveries_supplier_id'), 'deliveries', ['supplier_id'], unique=False)
op.create_index(op.f('ix_deliveries_tenant_id'), 'deliveries', ['tenant_id'], unique=False)
op.create_index('ix_deliveries_tenant_status', 'deliveries', ['tenant_id', 'status'], unique=False)
op.create_table('purchase_order_items',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=False),
sa.Column('price_list_item_id', sa.UUID(), nullable=True),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('product_code', sa.String(length=100), nullable=True),
sa.Column('ordered_quantity', sa.Integer(), nullable=False),
sa.Column('unit_of_measure', sa.String(length=20), nullable=False),
sa.Column('unit_price', sa.Numeric(precision=10, scale=4), nullable=False),
sa.Column('line_total', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('received_quantity', sa.Integer(), nullable=False),
sa.Column('remaining_quantity', sa.Integer(), nullable=False),
sa.Column('quality_requirements', sa.Text(), nullable=True),
sa.Column('item_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['price_list_item_id'], ['supplier_price_lists.id'], ),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_po_items_inventory_product', 'purchase_order_items', ['inventory_product_id'], unique=False)
op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_inventory_product_id'), 'purchase_order_items', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_price_list_item_id'), 'purchase_order_items', ['price_list_item_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_purchase_order_id'), 'purchase_order_items', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_tenant_id'), 'purchase_order_items', ['tenant_id'], unique=False)
op.create_table('supplier_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('alert_type', sa.Enum('POOR_QUALITY', 'LATE_DELIVERY', 'PRICE_INCREASE', 'LOW_PERFORMANCE', 'CONTRACT_EXPIRY', 'COMPLIANCE_ISSUE', 'FINANCIAL_RISK', 'COMMUNICATION_ISSUE', 'CAPACITY_CONSTRAINT', 'CERTIFICATION_EXPIRY', name='alerttype'), nullable=False),
sa.Column('severity', sa.Enum('CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO', name='alertseverity'), nullable=False),
sa.Column('status', sa.Enum('ACTIVE', 'ACKNOWLEDGED', 'IN_PROGRESS', 'RESOLVED', 'DISMISSED', name='alertstatus'), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('trigger_value', sa.Float(), nullable=True),
sa.Column('threshold_value', sa.Float(), nullable=True),
sa.Column('metric_type', sa.Enum('DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS', 'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME', 'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY', name='performancemetrictype'), nullable=True),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('delivery_id', sa.UUID(), nullable=True),
sa.Column('performance_metric_id', sa.UUID(), nullable=True),
sa.Column('triggered_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('acknowledged_by', sa.UUID(), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolved_by', sa.UUID(), nullable=True),
sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('auto_resolve', sa.Boolean(), nullable=False),
sa.Column('auto_resolve_condition', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('escalated', sa.Boolean(), nullable=False),
sa.Column('escalated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('escalated_to', sa.UUID(), nullable=True),
sa.Column('notification_sent', sa.Boolean(), nullable=False),
sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('priority_score', sa.Integer(), nullable=False),
sa.Column('business_impact', sa.String(length=50), nullable=True),
sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['performance_metric_id'], ['supplier_performance_metrics.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_supplier_alerts_alert_type'), 'supplier_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_supplier_alerts_delivery_id'), 'supplier_alerts', ['delivery_id'], unique=False)
op.create_index('ix_supplier_alerts_metric_type', 'supplier_alerts', ['metric_type'], unique=False)
op.create_index('ix_supplier_alerts_priority', 'supplier_alerts', ['priority_score'], unique=False)
op.create_index(op.f('ix_supplier_alerts_purchase_order_id'), 'supplier_alerts', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_severity'), 'supplier_alerts', ['severity'], unique=False)
op.create_index(op.f('ix_supplier_alerts_status'), 'supplier_alerts', ['status'], unique=False)
op.create_index('ix_supplier_alerts_status_triggered', 'supplier_alerts', ['status', 'triggered_at'], unique=False)
op.create_index(op.f('ix_supplier_alerts_supplier_id'), 'supplier_alerts', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_alerts_tenant_id'), 'supplier_alerts', ['tenant_id'], unique=False)
op.create_index('ix_supplier_alerts_tenant_supplier', 'supplier_alerts', ['tenant_id', 'supplier_id'], unique=False)
op.create_index('ix_supplier_alerts_type_severity', 'supplier_alerts', ['alert_type', 'severity'], unique=False)
op.create_table('supplier_invoices',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('invoice_number', sa.String(length=50), nullable=False),
sa.Column('supplier_invoice_number', sa.String(length=100), nullable=False),
sa.Column('status', sa.Enum('pending', 'approved', 'paid', 'overdue', 'disputed', 'cancelled', name='invoicestatus'), nullable=False),
sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('due_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('subtotal', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('tax_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('shipping_cost', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('currency', sa.String(length=3), nullable=False),
sa.Column('paid_amount', sa.Numeric(precision=12, scale=2), nullable=False),
sa.Column('payment_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('payment_reference', sa.String(length=100), nullable=True),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('invoice_document_url', sa.String(length=500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_invoices_due_date', 'supplier_invoices', ['due_date'], unique=False)
op.create_index('ix_invoices_invoice_number', 'supplier_invoices', ['invoice_number'], unique=False)
op.create_index('ix_invoices_tenant_status', 'supplier_invoices', ['tenant_id', 'status'], unique=False)
op.create_index('ix_invoices_tenant_supplier', 'supplier_invoices', ['tenant_id', 'supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_invoice_number'), 'supplier_invoices', ['invoice_number'], unique=False)
op.create_index(op.f('ix_supplier_invoices_purchase_order_id'), 'supplier_invoices', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_status'), 'supplier_invoices', ['status'], unique=False)
op.create_index(op.f('ix_supplier_invoices_supplier_id'), 'supplier_invoices', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_invoices_tenant_id'), 'supplier_invoices', ['tenant_id'], unique=False)
op.create_table('delivery_items',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('delivery_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_item_id', sa.UUID(), nullable=False),
sa.Column('inventory_product_id', sa.UUID(), nullable=False),
sa.Column('ordered_quantity', sa.Integer(), nullable=False),
sa.Column('delivered_quantity', sa.Integer(), nullable=False),
sa.Column('accepted_quantity', sa.Integer(), nullable=False),
sa.Column('rejected_quantity', sa.Integer(), nullable=False),
sa.Column('batch_lot_number', sa.String(length=100), nullable=True),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('quality_grade', sa.String(length=20), nullable=True),
sa.Column('quality_issues', sa.Text(), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('item_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'], ),
sa.ForeignKeyConstraint(['purchase_order_item_id'], ['purchase_order_items.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_delivery_items_delivery_id'), 'delivery_items', ['delivery_id'], unique=False)
op.create_index('ix_delivery_items_inventory_product', 'delivery_items', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_delivery_items_inventory_product_id'), 'delivery_items', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_delivery_items_purchase_order_item_id'), 'delivery_items', ['purchase_order_item_id'], unique=False)
op.create_index('ix_delivery_items_tenant_delivery', 'delivery_items', ['tenant_id', 'delivery_id'], unique=False)
op.create_index(op.f('ix_delivery_items_tenant_id'), 'delivery_items', ['tenant_id'], unique=False)
op.create_table('supplier_quality_reviews',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=False),
sa.Column('purchase_order_id', sa.UUID(), nullable=True),
sa.Column('delivery_id', sa.UUID(), nullable=True),
sa.Column('review_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('review_type', sa.String(length=50), nullable=False),
sa.Column('quality_rating', sa.Enum('excellent', 'good', 'average', 'poor', 'very_poor', name='qualityrating'), nullable=False),
sa.Column('delivery_rating', sa.Enum('excellent', 'good', 'average', 'poor', 'very_poor', name='deliveryrating'), nullable=False),
sa.Column('communication_rating', sa.Integer(), nullable=False),
sa.Column('overall_rating', sa.Float(), nullable=False),
sa.Column('quality_comments', sa.Text(), nullable=True),
sa.Column('delivery_comments', sa.Text(), nullable=True),
sa.Column('communication_comments', sa.Text(), nullable=True),
sa.Column('improvement_suggestions', sa.Text(), nullable=True),
sa.Column('quality_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('corrective_actions', sa.Text(), nullable=True),
sa.Column('follow_up_required', sa.Boolean(), nullable=False),
sa.Column('follow_up_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_final', sa.Boolean(), nullable=False),
sa.Column('approved_by', sa.UUID(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('reviewed_by', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'], ),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_quality_reviews_date', 'supplier_quality_reviews', ['review_date'], unique=False)
op.create_index('ix_quality_reviews_overall_rating', 'supplier_quality_reviews', ['overall_rating'], unique=False)
op.create_index('ix_quality_reviews_tenant_supplier', 'supplier_quality_reviews', ['tenant_id', 'supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_delivery_id'), 'supplier_quality_reviews', ['delivery_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_purchase_order_id'), 'supplier_quality_reviews', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_supplier_id'), 'supplier_quality_reviews', ['supplier_id'], unique=False)
op.create_index(op.f('ix_supplier_quality_reviews_tenant_id'), 'supplier_quality_reviews', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
    """Revert the initial supplier/alert-processor schema.

    Drops every index and table created by upgrade(), in reverse
    dependency order so that child tables (and their foreign keys)
    are removed before the parent tables they reference.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # supplier_quality_reviews — references suppliers, purchase_orders, deliveries
    op.drop_index(op.f('ix_supplier_quality_reviews_tenant_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_supplier_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_purchase_order_id'), table_name='supplier_quality_reviews')
    op.drop_index(op.f('ix_supplier_quality_reviews_delivery_id'), table_name='supplier_quality_reviews')
    op.drop_index('ix_quality_reviews_tenant_supplier', table_name='supplier_quality_reviews')
    op.drop_index('ix_quality_reviews_overall_rating', table_name='supplier_quality_reviews')
    op.drop_index('ix_quality_reviews_date', table_name='supplier_quality_reviews')
    op.drop_table('supplier_quality_reviews')
    # delivery_items — references deliveries, purchase_order_items
    op.drop_index(op.f('ix_delivery_items_tenant_id'), table_name='delivery_items')
    op.drop_index('ix_delivery_items_tenant_delivery', table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_purchase_order_item_id'), table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_inventory_product_id'), table_name='delivery_items')
    op.drop_index('ix_delivery_items_inventory_product', table_name='delivery_items')
    op.drop_index(op.f('ix_delivery_items_delivery_id'), table_name='delivery_items')
    op.drop_table('delivery_items')
    # supplier_invoices — references suppliers, purchase_orders
    op.drop_index(op.f('ix_supplier_invoices_tenant_id'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_supplier_id'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_status'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_purchase_order_id'), table_name='supplier_invoices')
    op.drop_index(op.f('ix_supplier_invoices_invoice_number'), table_name='supplier_invoices')
    op.drop_index('ix_invoices_tenant_supplier', table_name='supplier_invoices')
    op.drop_index('ix_invoices_tenant_status', table_name='supplier_invoices')
    op.drop_index('ix_invoices_invoice_number', table_name='supplier_invoices')
    op.drop_index('ix_invoices_due_date', table_name='supplier_invoices')
    op.drop_table('supplier_invoices')
    # supplier_alerts — references suppliers, supplier_performance_metrics
    op.drop_index('ix_supplier_alerts_type_severity', table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_tenant_supplier', table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_tenant_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_supplier_id'), table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_status_triggered', table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_status'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_severity'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_purchase_order_id'), table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_priority', table_name='supplier_alerts')
    op.drop_index('ix_supplier_alerts_metric_type', table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_delivery_id'), table_name='supplier_alerts')
    op.drop_index(op.f('ix_supplier_alerts_alert_type'), table_name='supplier_alerts')
    op.drop_table('supplier_alerts')
    # purchase_order_items — references purchase_orders
    op.drop_index(op.f('ix_purchase_order_items_tenant_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_purchase_order_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_price_list_item_id'), table_name='purchase_order_items')
    op.drop_index(op.f('ix_purchase_order_items_inventory_product_id'), table_name='purchase_order_items')
    op.drop_index('ix_po_items_tenant_po', table_name='purchase_order_items')
    op.drop_index('ix_po_items_inventory_product', table_name='purchase_order_items')
    op.drop_table('purchase_order_items')
    # deliveries — references suppliers, purchase_orders
    op.drop_index('ix_deliveries_tenant_status', table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_tenant_id'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_supplier_id'), table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_status'), table_name='deliveries')
    op.drop_index('ix_deliveries_scheduled_date', table_name='deliveries')
    op.drop_index(op.f('ix_deliveries_purchase_order_id'), table_name='deliveries')
    op.drop_index('ix_deliveries_delivery_number', table_name='deliveries')
    op.drop_table('deliveries')
    # supplier_scorecards
    op.drop_index(op.f('ix_supplier_scorecards_tenant_id'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_supplier_id'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period_start'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period_end'), table_name='supplier_scorecards')
    op.drop_index(op.f('ix_supplier_scorecards_period'), table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_tenant_supplier', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_period_dates', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_period', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_overall_score', table_name='supplier_scorecards')
    op.drop_index('ix_scorecards_final', table_name='supplier_scorecards')
    op.drop_table('supplier_scorecards')
    # supplier_price_lists
    op.drop_index(op.f('ix_supplier_price_lists_tenant_id'), table_name='supplier_price_lists')
    op.drop_index(op.f('ix_supplier_price_lists_supplier_id'), table_name='supplier_price_lists')
    op.drop_index(op.f('ix_supplier_price_lists_inventory_product_id'), table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_tenant_supplier', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_inventory_product', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_effective_date', table_name='supplier_price_lists')
    op.drop_index('ix_price_lists_active', table_name='supplier_price_lists')
    op.drop_table('supplier_price_lists')
    # supplier_performance_metrics
    op.drop_index(op.f('ix_supplier_performance_metrics_tenant_id'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_supplier_id'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period_start'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period_end'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_period'), table_name='supplier_performance_metrics')
    op.drop_index(op.f('ix_supplier_performance_metrics_metric_type'), table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_value', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_type_period', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_tenant_supplier', table_name='supplier_performance_metrics')
    op.drop_index('ix_performance_metrics_period_dates', table_name='supplier_performance_metrics')
    op.drop_table('supplier_performance_metrics')
    # purchase_orders — references suppliers
    op.drop_index('ix_purchase_orders_tenant_supplier', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_tenant_status', table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_tenant_id'), table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_supplier_id'), table_name='purchase_orders')
    op.drop_index(op.f('ix_purchase_orders_status'), table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_po_number', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_order_date', table_name='purchase_orders')
    op.drop_index('ix_purchase_orders_delivery_date', table_name='purchase_orders')
    op.drop_table('purchase_orders')
    # suppliers — root parent table for the supplier domain
    op.drop_index('ix_suppliers_tenant_type', table_name='suppliers')
    op.drop_index('ix_suppliers_tenant_status', table_name='suppliers')
    op.drop_index('ix_suppliers_tenant_name', table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_tenant_id'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_supplier_type'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_supplier_code'), table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_status'), table_name='suppliers')
    op.drop_index('ix_suppliers_quality_rating', table_name='suppliers')
    op.drop_index(op.f('ix_suppliers_name'), table_name='suppliers')
    op.drop_table('suppliers')
    # supplier_benchmarks — standalone table, no FK dependencies
    op.drop_index(op.f('ix_supplier_benchmarks_tenant_id'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_supplier_category'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_metric_type'), table_name='supplier_benchmarks')
    op.drop_index(op.f('ix_supplier_benchmarks_benchmark_type'), table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_tenant_type', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_metric_type', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_category', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_active', table_name='supplier_benchmarks')
    op.drop_table('supplier_benchmarks')
    # alert_rules — standalone table, no FK dependencies
    op.drop_index('ix_alert_rules_type_severity', table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_tenant_id'), table_name='alert_rules')
    op.drop_index('ix_alert_rules_tenant_active', table_name='alert_rules')
    op.drop_index('ix_alert_rules_priority', table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_metric_type'), table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_alert_type'), table_name='alert_rules')
    op.drop_table('alert_rules')
    # ### end Alembic commands ###

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for tenant service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('TENANT_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:

View File

@@ -1,95 +0,0 @@
"""Initial schema for tenant service
Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial tenant-service schema.

    Creates three tables in dependency order: tenants (parent), then
    tenant_members and subscriptions, both of which reference
    tenants.id with ON DELETE CASCADE.  Timestamps default to UTC via
    a server-side timezone('utc', now()) expression.
    """
    # Create tenants table
    op.create_table('tenants',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('subdomain', sa.String(100), nullable=True),
        sa.Column('business_type', sa.String(100), nullable=True, default="bakery"),
        sa.Column('business_model', sa.String(100), nullable=True, default="individual_bakery"),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(100), nullable=True, default="Madrid"),
        sa.Column('postal_code', sa.String(10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('phone', sa.String(20), nullable=True),
        sa.Column('email', sa.String(255), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
        sa.Column('subscription_tier', sa.String(50), nullable=True, default="starter"),
        sa.Column('ml_model_trained', sa.Boolean(), nullable=True, default=False),
        sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
        # NOTE(review): owner_id has no ForeignKeyConstraint — presumably it
        # points at a users table owned by another service; confirm.
        sa.Column('owner_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)
    # Create tenant_members table
    op.create_table('tenant_members',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('role', sa.String(50), nullable=True, default="member"),
        sa.Column('permissions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
        sa.Column('invited_by', sa.UUID(), nullable=True),
        sa.Column('invited_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_tenant_id'), 'tenant_members', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)
    # Create subscriptions table
    op.create_table('subscriptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan', sa.String(50), nullable=True, default="starter"),
        sa.Column('status', sa.String(50), nullable=True, default="active"),
        sa.Column('monthly_price', sa.Float(), nullable=True, default=0.0),
        sa.Column('billing_cycle', sa.String(20), nullable=True, default="monthly"),
        sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('max_users', sa.Integer(), nullable=True, default=5),
        sa.Column('max_locations', sa.Integer(), nullable=True, default=1),
        sa.Column('max_products', sa.Integer(), nullable=True, default=50),
        sa.Column('features', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_subscriptions_tenant_id'), 'subscriptions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_subscriptions_status'), 'subscriptions', ['status'], unique=False)
def downgrade() -> None:
    """Tear down the tenant-service schema created by upgrade().

    Child tables (subscriptions, tenant_members) are removed before the
    parent tenants table; each table's indexes are dropped first.
    """
    # (table, indexes to drop) in reverse dependency order
    teardown = [
        ('subscriptions', ['ix_subscriptions_status', 'ix_subscriptions_tenant_id']),
        ('tenant_members', ['ix_tenant_members_user_id', 'ix_tenant_members_tenant_id']),
        ('tenants', ['ix_tenants_owner_id']),
    ]
    for table, index_names in teardown:
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,90 @@
"""initial_schema_20251001_1119
Revision ID: 1e8aebb4d9ce
Revises:
Create Date: 2025-10-01 11:19:18.038250+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '1e8aebb4d9ce'  # unique id of this migration
down_revision: Union[str, None] = None  # None: first migration in the chain
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial tenant-service schema (autogenerated).

    Creates tenants (parent), then subscriptions and tenant_members,
    both referencing tenants.id with ON DELETE CASCADE.  Column
    defaults live in the ORM models, not as server defaults here.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tenants',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=200), nullable=False),
    sa.Column('subdomain', sa.String(length=100), nullable=True),
    sa.Column('business_type', sa.String(length=100), nullable=True),
    sa.Column('business_model', sa.String(length=100), nullable=True),
    sa.Column('address', sa.Text(), nullable=False),
    sa.Column('city', sa.String(length=100), nullable=True),
    sa.Column('postal_code', sa.String(length=10), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('phone', sa.String(length=20), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('subscription_tier', sa.String(length=50), nullable=True),
    sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
    sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('owner_id', sa.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)
    op.create_table('subscriptions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('plan', sa.String(length=50), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=True),
    sa.Column('monthly_price', sa.Float(), nullable=True),
    sa.Column('billing_cycle', sa.String(length=20), nullable=True),
    sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('max_users', sa.Integer(), nullable=True),
    sa.Column('max_locations', sa.Integer(), nullable=True),
    sa.Column('max_products', sa.Integer(), nullable=True),
    sa.Column('features', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tenant_members',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('role', sa.String(length=50), nullable=True),
    sa.Column('permissions', sa.Text(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('invited_by', sa.UUID(), nullable=True),
    sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): autogenerate emitted no index on tenant_members.tenant_id,
    # subscriptions.tenant_id or subscriptions.status — confirm this matches
    # the intended model definitions (index=True flags).
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop all tenant-service tables created by this revision.

    Child tables (tenant_members, subscriptions) are removed before the
    parent tenants table so foreign keys never block the drops.
    """
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    for child_table in ('tenant_members', 'subscriptions'):
        op.drop_table(child_table)
    op.drop_index(op.f('ix_tenants_owner_id'), table_name='tenants')
    op.drop_table('tenants')

View File

@@ -1,7 +1,6 @@
"""Alembic environment configuration for training service"""
import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config
# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('TRAINING_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Fallback to settings
database_url = getattr(settings, 'DATABASE_URL', None)
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
    # NOTE(review): this call was elided by the diff rendering; restored from
    # the standard Alembic template — confirm against VCS.
    fileConfig(config.config_file_name)

# Set target metadata so 'autogenerate' can diff models against the DB.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL (no Engine/DBAPI needed) so that
    `alembic upgrade --sql` can emit SQL scripts without a live database.
    """
    url = config.get_main_option("sqlalchemy.url")
    # NOTE(review): the configure() arguments were elided by the diff
    # rendering; restored from the standard Alembic template — confirm via VCS.
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection.

    Sync callback handed to `AsyncConnection.run_sync()` by the async runner.
    """
    # NOTE(review): any extra configure() options between target_metadata and
    # the closing paren were elided by the diff rendering — confirm via VCS.
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async support.

    Builds an AsyncEngine from the [alembic] config section (sqlalchemy.*
    keys) and runs the migrations through `do_run_migrations` on one
    connection, disposing of the engine afterwards.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        # NOTE(review): the lines from here to dispose() were elided by the
        # diff rendering; restored from the standard Alembic asyncio template
        # — confirm against VCS (including the `pool` import at file top).
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode (drives the async runner to completion)."""
    asyncio.run(run_async_migrations())


# Dispatch on the mode Alembic was invoked in.
if context.is_offline_mode():
    run_migrations_offline()
else:
    # NOTE(review): the else-body was cut off at the edge of this view; the
    # standard template calls run_migrations_online() here — confirm via VCS.
    run_migrations_online()

View File

@@ -1,78 +0,0 @@
"""Initial schema for training service
Revision ID: 0001
Revises:
Create Date: 2025-09-30 18:00:00.0000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the training_jobs and ml_models tables plus their lookup indexes."""
    op.create_table(
        'training_jobs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('job_name', sa.String(255), nullable=False),
        sa.Column('job_type', sa.String(100), nullable=False),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('progress', sa.Float(), nullable=True),
        sa.Column('parameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('metrics', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('training_data_path', sa.String(500), nullable=True),
        sa.Column('model_path', sa.String(500), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Non-unique lookup indexes, in the original creation order.
    for col in ('tenant_id', 'model_id', 'status', 'job_type'):
        op.create_index(op.f(f'ix_training_jobs_{col}'), 'training_jobs', [col], unique=False)

    op.create_table(
        'ml_models',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('version', sa.String(50), nullable=False),
        sa.Column('model_type', sa.String(100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        sa.Column('accuracy', sa.Float(), nullable=True),
        sa.Column('f1_score', sa.Float(), nullable=True),
        sa.Column('precision', sa.Float(), nullable=True),
        sa.Column('recall', sa.Float(), nullable=True),
        sa.Column('model_path', sa.String(500), nullable=True),
        sa.Column('hyperparameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('training_data_info', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    for col in ('tenant_id', 'name', 'version', 'status'):
        op.create_index(op.f(f'ix_ml_models_{col}'), 'ml_models', [col], unique=False)
def downgrade() -> None:
    """Drop ml_models then training_jobs, reversing upgrade() creation order."""
    for col in ('status', 'version', 'name', 'tenant_id'):
        op.drop_index(op.f(f'ix_ml_models_{col}'), table_name='ml_models')
    op.drop_table('ml_models')

    for col in ('job_type', 'status', 'model_id', 'tenant_id'):
        op.drop_index(op.f(f'ix_training_jobs_{col}'), table_name='training_jobs')
    op.drop_table('training_jobs')

View File

@@ -0,0 +1,159 @@
"""initial_schema_20251001_1118
Revision ID: 121e47ff97c4
Revises:
Create Date: 2025-10-01 11:18:37.223786+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '121e47ff97c4'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the training-service schema: artifacts, performance metrics,
    training logs, trained models, and the training job queue."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Files produced by training runs (model binaries etc.), tracked per tenant
    # with checksum/size and an optional expiry.
    op.create_table('model_artifacts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('model_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('artifact_type', sa.String(length=50), nullable=False),
    sa.Column('file_path', sa.String(length=1000), nullable=False),
    sa.Column('file_size_bytes', sa.Integer(), nullable=True),
    sa.Column('checksum', sa.String(length=255), nullable=True),
    sa.Column('storage_location', sa.String(length=100), nullable=False),
    sa.Column('compression', sa.String(length=50), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_artifacts_id'), 'model_artifacts', ['id'], unique=False)
    op.create_index(op.f('ix_model_artifacts_model_id'), 'model_artifacts', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_artifacts_tenant_id'), 'model_artifacts', ['tenant_id'], unique=False)
    # Evaluation results (MAE/MSE/RMSE/MAPE/R2 etc.) for a model over a
    # per-product evaluation window.
    op.create_table('model_performance_metrics',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('model_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('mae', sa.Float(), nullable=True),
    sa.Column('mse', sa.Float(), nullable=True),
    sa.Column('rmse', sa.Float(), nullable=True),
    sa.Column('mape', sa.Float(), nullable=True),
    sa.Column('r2_score', sa.Float(), nullable=True),
    sa.Column('accuracy_percentage', sa.Float(), nullable=True),
    sa.Column('prediction_confidence', sa.Float(), nullable=True),
    sa.Column('evaluation_period_start', sa.DateTime(), nullable=True),
    sa.Column('evaluation_period_end', sa.DateTime(), nullable=True),
    sa.Column('evaluation_samples', sa.Integer(), nullable=True),
    sa.Column('measured_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_id'), 'model_performance_metrics', ['id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_inventory_product_id'), 'model_performance_metrics', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)
    # Per-job status/progress log; job_id is unique so each job has one row.
    op.create_table('model_training_logs',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('job_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('progress', sa.Integer(), nullable=True),
    sa.Column('current_step', sa.String(length=500), nullable=True),
    sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('config', sa.JSON(), nullable=True),
    sa.Column('results', sa.JSON(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_training_logs_id'), 'model_training_logs', ['id'], unique=False)
    op.create_index(op.f('ix_model_training_logs_job_id'), 'model_training_logs', ['job_id'], unique=True)
    op.create_index(op.f('ix_model_training_logs_tenant_id'), 'model_training_logs', ['tenant_id'], unique=False)
    # Registry of trained models per tenant/product, with metrics,
    # hyperparameters, lifecycle flags (is_active/is_production) and audit dates.
    op.create_table('trained_models',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('model_type', sa.String(), nullable=True),
    sa.Column('model_version', sa.String(), nullable=True),
    sa.Column('job_id', sa.String(), nullable=False),
    sa.Column('model_path', sa.String(), nullable=False),
    sa.Column('metadata_path', sa.String(), nullable=True),
    sa.Column('mape', sa.Float(), nullable=True),
    sa.Column('mae', sa.Float(), nullable=True),
    sa.Column('rmse', sa.Float(), nullable=True),
    sa.Column('r2_score', sa.Float(), nullable=True),
    sa.Column('training_samples', sa.Integer(), nullable=True),
    sa.Column('hyperparameters', sa.JSON(), nullable=True),
    sa.Column('features_used', sa.JSON(), nullable=True),
    sa.Column('normalization_params', sa.JSON(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_production', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('training_start_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('training_end_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('data_quality_score', sa.Float(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('created_by', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_trained_models_inventory_product_id'), 'trained_models', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_trained_models_tenant_id'), 'trained_models', ['tenant_id'], unique=False)
    # Pending/scheduled training work with priority and retry bookkeeping;
    # job_id is unique per queued job.
    op.create_table('training_job_queue',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('job_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('job_type', sa.String(length=50), nullable=False),
    sa.Column('priority', sa.Integer(), nullable=True),
    sa.Column('config', sa.JSON(), nullable=True),
    sa.Column('scheduled_at', sa.DateTime(), nullable=True),
    sa.Column('started_at', sa.DateTime(), nullable=True),
    sa.Column('estimated_duration_minutes', sa.Integer(), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('retry_count', sa.Integer(), nullable=True),
    sa.Column('max_retries', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('cancelled_by', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_training_job_queue_id'), 'training_job_queue', ['id'], unique=False)
    op.create_index(op.f('ix_training_job_queue_job_id'), 'training_job_queue', ['job_id'], unique=True)
    op.create_index(op.f('ix_training_job_queue_tenant_id'), 'training_job_queue', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop every training-service table, reversing upgrade() creation order."""
    # (table, index-name suffixes) pairs; indexes are dropped before their table.
    plan = (
        ('training_job_queue', ('tenant_id', 'job_id', 'id')),
        ('trained_models', ('tenant_id', 'inventory_product_id')),
        ('model_training_logs', ('tenant_id', 'job_id', 'id')),
        ('model_performance_metrics', ('tenant_id', 'model_id', 'inventory_product_id', 'id')),
        ('model_artifacts', ('tenant_id', 'model_id', 'id')),
    )
    for table, suffixes in plan:
        for suffix in suffixes:
            op.drop_index(op.f(f'ix_{table}_{suffix}'), table_name=table)
        op.drop_table(table)