Add supplier and improve inventory frontend

Urtzi Alfaro
2025-09-18 23:32:53 +02:00
parent ae77a0e1c5
commit d61056df33
40 changed files with 2022 additions and 629 deletions


@@ -0,0 +1,93 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = .
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# migration filename format, using Alembic's datetime tokens
# (the option Alembic reads for this is "file_template")
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
# path separator for the "version_locations" list; "os" means os.pathsep
version_path_separator = os
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = postgresql+asyncpg://alert_processor_user:alert_processor_pass123@alert-processor-db:5432/alert_processor_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
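
This config drives both the Alembic CLI and programmatic use. As a rough sketch of how the service might apply migrations at startup, assuming the config file is reachable as "alembic.ini" and reusing the ALERT_PROCESSOR_DATABASE_URL override that env.py honors (the helper name is illustrative, not code from this commit):

# Hypothetical startup helper -- not part of this commit.
import os

from alembic import command
from alembic.config import Config


def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    """Apply all pending Alembic migrations for the alert-processor DB."""
    cfg = Config(ini_path)
    # Mirror env.py: let the environment override the baked-in DSN.
    url = os.getenv("ALERT_PROCESSOR_DATABASE_URL")
    if url:
        cfg.set_main_option("sqlalchemy.url", url)
    # This runs env.py, which starts its own asyncio loop, so call it
    # from synchronous code only (not inside a running event loop).
    command.upgrade(cfg, "head")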


@@ -0,0 +1,109 @@
"""
Alembic environment configuration for Alert Processor Service
"""
import asyncio
import os
import sys
from logging.config import fileConfig
from pathlib import Path

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))

# Import models to ensure they're registered with the metadata
from app.models.alerts import *  # noqa
from shared.database.base import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up the loggers.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from the environment variable if available
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well. By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with a database connection"""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode"""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
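
The offline branch above means this same environment can render plain SQL without any database connection. A minimal sketch, assuming the same "alembic.ini" (the CLI's --sql flag maps to the sql=True argument here):

# Sketch: render migrations as SQL instead of executing them,
# equivalent to `alembic upgrade head --sql` on the CLI.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head", sql=True)  # emits SQL via env.py's offline path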


@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
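
New revision files are rendered from this template by the revision command, with autogenerate diffing target_metadata against the live schema. A small sketch (the message text is illustrative):

# Sketch: generate a revision file from script.py.mako, same as
# `alembic revision --autogenerate -m "add suppliers table"`.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.revision(cfg, message="add suppliers table", autogenerate=True)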


@@ -0,0 +1,64 @@
"""Initial alerts table
Revision ID: 001
Revises:
Create Date: 2025-09-18 23:17:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create enum types
alert_status_enum = postgresql.ENUM('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus')
alert_severity_enum = postgresql.ENUM('low', 'medium', 'high', 'urgent', name='alertseverity')
alert_status_enum.create(op.get_bind())
alert_severity_enum.create(op.get_bind())
# Create alerts table
op.create_table('alerts',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('item_type', sa.String(length=50), nullable=False),
sa.Column('alert_type', sa.String(length=100), nullable=False),
sa.Column('severity', alert_severity_enum, nullable=False),
sa.Column('status', alert_status_enum, nullable=False),
sa.Column('service', sa.String(length=100), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('actions', sa.JSON(), nullable=True),
sa.Column('metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('resolved_at', sa.DateTime(), nullable=True),
)
# Create indexes
op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])
op.create_index('ix_alerts_severity', 'alerts', ['severity'])
op.create_index('ix_alerts_status', 'alerts', ['status'])
op.create_index('ix_alerts_created_at', 'alerts', ['created_at'])
def downgrade() -> None:
# Drop indexes
op.drop_index('ix_alerts_created_at', 'alerts')
op.drop_index('ix_alerts_status', 'alerts')
op.drop_index('ix_alerts_severity', 'alerts')
op.drop_index('ix_alerts_tenant_id', 'alerts')
# Drop table
op.drop_table('alerts')
# Drop enum types
op.execute('DROP TYPE alertseverity')
op.execute('DROP TYPE alertstatus')
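
This migration implies a matching ORM model in app/models/alerts.py, which the diff does not show. A hypothetical sketch of what it could look like: the Base stand-in, enum classes, and defaults are all assumptions. Note that "metadata" is reserved on declarative classes, so the attribute needs a different name even though the column keeps the name used in the migration.

# Hypothetical model matching the 001 migration -- the real file is not in this diff.
import enum
import uuid
from datetime import datetime

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import DeclarativeBase


class Base(DeclarativeBase):
    """Stand-in for shared.database.base.Base (assumed)."""


class AlertSeverity(str, enum.Enum):
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"


class AlertStatus(str, enum.Enum):
    ACTIVE = "active"
    RESOLVED = "resolved"
    ACKNOWLEDGED = "acknowledged"
    IGNORED = "ignored"


def _values(enum_cls):
    # Store the lowercase member values (matching the migration),
    # not the uppercase member names SQLAlchemy would use by default.
    return [m.value for m in enum_cls]


class Alert(Base):
    __tablename__ = "alerts"

    id = sa.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = sa.Column(UUID(as_uuid=True), nullable=False, index=True)
    item_type = sa.Column(sa.String(50), nullable=False)
    alert_type = sa.Column(sa.String(100), nullable=False)
    severity = sa.Column(
        sa.Enum(AlertSeverity, name="alertseverity", values_callable=_values),
        nullable=False, index=True,
    )
    status = sa.Column(
        sa.Enum(AlertStatus, name="alertstatus", values_callable=_values),
        nullable=False, index=True,
    )
    service = sa.Column(sa.String(100), nullable=False)
    title = sa.Column(sa.String(255), nullable=False)
    message = sa.Column(sa.Text, nullable=False)
    actions = sa.Column(sa.JSON, nullable=True)
    # "metadata" is reserved on declarative classes; keep the column name
    # from the migration but expose it under a different attribute.
    alert_metadata = sa.Column("metadata", sa.JSON, nullable=True)
    created_at = sa.Column(sa.DateTime, nullable=False, default=datetime.utcnow)
    updated_at = sa.Column(
        sa.DateTime, nullable=False,
        default=datetime.utcnow, onupdate=datetime.utcnow,
    )
    resolved_at = sa.Column(sa.DateTime, nullable=True)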