Add supplier and improve inventory frontend
@@ -13,11 +13,10 @@ class AlertProcessorConfig(BaseServiceSettings):
     APP_NAME: str = "Alert Processor Service"
     DESCRIPTION: str = "Central alert and recommendation processor"
 
-    # Use the notification database for alert storage
-    # This makes sense since alerts and notifications are closely related
+    # Use dedicated database for alert storage
     DATABASE_URL: str = os.getenv(
-        "NOTIFICATION_DATABASE_URL",
-        "postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db"
+        "ALERT_PROCESSOR_DATABASE_URL",
+        "postgresql+asyncpg://alert_processor_user:alert_processor_pass123@alert-processor-db:5432/alert_processor_db"
     )
 
     # Use dedicated Redis DB for alert processing
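For context, a minimal consumption sketch (not part of the commit): the AlertProcessorConfig import path is an assumption, create_async_engine is SQLAlchemy's standard asyncio engine factory, and ALERT_PROCESSOR_DATABASE_URL must be set in the process environment before the config module is imported, otherwise the hard-coded fallback above is used.

    # Minimal usage sketch, assuming the import path below; not part of the commit.
    from sqlalchemy.ext.asyncio import create_async_engine

    from app.core.config import AlertProcessorConfig  # hypothetical module path

    config = AlertProcessorConfig()
    # The URL resolves to ALERT_PROCESSOR_DATABASE_URL when set, else the fallback.
    engine = create_async_engine(config.DATABASE_URL, pool_pre_ping=True)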
@@ -206,42 +206,47 @@ class AlertProcessorService:
 
     async def store_item(self, item: dict) -> dict:
         """Store alert or recommendation in database"""
-        from sqlalchemy import text
-
-        query = text("""
-            INSERT INTO alerts (
-                id, tenant_id, item_type, alert_type, severity, status,
-                service, title, message, actions, metadata,
-                created_at
-            ) VALUES (:id, :tenant_id, :item_type, :alert_type, :severity, :status,
-                      :service, :title, :message, :actions, :metadata, :created_at)
-            RETURNING *
-        """)
+        from app.models.alerts import Alert, AlertSeverity, AlertStatus
+        from sqlalchemy import select
 
         async with self.db_manager.get_session() as session:
-            result = await session.execute(
-                query,
-                {
-                    'id': item['id'],
-                    'tenant_id': item['tenant_id'],
-                    'item_type': item['item_type'],  # 'alert' or 'recommendation'
-                    'alert_type': item['type'],
-                    'severity': item['severity'],
-                    'status': 'active',
-                    'service': item['service'],
-                    'title': item['title'],
-                    'message': item['message'],
-                    'actions': json.dumps(item.get('actions', [])),
-                    'metadata': json.dumps(item.get('metadata', {})),
-                    'created_at': item['timestamp']
-                }
-            )
-            row = result.fetchone()
+            # Create alert instance
+            alert = Alert(
+                id=item['id'],
+                tenant_id=item['tenant_id'],
+                item_type=item['item_type'],  # 'alert' or 'recommendation'
+                alert_type=item['type'],
+                severity=AlertSeverity(item['severity']),
+                status=AlertStatus.ACTIVE,
+                service=item['service'],
+                title=item['title'],
+                message=item['message'],
+                actions=item.get('actions', []),
+                alert_metadata=item.get('metadata', {}),
+                created_at=datetime.fromisoformat(item['timestamp']) if isinstance(item['timestamp'], str) else item['timestamp']
+            )
 
+            session.add(alert)
             await session.commit()
+            await session.refresh(alert)
 
             logger.debug("Item stored in database", item_id=item['id'])
-            return dict(row._mapping)
+
+            # Convert to dict for return
+            return {
+                'id': str(alert.id),
+                'tenant_id': str(alert.tenant_id),
+                'item_type': alert.item_type,
+                'alert_type': alert.alert_type,
+                'severity': alert.severity.value,
+                'status': alert.status.value,
+                'service': alert.service,
+                'title': alert.title,
+                'message': alert.message,
+                'actions': alert.actions,
+                'metadata': alert.alert_metadata,
+                'created_at': alert.created_at
+            }
 
     async def stream_to_sse(self, tenant_id: str, item: dict):
         """Publish item to Redis for SSE streaming"""
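A minimal sketch of how the refactored method might be called; the processor instance, its db_manager wiring, and every field value below are assumptions for illustration, not part of the commit.

    # Illustrative call only; the processor is assumed to be constructed elsewhere.
    import uuid
    from datetime import datetime, timezone

    async def demo(processor: "AlertProcessorService") -> None:
        stored = await processor.store_item({
            'id': str(uuid.uuid4()),
            'tenant_id': str(uuid.uuid4()),
            'item_type': 'alert',
            'type': 'overstock_warning',
            'severity': 'high',
            'service': 'inventory',
            'title': 'Overstock detected',
            'message': 'SKU 123 exceeds the configured maximum stock level.',
            'actions': [{'label': 'Review stock', 'action': 'open_inventory'}],
            'metadata': {'sku': '123'},
            'timestamp': datetime.now(timezone.utc).isoformat(),
        })
        print(stored['severity'], stored['status'])  # 'high', 'active'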
1  services/alert_processor/app/models/__init__.py  Normal file
@@ -0,0 +1 @@
+# services/alert_processor/app/models/__init__.py
56  services/alert_processor/app/models/alerts.py  Normal file
@@ -0,0 +1,56 @@
# services/alert_processor/app/models/alerts.py
"""
Alert models for the alert processor service
"""

from sqlalchemy import Column, String, Text, DateTime, JSON, Enum
from sqlalchemy.dialects.postgresql import UUID
from datetime import datetime
import uuid
import enum

from shared.database.base import Base


class AlertStatus(enum.Enum):
    """Alert status values"""
    ACTIVE = "active"
    RESOLVED = "resolved"
    ACKNOWLEDGED = "acknowledged"
    IGNORED = "ignored"


class AlertSeverity(enum.Enum):
    """Alert severity levels"""
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"


class Alert(Base):
    """Alert records for the alert processor service"""
    __tablename__ = "alerts"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Alert classification
    item_type = Column(String(50), nullable=False)  # 'alert' or 'recommendation'
    alert_type = Column(String(100), nullable=False)  # e.g., 'overstock_warning'
    severity = Column(Enum(AlertSeverity), nullable=False, index=True)
    status = Column(Enum(AlertStatus), default=AlertStatus.ACTIVE, index=True)

    # Source and content
    service = Column(String(100), nullable=False)  # originating service
    title = Column(String(255), nullable=False)
    message = Column(Text, nullable=False)

    # Actions and metadata
    actions = Column(JSON, nullable=True)  # List of available actions
    alert_metadata = Column(JSON, nullable=True)  # Additional alert-specific data

    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow, index=True)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    resolved_at = Column(DateTime, nullable=True)
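A brief read-path sketch against this model (not part of the commit): the helper name and the session argument are hypothetical; only the Alert model and SQLAlchemy's select come from the code above.

    # Hypothetical query: most recent active alerts for one tenant.
    from sqlalchemy import select

    from app.models.alerts import Alert, AlertStatus

    async def recent_active_alerts(session, tenant_id, limit: int = 20):
        stmt = (
            select(Alert)
            .where(Alert.tenant_id == tenant_id, Alert.status == AlertStatus.ACTIVE)
            .order_by(Alert.created_at.desc())
            .limit(limit)
        )
        result = await session.execute(stmt)
        return result.scalars().all()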
93  services/alert_processor/migrations/alembic.ini  Normal file
@@ -0,0 +1,93 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version number format
# Uses Alembic datetime format
version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d

# version name format
version_path_separator = /

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://alert_processor_user:alert_processor_pass123@alert-processor-db:5432/alert_processor_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
109  services/alert_processor/migrations/env.py  Normal file
@@ -0,0 +1,109 @@
"""
Alembic environment configuration for Alert Processor Service
"""

import asyncio
from logging.config import fileConfig
import os
import sys
from pathlib import Path

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))

# Import models to ensure they're registered
from app.models.alerts import *  # noqa
from shared.database.base import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with database connection"""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode"""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
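As a usage sketch (not part of the commit), the migrations could also be applied programmatically through Alembic's command API; the repository-relative paths and the connection string below are assumptions.

    # Hypothetical runner: apply all alert-processor migrations to head.
    import os

    from alembic import command
    from alembic.config import Config

    os.environ["ALERT_PROCESSOR_DATABASE_URL"] = (
        "postgresql+asyncpg://alert_processor_user:alert_processor_pass123"
        "@alert-processor-db:5432/alert_processor_db"
    )

    cfg = Config("services/alert_processor/migrations/alembic.ini")  # assumed path
    cfg.set_main_option("script_location", "services/alert_processor/migrations")
    command.upgrade(cfg, "head")  # env.py picks up the env var and runs async migrations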
24  services/alert_processor/migrations/script.py.mako  Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,64 @@
"""Initial alerts table

Revision ID: 001
Revises:
Create Date: 2025-09-18 23:17:00

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create enum types
    alert_status_enum = postgresql.ENUM('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus')
    alert_severity_enum = postgresql.ENUM('low', 'medium', 'high', 'urgent', name='alertseverity')

    alert_status_enum.create(op.get_bind())
    alert_severity_enum.create(op.get_bind())

    # Create alerts table
    op.create_table('alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('item_type', sa.String(length=50), nullable=False),
        sa.Column('alert_type', sa.String(length=100), nullable=False),
        sa.Column('severity', alert_severity_enum, nullable=False),
        sa.Column('status', alert_status_enum, nullable=False),
        sa.Column('service', sa.String(length=100), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('actions', sa.JSON(), nullable=True),
        sa.Column('alert_metadata', sa.JSON(), nullable=True),  # column name matches Alert.alert_metadata
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('resolved_at', sa.DateTime(), nullable=True),
    )

    # Create indexes
    op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])
    op.create_index('ix_alerts_severity', 'alerts', ['severity'])
    op.create_index('ix_alerts_status', 'alerts', ['status'])
    op.create_index('ix_alerts_created_at', 'alerts', ['created_at'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('ix_alerts_created_at', 'alerts')
    op.drop_index('ix_alerts_status', 'alerts')
    op.drop_index('ix_alerts_severity', 'alerts')
    op.drop_index('ix_alerts_tenant_id', 'alerts')

    # Drop table
    op.drop_table('alerts')

    # Drop enum types
    op.execute('DROP TYPE alertseverity')
    op.execute('DROP TYPE alertstatus')
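To sanity-check what this migration created, the table can be inspected after the upgrade; a hypothetical sketch, with the connection details assumed from alembic.ini and a sync driver used only for brevity.

    # Hypothetical verification, not part of the commit: list columns and indexes
    # that the initial migration created on the alerts table.
    from sqlalchemy import create_engine, inspect

    engine = create_engine(
        "postgresql+psycopg2://alert_processor_user:alert_processor_pass123"
        "@alert-processor-db:5432/alert_processor_db"
    )
    inspector = inspect(engine)
    print([c["name"] for c in inspector.get_columns("alerts")])
    print([ix["name"] for ix in inspector.get_indexes("alerts")])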