Add role-based filtering and improve code
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/alert_processor/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -11,7 +11,7 @@ import sys
from datetime import datetime
from typing import Dict, Any
import structlog
import redis.asyncio as aioredis
from shared.redis_utils import initialize_redis, close_redis, get_redis_client
from aio_pika import connect_robust, IncomingMessage, ExchangeType

from app.config import AlertProcessorConfig
@@ -92,9 +92,10 @@ class AlertProcessorService:
        """Start the alert processor service"""
        try:
            logger.info("Starting Alert Processor Service")

            # Connect to Redis for SSE publishing
            self.redis = aioredis.from_url(self.config.REDIS_URL)

            # Initialize shared Redis connection for SSE publishing
            await initialize_redis(self.config.REDIS_URL, db=0, max_connections=20)
            self.redis = await get_redis_client()
            logger.info("Connected to Redis")

            # Connect to RabbitMQ
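The shared.redis_utils helpers used above are not part of this diff. A minimal sketch of what they presumably provide, assuming a module-level singleton over a pooled redis.asyncio client; only the three names and the initialize_redis(url, db=..., max_connections=...) call shape are confirmed by this commit, everything else is an assumption:

# Hypothetical sketch of shared/redis_utils.py (not from this diff)
from typing import Optional

import redis.asyncio as aioredis

_client: Optional[aioredis.Redis] = None


async def initialize_redis(redis_url: str, db: int = 0, max_connections: int = 50) -> None:
    """Create the one shared client, backed by a bounded connection pool."""
    global _client
    if _client is None:
        _client = aioredis.from_url(
            redis_url,
            db=db,
            max_connections=max_connections,
            encoding="utf-8",
            decode_responses=True,
        )


async def get_redis_client() -> aioredis.Redis:
    """Return the shared client; initialize_redis() must run first."""
    if _client is None:
        raise RuntimeError("initialize_redis() has not been called")
    return _client


async def close_redis() -> None:
    """Close the shared client and its pool on shutdown."""
    global _client
    if _client is not None:
        await _client.close()
        _client = None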
@@ -306,18 +307,17 @@ class AlertProcessorService:
        """Stop the alert processor service"""
        self.running = False
        logger.info("Stopping Alert Processor Service")

        try:
            # Close RabbitMQ connection
            if self.connection and not self.connection.is_closed:
                await self.connection.close()

            # Close Redis connection
            if self.redis:
                await self.redis.close()

            # Close shared Redis connection
            await close_redis()

            logger.info("Alert Processor Service stopped")

        except Exception as e:
            logger.error("Error stopping service", error=str(e))
@@ -4,6 +4,13 @@ Alert Processor Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .alerts import Alert, AlertStatus, AlertSeverity

@@ -12,4 +19,5 @@ __all__ = [
    "Alert",
    "AlertStatus",
    "AlertSeverity",
    "AuditLog",
]
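create_audit_log_model itself lives in shared.security and is not shown in this diff. Judging from the audit_logs tables in the migrations below, it is presumably a factory that declares the same audit table on each service's own declarative Base; a hedged sketch, with the column set taken from the migrations in this commit and the factory shape assumed:

# Hypothetical sketch of shared.security.create_audit_log_model
import uuid

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def create_audit_log_model(base):
    """Declare an AuditLog model bound to the given service-local Base."""

    class AuditLog(base):
        __tablename__ = "audit_logs"

        id = sa.Column(sa.UUID(), primary_key=True, default=uuid.uuid4)
        tenant_id = sa.Column(sa.UUID(), nullable=False, index=True)
        user_id = sa.Column(sa.UUID(), nullable=False, index=True)
        action = sa.Column(sa.String(100), nullable=False, index=True)
        resource_type = sa.Column(sa.String(100), nullable=False, index=True)
        resource_id = sa.Column(sa.String(255), index=True)
        severity = sa.Column(sa.String(20), nullable=False, index=True)
        service_name = sa.Column(sa.String(100), nullable=False, index=True)
        description = sa.Column(sa.Text())
        changes = sa.Column(postgresql.JSON(astext_type=sa.Text()))
        audit_metadata = sa.Column(postgresql.JSON(astext_type=sa.Text()))
        ip_address = sa.Column(sa.String(45))
        user_agent = sa.Column(sa.Text())
        endpoint = sa.Column(sa.String(255))
        method = sa.Column(sa.String(10))
        created_at = sa.Column(
            sa.DateTime(timezone=True), nullable=False, index=True,
            server_default=sa.func.now(),
        )

    return AuditLog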
@@ -1,54 +0,0 @@
"""initial_schema_20251009_2039

Revision ID: 48724b300473
Revises:
Create Date: 2025-10-09 20:39:33.768021+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '48724b300473'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('alerts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('item_type', sa.String(length=50), nullable=False),
    sa.Column('alert_type', sa.String(length=100), nullable=False),
    sa.Column('severity', sa.Enum('low', 'medium', 'high', 'urgent', name='alertseverity'), nullable=False),
    sa.Column('status', sa.Enum('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus'), nullable=True),
    sa.Column('service', sa.String(length=100), nullable=False),
    sa.Column('title', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=False),
    sa.Column('actions', sa.JSON(), nullable=True),
    sa.Column('alert_metadata', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('resolved_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_alerts_created_at'), 'alerts', ['created_at'], unique=False)
    op.create_index(op.f('ix_alerts_severity'), 'alerts', ['severity'], unique=False)
    op.create_index(op.f('ix_alerts_status'), 'alerts', ['status'], unique=False)
    op.create_index(op.f('ix_alerts_tenant_id'), 'alerts', ['tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_alerts_tenant_id'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_status'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_severity'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_created_at'), table_name='alerts')
    op.drop_table('alerts')
    # ### end Alembic commands ###
@@ -0,0 +1,100 @@
"""initial_schema_20251015_1230

Revision ID: 5ad7a76c1b10
Revises:
Create Date: 2025-10-15 12:30:29.410300+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '5ad7a76c1b10'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('alerts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('item_type', sa.String(length=50), nullable=False),
    sa.Column('alert_type', sa.String(length=100), nullable=False),
    sa.Column('severity', sa.Enum('low', 'medium', 'high', 'urgent', name='alertseverity'), nullable=False),
    sa.Column('status', sa.Enum('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus'), nullable=True),
    sa.Column('service', sa.String(length=100), nullable=False),
    sa.Column('title', sa.String(length=255), nullable=False),
    sa.Column('message', sa.Text(), nullable=False),
    sa.Column('actions', sa.JSON(), nullable=True),
    sa.Column('alert_metadata', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('resolved_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_alerts_created_at'), 'alerts', ['created_at'], unique=False)
    op.create_index(op.f('ix_alerts_severity'), 'alerts', ['severity'], unique=False)
    op.create_index(op.f('ix_alerts_status'), 'alerts', ['status'], unique=False)
    op.create_index(op.f('ix_alerts_tenant_id'), 'alerts', ['tenant_id'], unique=False)
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    op.drop_index(op.f('ix_alerts_tenant_id'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_status'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_severity'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_created_at'), table_name='alerts')
    op.drop_table('alerts')
    # ### end Alembic commands ###
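Because this commit deletes the old initial revision (48724b300473) and introduces a freshly squashed one (5ad7a76c1b10), an existing database still stamped with the old revision id would need re-stamping before alembic upgrade works again. One possible way to do that with Alembic's Python API; the config path is an assumption:

# Hypothetical re-stamp for databases created from the deleted revision
from alembic import command
from alembic.config import Config

cfg = Config("services/alert_processor/alembic.ini")  # assumed location
# purge=True clears the alembic_version row even if it references a
# revision that no longer exists in the scripts directory.
command.stamp(cfg, "5ad7a76c1b10", purge=True)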
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/auth/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -25,11 +25,15 @@ from shared.auth.decorators import (
    require_admin_role_dep
)
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

logger = structlog.get_logger()
router = APIRouter(tags=["users"])
route_builder = RouteBuilder('auth')

# Initialize audit logger
audit_logger = create_audit_logger("auth-service")


@router.get(route_builder.build_base_route("me", include_tenant_prefix=False), response_model=UserResponse)
async def get_current_user_info(
@@ -184,14 +188,32 @@ async def delete_admin_user(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Admin user {user_id} not found"
        )

    # Log audit event for user deletion
    try:
        # Get tenant_id from current_user or use a placeholder for system-level operations
        tenant_id_str = current_user.get("tenant_id", "00000000-0000-0000-0000-000000000000")
        await audit_logger.log_deletion(
            db_session=db,
            tenant_id=tenant_id_str,
            user_id=current_user["user_id"],
            resource_type="user",
            resource_id=user_id,
            resource_data=user_info,
            description=f"Admin {current_user.get('email', current_user['user_id'])} initiated deletion of user {user_info.get('email', user_id)}",
            endpoint="/delete/{user_id}",
            method="DELETE"
        )
    except Exception as audit_error:
        logger.warning("Failed to log audit event", error=str(audit_error))

    # Start deletion as background task for better performance
    background_tasks.add_task(
        execute_admin_user_deletion,
        user_id=user_id,
        requesting_user_id=current_user["user_id"]
    )

    return {
        "success": True,
        "message": f"Admin user deletion for {user_id} has been initiated",
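audit_logger.log_deletion also comes from shared.security and is not shown here; the call site above fixes its keyword arguments, so it plausibly looks something like the following sketch, where the method body and the severity value are assumptions:

# Hypothetical sketch of the AuditLogger.log_deletion implied above
from typing import Any, Dict, Optional


class AuditLogger:
    def __init__(self, service_name: str, audit_model):
        self.service_name = service_name
        self.audit_model = audit_model  # e.g. the per-service AuditLog

    async def log_deletion(
        self,
        db_session,
        tenant_id: str,
        user_id: str,
        resource_type: str,
        resource_id: str,
        resource_data: Optional[Dict[str, Any]] = None,
        description: str = "",
        endpoint: Optional[str] = None,
        method: Optional[str] = None,
    ) -> None:
        """Persist a 'delete' audit row; callers wrap this in try/except
        so audit failures never block the business operation."""
        entry = self.audit_model(
            tenant_id=tenant_id,
            user_id=user_id,
            action="delete",
            resource_type=resource_type,
            resource_id=resource_id,
            severity="high",  # assumed default for deletions
            service_name=self.service_name,
            description=description,
            changes={"deleted": resource_data or {}},
            endpoint=endpoint,
            method=method,
        )
        db_session.add(entry)
        await db_session.commit()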
@@ -8,7 +8,7 @@ import re
import hashlib
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any, List
import redis.asyncio as redis
from shared.redis_utils import get_redis_client
from fastapi import HTTPException, status
import structlog
from passlib.context import CryptContext
@@ -24,8 +24,7 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Initialize JWT handler with SAME configuration as gateway
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)

# Redis client for session management
redis_client = redis.from_url(settings.REDIS_URL)
# Note: Redis client is now accessed via get_redis_client() from shared.redis_utils

class SecurityManager:
    """Security utilities for authentication - FIXED VERSION"""
@@ -3,6 +3,13 @@
Models export for auth service
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

from .users import User
from .tokens import RefreshToken, LoginAttempt
from .onboarding import UserOnboardingProgress, UserOnboardingSummary
@@ -13,4 +20,5 @@ __all__ = [
    'LoginAttempt',
    'UserOnboardingProgress',
    'UserOnboardingSummary',
    "AuditLog",
]
@@ -1,18 +1,18 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1229

Revision ID: 105797cd9710
Revision ID: 13327ad46a4d
Revises:
Create Date: 2025-10-09 20:38:43.315537+02:00
Create Date: 2025-10-15 12:29:13.886996+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '105797cd9710'
revision: str = '13327ad46a4d'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('login_attempts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
@@ -111,4 +143,18 @@ def downgrade() -> None:
    op.drop_table('refresh_tokens')
    op.drop_index(op.f('ix_login_attempts_email'), table_name='login_attempts')
    op.drop_table('login_attempts')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -1,42 +1,48 @@
# Multi-stage build for Demo Session Service
FROM python:3.11-slim as builder
# Demo Session Dockerfile
# Add this stage at the top of each service Dockerfile
FROM python:3.11-slim AS shared
WORKDIR /shared
COPY shared/ /shared/

WORKDIR /app

# Install build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    g++ \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install
COPY services/demo_session/requirements.txt .
RUN pip install --no-cache-dir --user -r requirements.txt

# Final stage
# Then your main service stage
FROM python:3.11-slim

WORKDIR /app

# Copy Python dependencies from builder
COPY --from=builder /root/.local /root/.local
# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy shared libraries
COPY shared/ /app/shared/
# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

# Copy service code
COPY services/demo_session/ /app/
COPY services/demo_session/requirements.txt .

# Copy scripts
# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
COPY --from=shared /shared /app/shared

# Copy application code
COPY services/demo_session/ .

# Copy scripts for migrations
COPY scripts/ /app/scripts/

# Make sure scripts are in path
ENV PATH=/root/.local/bin:$PATH
ENV PYTHONPATH=/app:$PYTHONPATH
# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
    CMD python -c "import httpx; httpx.get('http://localhost:8000/health')"
    CMD curl -f http://localhost:8000/health || exit 1

# Run the application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
@@ -8,7 +8,7 @@ import jwt

from app.api.schemas import DemoSessionResponse, DemoSessionStats
from app.services import DemoSessionManager, DemoCleanupService
from app.core import get_db, get_redis, RedisClient
from app.core import get_db, get_redis, DemoRedisWrapper
from sqlalchemy.ext.asyncio import AsyncSession
from shared.routing import RouteBuilder
@@ -25,7 +25,7 @@ route_builder = RouteBuilder('demo')
async def extend_demo_session(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Extend demo session expiration (BUSINESS OPERATION)"""
    try:
@@ -67,7 +67,7 @@ async def extend_demo_session(
)
async def get_demo_stats(
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Get demo session statistics (BUSINESS OPERATION)"""
    session_manager = DemoSessionManager(db, redis)
@@ -81,7 +81,7 @@ async def get_demo_stats(
)
async def run_cleanup(
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Manually trigger session cleanup (BUSINESS OPERATION - Internal endpoint for CronJob)"""
    cleanup_service = DemoCleanupService(db, redis)
@@ -10,7 +10,8 @@ import jwt

from app.api.schemas import DemoSessionCreate, DemoSessionResponse
from app.services import DemoSessionManager
from app.core import get_db, get_redis, RedisClient
from app.core import get_db
from app.core.redis_wrapper import get_redis, DemoRedisWrapper
from sqlalchemy.ext.asyncio import AsyncSession
from shared.routing import RouteBuilder
@@ -64,7 +65,7 @@ async def create_demo_session(
    request: DemoSessionCreate,
    http_request: Request,
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Create a new isolated demo session (ATOMIC)"""
    logger.info("Creating demo session", demo_account_type=request.demo_account_type)
@@ -130,7 +131,7 @@ async def create_demo_session(
async def get_session_info(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Get demo session information (ATOMIC READ)"""
    session_manager = DemoSessionManager(db, redis)
@@ -149,7 +150,7 @@ async def get_session_info(
async def get_session_status(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """
    Get demo session provisioning status
@@ -173,7 +174,7 @@ async def get_session_status(
async def retry_session_cloning(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """
    Retry failed cloning operations
@@ -204,7 +205,7 @@ async def retry_session_cloning(
async def destroy_demo_session(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Destroy demo session and cleanup resources (ATOMIC DELETE)"""
    try:
@@ -225,7 +226,7 @@ async def destroy_demo_session(
async def destroy_demo_session_post(
    session_id: str = Path(...),
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
    redis: DemoRedisWrapper = Depends(get_redis)
):
    """Destroy demo session via POST (for frontend compatibility)"""
    try:
@@ -2,6 +2,6 @@

from .config import settings
from .database import DatabaseManager, get_db
from .redis_client import RedisClient, get_redis
from .redis_wrapper import DemoRedisWrapper, get_redis

__all__ = ["settings", "DatabaseManager", "get_db", "RedisClient", "get_redis"]
__all__ = ["settings", "DatabaseManager", "get_db", "DemoRedisWrapper", "get_redis"]
@@ -1,51 +1,25 @@
"""
Redis client for demo session data caching
Redis wrapper for demo session service using shared Redis implementation
Provides a compatibility layer for session-specific operations
"""

import redis.asyncio as redis
from typing import Optional, Any
import json
import structlog
from datetime import timedelta

from .config import settings
from typing import Optional, Any
from shared.redis_utils import get_redis_client

logger = structlog.get_logger()


class RedisClient:
    """Redis client for session data"""
class DemoRedisWrapper:
    """Wrapper around shared Redis client for demo session operations"""

    def __init__(self, redis_url: str = None):
        self.redis_url = redis_url or settings.REDIS_URL
        self.client: Optional[redis.Redis] = None
        self.key_prefix = settings.REDIS_KEY_PREFIX
    def __init__(self, key_prefix: str = "demo_session"):
        self.key_prefix = key_prefix

    async def connect(self):
        """Connect to Redis"""
        if not self.client:
            self.client = await redis.from_url(
                self.redis_url,
                encoding="utf-8",
                decode_responses=True
            )
            logger.info("Redis client connected", redis_url=self.redis_url.split("@")[-1])

    async def close(self):
        """Close Redis connection"""
        if self.client:
            await self.client.close()
            logger.info("Redis connection closed")

    async def ping(self) -> bool:
        """Check Redis connection"""
        try:
            if not self.client:
                await self.connect()
            return await self.client.ping()
        except Exception as e:
            logger.error("Redis ping failed", error=str(e))
            return False
    async def get_client(self):
        """Get the underlying Redis client"""
        return await get_redis_client()

    def _make_key(self, *parts: str) -> str:
        """Create Redis key with prefix"""
@@ -53,26 +27,22 @@ class RedisClient:

    async def set_session_data(self, session_id: str, key: str, data: Any, ttl: int = None):
        """Store session data in Redis"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        redis_key = self._make_key(session_id, key)
        serialized = json.dumps(data) if not isinstance(data, str) else data

        if ttl:
            await self.client.setex(redis_key, ttl, serialized)
            await client.setex(redis_key, ttl, serialized)
        else:
            await self.client.set(redis_key, serialized)
            await client.set(redis_key, serialized)

        logger.debug("Session data stored", session_id=session_id, key=key)

    async def get_session_data(self, session_id: str, key: str) -> Optional[Any]:
        """Retrieve session data from Redis"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        redis_key = self._make_key(session_id, key)
        data = await self.client.get(redis_key)
        data = await client.get(redis_key)

        if data:
            try:
@@ -84,49 +54,42 @@ class RedisClient:

    async def delete_session_data(self, session_id: str, key: str = None):
        """Delete session data"""
        if not self.client:
            await self.connect()
        client = await get_redis_client()

        if key:
            redis_key = self._make_key(session_id, key)
            await self.client.delete(redis_key)
            await client.delete(redis_key)
        else:
            pattern = self._make_key(session_id, "*")
            keys = await self.client.keys(pattern)
            keys = await client.keys(pattern)
            if keys:
                await self.client.delete(*keys)
                await client.delete(*keys)

        logger.debug("Session data deleted", session_id=session_id, key=key)

    async def extend_session_ttl(self, session_id: str, ttl: int):
        """Extend TTL for all session keys"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        pattern = self._make_key(session_id, "*")
        keys = await self.client.keys(pattern)
        keys = await client.keys(pattern)

        for key in keys:
            await self.client.expire(key, ttl)
            await client.expire(key, ttl)

        logger.debug("Session TTL extended", session_id=session_id, ttl=ttl)

    async def set_hash(self, session_id: str, hash_key: str, field: str, value: Any):
        """Store hash field in Redis"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        redis_key = self._make_key(session_id, hash_key)
        serialized = json.dumps(value) if not isinstance(value, str) else value
        await self.client.hset(redis_key, field, serialized)
        await client.hset(redis_key, field, serialized)

    async def get_hash(self, session_id: str, hash_key: str, field: str) -> Optional[Any]:
        """Get hash field from Redis"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        redis_key = self._make_key(session_id, hash_key)
        data = await self.client.hget(redis_key, field)
        data = await client.hget(redis_key, field)

        if data:
            try:
@@ -138,11 +101,9 @@ class RedisClient:

    async def get_all_hash(self, session_id: str, hash_key: str) -> dict:
        """Get all hash fields"""
        if not self.client:
            await self.connect()

        client = await get_redis_client()
        redis_key = self._make_key(session_id, hash_key)
        data = await self.client.hgetall(redis_key)
        data = await client.hgetall(redis_key)

        result = {}
        for field, value in data.items():
@@ -153,12 +114,18 @@ class RedisClient:

        return result


redis_client = RedisClient()
    async def get_client(self):
        """Get raw Redis client for direct operations"""
        return await get_redis_client()


async def get_redis() -> RedisClient:
    """Dependency for FastAPI"""
    if not redis_client.client:
        await redis_client.connect()
    return redis_client
# Cached instance
_redis_wrapper = None


async def get_redis() -> DemoRedisWrapper:
    """Dependency for FastAPI - returns wrapper around shared Redis"""
    global _redis_wrapper
    if _redis_wrapper is None:
        _redis_wrapper = DemoRedisWrapper()
    return _redis_wrapper
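As a usage sketch, the wrapper keeps old call sites unchanged while every command now runs against the shared pool. The route and payload below are illustrative only; the dependency wiring matches the diff:

# Illustrative FastAPI usage of DemoRedisWrapper (example routes, not from this commit)
from fastapi import Depends, FastAPI

from app.core.redis_wrapper import DemoRedisWrapper, get_redis

app = FastAPI()


@app.get("/demo/{session_id}/cart")
async def read_cart(session_id: str, redis: DemoRedisWrapper = Depends(get_redis)):
    # _make_key() namespaces keys under the wrapper's key_prefix (exact format assumed)
    return await redis.get_session_data(session_id, "cart")


@app.post("/demo/{session_id}/cart")
async def write_cart(session_id: str, item: dict, redis: DemoRedisWrapper = Depends(get_redis)):
    # A TTL keeps demo data from outliving the session
    await redis.set_session_data(session_id, "cart", item, ttl=3600)
    return {"ok": True}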
@@ -9,14 +9,14 @@ from fastapi.responses import JSONResponse
import structlog
from contextlib import asynccontextmanager

from app.core import settings, DatabaseManager, RedisClient
from app.core import settings, DatabaseManager
from app.api import demo_sessions, demo_accounts, demo_operations
from shared.redis_utils import initialize_redis, close_redis

logger = structlog.get_logger()

# Initialize database and redis
# Initialize database
db_manager = DatabaseManager()
redis_client = RedisClient()


@asynccontextmanager
@@ -27,8 +27,12 @@ async def lifespan(app: FastAPI):
    # Initialize database
    db_manager.initialize()

    # Connect to Redis
    await redis_client.connect()
    # Initialize Redis using shared implementation
    await initialize_redis(
        redis_url=settings.REDIS_URL,
        db=0,
        max_connections=50
    )

    logger.info("Demo Session Service started successfully")

@@ -36,7 +40,7 @@ async def lifespan(app: FastAPI):

    # Cleanup on shutdown
    await db_manager.close()
    await redis_client.close()
    await close_redis()

    logger.info("Demo Session Service stopped")

@@ -92,7 +96,10 @@ async def root():
@app.get("/health")
async def health():
    """Health check endpoint"""
    redis_ok = await redis_client.ping()
    from shared.redis_utils import get_redis_manager

    redis_manager = await get_redis_manager()
    redis_ok = await redis_manager.health_check()

    return {
        "status": "healthy" if redis_ok else "degraded",
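get_redis_manager is another shared.redis_utils helper not shown in this diff; presumably it exposes a manager object so callers can health-check the shared connection, along these lines (only get_redis_manager() and health_check() are confirmed by the call site above):

# Hypothetical sketch of the manager side of shared.redis_utils
class RedisManager:
    def __init__(self, client):
        self.client = client

    async def health_check(self) -> bool:
        """Ping the shared client, swallowing connection errors."""
        try:
            return bool(await self.client.ping())
        except Exception:
            return False


async def get_redis_manager() -> "RedisManager":
    from shared.redis_utils import get_redis_client  # assumed shared helper
    return RedisManager(await get_redis_client())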
@@ -1,5 +1,12 @@

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
"""Demo Session Service Models"""

from .demo_session import DemoSession, DemoSessionStatus, CloningStatus

__all__ = ["DemoSession", "DemoSessionStatus", "CloningStatus"]
__all__ = ["DemoSession", "DemoSessionStatus", "CloningStatus", "AuditLog"]
@@ -11,7 +11,7 @@ import structlog

from app.models import DemoSession, DemoSessionStatus
from app.services.data_cloner import DemoDataCloner
from app.core import RedisClient
from app.core.redis_wrapper import DemoRedisWrapper

logger = structlog.get_logger()

@@ -19,7 +19,7 @@ logger = structlog.get_logger()
class DemoCleanupService:
    """Handles cleanup of expired demo sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis
        self.data_cloner = DemoDataCloner(db, redis)
@@ -9,7 +9,8 @@ import httpx
import structlog
import uuid

from app.core import RedisClient, settings
from app.core.redis_wrapper import DemoRedisWrapper
from app.core import settings

logger = structlog.get_logger()

@@ -17,7 +18,7 @@ logger = structlog.get_logger()
class DemoDataCloner:
    """Clones demo data for isolated sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis
@@ -12,7 +12,8 @@ import secrets
import structlog

from app.models import DemoSession, DemoSessionStatus, CloningStatus
from app.core import RedisClient, settings
from app.core.redis_wrapper import DemoRedisWrapper
from app.core import settings
from app.services.clone_orchestrator import CloneOrchestrator

logger = structlog.get_logger()
@@ -21,7 +22,7 @@ logger = structlog.get_logger()
class DemoSessionManager:
    """Manages demo session lifecycle"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis
        self.orchestrator = CloneOrchestrator()
@@ -367,7 +368,8 @@ class DemoSessionManager:
        }

        import json as json_module
        await self.redis.client.setex(
        client = await self.redis.get_client()
        await client.setex(
            status_key,
            7200,  # Cache for 2 hours
            json_module.dumps(status_data)  # Convert to JSON string
@@ -385,7 +387,8 @@ class DemoSessionManager:
        """
        # Try Redis cache first
        status_key = f"session:{session_id}:status"
        cached = await self.redis.client.get(status_key)
        client = await self.redis.get_client()
        cached = await client.get(status_key)

        if cached:
            import json
@@ -1,81 +0,0 @@
"""Add cloning status tracking

Revision ID: 002
Revises: 001
Create Date: 2025-01-10

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers
revision = '002'
down_revision = 'a1b2c3d4e5f6'  # References the actual initial schema revision
branch_labels = None
depends_on = None


def upgrade():
    """Add new status values and cloning tracking fields"""

    # Add new columns for cloning progress
    op.add_column('demo_sessions', sa.Column('cloning_started_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('demo_sessions', sa.Column('cloning_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('demo_sessions', sa.Column('total_records_cloned', sa.Integer(), server_default='0', nullable=False))
    op.add_column('demo_sessions', sa.Column('cloning_progress', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False))

    # Update the status enum to include new values
    # PostgreSQL doesn't support IF NOT EXISTS for enum values in older versions
    # We need to check if values exist before adding them
    from sqlalchemy import text

    conn = op.get_bind()

    # Check and add each enum value if it doesn't exist
    enum_values_to_add = ['pending', 'ready', 'failed', 'partial']

    for value in enum_values_to_add:
        # Check if the enum value already exists
        result = conn.execute(text("""
            SELECT EXISTS (
                SELECT 1 FROM pg_enum
                WHERE enumlabel = :value
                AND enumtypid = (
                    SELECT oid FROM pg_type WHERE typname = 'demosessionstatus'
                )
            );
        """), {"value": value})

        exists = result.scalar()

        if not exists:
            # Add the enum value
            # Note: ALTER TYPE ADD VALUE cannot run inside a transaction block in PostgreSQL
            # but Alembic handles this for us
            conn.execute(text(f"ALTER TYPE demosessionstatus ADD VALUE '{value}'"))

    # Update existing sessions: active → ready
    op.execute("""
        UPDATE demo_sessions
        SET status = 'ready'
        WHERE status = 'active' AND data_cloned = true;
    """)


def downgrade():
    """Remove cloning status tracking"""

    # Remove new columns
    op.drop_column('demo_sessions', 'cloning_progress')
    op.drop_column('demo_sessions', 'total_records_cloned')
    op.drop_column('demo_sessions', 'cloning_completed_at')
    op.drop_column('demo_sessions', 'cloning_started_at')

    # Note: Cannot easily remove enum values in PostgreSQL
    # Migration down would require recreating the enum type
    op.execute("""
        UPDATE demo_sessions
        SET status = 'active'
        WHERE status IN ('ready', 'pending', 'failed', 'partial');
    """)
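Side note on the deleted migration above: on PostgreSQL 9.6 and later the per-value existence check can collapse into ALTER TYPE ... ADD VALUE IF NOT EXISTS; the loop only existed to support older server versions. A sketch of the shorter form:

# Equivalent shorter form on PostgreSQL >= 9.6 (sketch)
from alembic import op
from sqlalchemy import text

conn = op.get_bind()
for value in ('pending', 'ready', 'failed', 'partial'):
    conn.execute(text(
        f"ALTER TYPE demosessionstatus ADD VALUE IF NOT EXISTS '{value}'"
    ))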
@@ -1,64 +0,0 @@
"""initial_schema

Revision ID: a1b2c3d4e5f6
Revises:
Create Date: 2025-10-02 17:45:00.000000+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create demo_sessions table
    op.create_table('demo_sessions',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('session_id', sa.String(length=100), nullable=False),
    sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.String(length=500), nullable=True),
    sa.Column('base_demo_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('virtual_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('demo_account_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.Enum('active', 'expired', 'destroyed', name='demosessionstatus'), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('destroyed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('request_count', sa.Integer(), nullable=True),
    sa.Column('data_cloned', sa.Boolean(), nullable=True),
    sa.Column('redis_populated', sa.Boolean(), nullable=True),
    sa.Column('session_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('session_id')
    )

    # Create indexes
    op.create_index(op.f('ix_demo_sessions_session_id'), 'demo_sessions', ['session_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_base_demo_tenant_id'), 'demo_sessions', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_virtual_tenant_id'), 'demo_sessions', ['virtual_tenant_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_status'), 'demo_sessions', ['status'], unique=False)
    op.create_index(op.f('ix_demo_sessions_created_at'), 'demo_sessions', ['created_at'], unique=False)
    op.create_index(op.f('ix_demo_sessions_expires_at'), 'demo_sessions', ['expires_at'], unique=False)


def downgrade() -> None:
    # Drop indexes
    op.drop_index(op.f('ix_demo_sessions_expires_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_created_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_status'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_virtual_tenant_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_base_demo_tenant_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_session_id'), table_name='demo_sessions')

    # Drop table (this will automatically drop the enum if it's only used here)
    op.drop_table('demo_sessions')
@@ -0,0 +1,109 @@
"""initial_schema_20251015_1231

Revision ID: de5ec23ee752
Revises:
Create Date: 2025-10-15 10:31:12.539158

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = 'de5ec23ee752'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('demo_sessions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('session_id', sa.String(length=100), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.String(length=500), nullable=True),
    sa.Column('base_demo_tenant_id', sa.UUID(), nullable=False),
    sa.Column('virtual_tenant_id', sa.UUID(), nullable=False),
    sa.Column('demo_account_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.Enum('pending', 'ready', 'failed', 'partial', 'active', 'expired', 'destroyed', name='demosessionstatus'), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('destroyed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('cloning_started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('cloning_completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('total_records_cloned', sa.Integer(), nullable=True),
    sa.Column('cloning_progress', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('request_count', sa.Integer(), nullable=True),
    sa.Column('data_cloned', sa.Boolean(), nullable=True),
    sa.Column('redis_populated', sa.Boolean(), nullable=True),
    sa.Column('session_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_demo_sessions_base_demo_tenant_id'), 'demo_sessions', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_created_at'), 'demo_sessions', ['created_at'], unique=False)
    op.create_index(op.f('ix_demo_sessions_expires_at'), 'demo_sessions', ['expires_at'], unique=False)
    op.create_index(op.f('ix_demo_sessions_session_id'), 'demo_sessions', ['session_id'], unique=True)
    op.create_index(op.f('ix_demo_sessions_status'), 'demo_sessions', ['status'], unique=False)
    op.create_index(op.f('ix_demo_sessions_virtual_tenant_id'), 'demo_sessions', ['virtual_tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_demo_sessions_virtual_tenant_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_status'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_session_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_expires_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_created_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_base_demo_tenant_id'), table_name='demo_sessions')
    op.drop_table('demo_sessions')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -8,6 +8,7 @@ redis==5.0.1
structlog==23.2.0
pydantic==2.5.0
pydantic-settings==2.1.0
typing-extensions>=4.5.0
httpx==0.25.2
PyJWT==2.8.0
python-multipart==0.0.6
services/external/Dockerfile (vendored)
@@ -17,9 +17,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/external/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
services/external/app/api/city_operations.py (vendored)
@@ -15,7 +15,7 @@ from app.schemas.traffic import TrafficDataResponse
from app.registry.city_registry import CityRegistry
from app.registry.geolocation_mapper import GeolocationMapper
from app.repositories.city_data_repository import CityDataRepository
from app.cache.redis_cache import ExternalDataCache
from app.cache.redis_wrapper import ExternalDataCache
from app.services.weather_service import WeatherService
from app.services.traffic_service import TrafficService
from shared.routing.route_builder import RouteBuilder
8
services/external/app/api/traffic_data.py
vendored
8
services/external/app/api/traffic_data.py
vendored
@@ -12,6 +12,8 @@ import structlog
from app.schemas.traffic import TrafficDataResponse
from app.services.traffic_service import TrafficService
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db

@@ -29,6 +31,7 @@ def get_traffic_service():
    route_builder.build_base_route("traffic-data"),
    response_model=List[TrafficDataResponse]
)
@analytics_tier_required
async def list_traffic_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
@@ -36,10 +39,11 @@ async def list_traffic_data(
    latitude: Optional[float] = Query(None),
    longitude: Optional[float] = Query(None),
    limit: int = Query(100, ge=1, le=1000),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    traffic_service: TrafficService = Depends(get_traffic_service)
):
    """List stored traffic data records"""
    """List stored traffic data records (Professional+ tier required)"""
    try:
        logger.info("Listing traffic data", tenant_id=tenant_id)

@@ -64,9 +68,11 @@ async def list_traffic_data(
    route_builder.build_resource_detail_route("traffic-data", "traffic_id"),
    response_model=TrafficDataResponse
)
@analytics_tier_required
async def get_traffic_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    traffic_id: UUID = Path(..., description="Traffic data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    traffic_service: TrafficService = Depends(get_traffic_service)
):
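Editor's note: `analytics_tier_required` ships from `shared.auth.access_control`, whose body is not part of this diff. A minimal sketch of what such a tier gate could look like, assuming the resolved user dict carries a `subscription_tier` claim (tier names and claim key are illustrative, not confirmed by the source):

# Hypothetical sketch only -- the real decorator lives in shared.auth.access_control.
import functools
from fastapi import HTTPException, status

ANALYTICS_TIERS = {"professional", "enterprise"}  # assumed tier names

def analytics_tier_required(endpoint):
    """Reject callers whose subscription tier does not include analytics."""
    @functools.wraps(endpoint)
    async def wrapper(*args, **kwargs):
        current_user = kwargs.get("current_user") or {}
        tier = current_user.get("subscription_tier", "starter")  # assumed claim name
        if tier not in ANALYTICS_TIERS:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Professional or Enterprise tier required",
            )
        return await endpoint(*args, **kwargs)
    return wrapper

Because `functools.wraps` preserves the endpoint's signature, FastAPI still resolves the `Depends` parameters, and the decorator can read the injected `current_user` from kwargs.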
services/external/app/api/weather_data.py (vendored, 8 changes)
@@ -12,6 +12,8 @@ import structlog
from app.schemas.weather import WeatherDataResponse
from app.services.weather_service import WeatherService
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db

@@ -29,6 +31,7 @@ def get_weather_service():
    route_builder.build_base_route("weather-data"),
    response_model=List[WeatherDataResponse]
)
@analytics_tier_required
async def list_weather_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
@@ -36,10 +39,11 @@ async def list_weather_data(
    latitude: Optional[float] = Query(None),
    longitude: Optional[float] = Query(None),
    limit: int = Query(100, ge=1, le=1000),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    weather_service: WeatherService = Depends(get_weather_service)
):
    """List stored weather data records"""
    """List stored weather data records (Professional+ tier required)"""
    try:
        logger.info("Listing weather data", tenant_id=tenant_id)

@@ -64,9 +68,11 @@ async def list_weather_data(
    route_builder.build_resource_detail_route("weather-data", "weather_id"),
    response_model=WeatherDataResponse
)
@analytics_tier_required
async def get_weather_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    weather_id: UUID = Path(..., description="Weather data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    weather_service: WeatherService = Depends(get_weather_service)
):
@@ -1,15 +1,13 @@
# services/external/app/cache/redis_cache.py
# services/external/app/cache/redis_wrapper.py
"""
Redis cache layer for fast training data access
Redis cache layer for fast training data access using shared Redis implementation
"""

from typing import List, Dict, Any, Optional
import json
from datetime import datetime, timedelta
import structlog
import redis.asyncio as redis

from app.core.config import settings
from shared.redis_utils import get_redis_client

logger = structlog.get_logger()

@@ -18,12 +16,11 @@ class ExternalDataCache:
    """Redis cache for external data service"""

    def __init__(self):
        self.redis_client = redis.from_url(
            settings.REDIS_URL,
            encoding="utf-8",
            decode_responses=True
        )
        self.ttl = 86400 * 7
        self.ttl = 86400 * 7  # 7 days

    async def _get_client(self):
        """Get the shared Redis client"""
        return await get_redis_client()

    def _weather_cache_key(
        self,
@@ -43,7 +40,8 @@ class ExternalDataCache:
        """Get cached weather data"""
        try:
            key = self._weather_cache_key(city_id, start_date, end_date)
            cached = await self.redis_client.get(key)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Weather cache hit", city_id=city_id, key=key)
@@ -84,7 +82,8 @@ class ExternalDataCache:

                serializable_data.append(record_dict)

            await self.redis_client.setex(
            client = await self._get_client()
            await client.setex(
                key,
                self.ttl,
                json.dumps(serializable_data)
@@ -113,7 +112,8 @@ class ExternalDataCache:
        """Get cached traffic data"""
        try:
            key = self._traffic_cache_key(city_id, start_date, end_date)
            cached = await self.redis_client.get(key)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Traffic cache hit", city_id=city_id, key=key)
@@ -154,7 +154,8 @@ class ExternalDataCache:

                serializable_data.append(record_dict)

            await self.redis_client.setex(
            client = await self._get_client()
            await client.setex(
                key,
                self.ttl,
                json.dumps(serializable_data)
@@ -168,11 +169,18 @@ class ExternalDataCache:
    async def invalidate_city_cache(self, city_id: str):
        """Invalidate all cache entries for a city"""
        try:
            client = await self._get_client()
            pattern = f"*:{city_id}:*"
            async for key in self.redis_client.scan_iter(match=pattern):
                await self.redis_client.delete(key)

            logger.info("City cache invalidated", city_id=city_id)
            # Use scan_iter for safer key pattern matching
            keys_to_delete = []
            async for key in client.scan_iter(match=pattern):
                keys_to_delete.append(key)

            if keys_to_delete:
                await client.delete(*keys_to_delete)

            logger.info("City cache invalidated", city_id=city_id, keys_deleted=len(keys_to_delete))

        except Exception as e:
            logger.error("Error invalidating cache", error=str(e))
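Editor's note: the scan-then-batch-delete pattern above avoids both the blocking KEYS command and one round trip per key. A standalone sketch of the same idea against a plain redis.asyncio client (connection URL and key pattern are placeholders):

# Minimal sketch of batched pattern invalidation, assuming a reachable Redis.
import asyncio
import redis.asyncio as redis

async def invalidate_pattern(url: str, pattern: str) -> int:
    client = redis.from_url(url, decode_responses=True)
    try:
        # SCAN iterates incrementally, so it never blocks the server like KEYS.
        keys = [key async for key in client.scan_iter(match=pattern)]
        if keys:
            # One DEL with many keys instead of one DEL per key.
            await client.delete(*keys)
        return len(keys)
    finally:
        await client.aclose()

if __name__ == "__main__":
    deleted = asyncio.run(invalidate_pattern("redis://localhost:6379/0", "*:madrid:*"))
    print(f"deleted {deleted} keys")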
services/external/app/models/__init__.py (vendored, 8 changes)
@@ -4,6 +4,13 @@ External Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .traffic import (
    TrafficData,
@@ -31,4 +38,5 @@ __all__ = [
    # City-based models (new)
    "CityWeatherData",
    "CityTrafficData",
    "AuditLog",
]
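Editor's note: `create_audit_log_model` is a factory from `shared.security` whose implementation is not in this diff; conceptually it binds one `audit_logs` table onto each service's own declarative `Base`, which is why every service repeats this snippet. A hedged sketch of such a factory, with the column set trimmed and inferred from the migrations below:

# Illustrative sketch only -- the real factory is shared.security.create_audit_log_model.
import uuid
from sqlalchemy import Column, DateTime, String, Text, func
from sqlalchemy.dialects.postgresql import JSON, UUID

def create_audit_log_model(base):
    """Build an AuditLog model bound to the calling service's declarative Base."""

    class AuditLog(base):
        __tablename__ = "audit_logs"

        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        created_at = Column(DateTime(timezone=True), nullable=False,
                            server_default=func.now(), index=True)

    return AuditLog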
@@ -1,8 +1,8 @@
"""initial_schema_20251009_2039
"""initial_schema_20251015_1230

Revision ID: e1c05c379c10
Revision ID: b97bab14ac47
Revises:
Create Date: 2025-10-09 20:39:49.989716+02:00
Create Date: 2025-10-15 12:30:54.963197+02:00

"""
from typing import Sequence, Union

@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'e1c05c379c10'
revision: str = 'b97bab14ac47'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('city_traffic_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('city_id', sa.String(length=50), nullable=False),

@@ -265,4 +297,18 @@ def downgrade() -> None:
    op.drop_index(op.f('ix_city_traffic_data_city_id'), table_name='city_traffic_data')
    op.drop_index('idx_city_traffic_lookup', table_name='city_traffic_data')
    op.drop_table('city_traffic_data')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/forecasting/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -12,6 +12,7 @@ from app.services.prediction_service import PredictionService
from shared.database.base import create_database_manager
from app.core.config import settings
from shared.routing import RouteBuilder
from shared.auth.access_control import analytics_tier_required

route_builder = RouteBuilder('forecasting')
logger = structlog.get_logger()
@@ -27,13 +28,14 @@ def get_enhanced_prediction_service():
@router.get(
    route_builder.build_analytics_route("predictions-performance")
)
@analytics_tier_required
async def get_predictions_performance(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Get predictions performance analytics"""
    """Get predictions performance analytics (Professional+ tier required)"""
    try:
        logger.info("Getting predictions performance", tenant_id=tenant_id)
@@ -23,11 +23,22 @@ from shared.monitoring.metrics import get_metrics_collector
from app.core.config import settings
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, create_rate_limiter, AuditSeverity, AuditAction
from shared.subscription.plans import get_forecast_quota, get_forecast_horizon_limit
from shared.redis_utils import get_redis_client

route_builder = RouteBuilder('forecasting')
logger = structlog.get_logger()
router = APIRouter(tags=["forecasting-operations"])

# Initialize audit logger
audit_logger = create_audit_logger("forecasting-service")

async def get_rate_limiter():
    """Dependency for rate limiter"""
    redis_client = await get_redis_client()
    return create_rate_limiter(redis_client)


def get_enhanced_forecasting_service():
    """Dependency injection for EnhancedForecastingService"""
@@ -194,16 +205,17 @@ async def generate_multi_day_forecast(
    route_builder.build_operations_route("batch"),
    response_model=BatchForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@require_user_role(['admin', 'owner'])
@track_execution_time("enhanced_batch_forecast_duration_seconds", "forecasting-service")
async def generate_batch_forecast(
    request: BatchForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service),
    rate_limiter = Depends(get_rate_limiter)
):
    """Generate forecasts for multiple products in batch"""
    """Generate forecasts for multiple products in batch (Admin+ only, quota enforced)"""
    metrics = get_metrics_collector(request_obj)

    try:
@@ -217,6 +229,24 @@ async def generate_batch_forecast(
        if not request.inventory_product_ids:
            raise ValueError("inventory_product_ids cannot be empty")

        # Get subscription tier and enforce quotas
        tier = current_user.get('subscription_tier', 'starter')

        # Check daily quota for forecast generation
        quota_limit = get_forecast_quota(tier)
        quota_result = await rate_limiter.check_and_increment_quota(
            tenant_id,
            "forecast_generation",
            quota_limit,
            period=86400  # 24 hours
        )

        # Validate forecast horizon if specified
        if request.horizon_days:
            await rate_limiter.validate_forecast_horizon(
                tenant_id, request.horizon_days, tier
            )

        batch_result = await enhanced_forecasting_service.generate_batch_forecast(
            tenant_id=tenant_id,
            request=request
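Editor's note: `check_and_increment_quota` comes from the shared rate limiter, whose implementation this diff does not show. The usual shape of such a daily quota is an atomic Redis INCR with an EXPIRE set on first use; a hedged sketch of that primitive (key naming and return shape are assumptions):

# Sketch of a daily-quota check via Redis INCR/EXPIRE; names are illustrative.
from fastapi import HTTPException, status

async def check_and_increment_quota(redis, tenant_id: str, resource: str,
                                    limit: int, period: int = 86400) -> dict:
    key = f"quota:{resource}:{tenant_id}"
    used = await redis.incr(key)         # atomic counter per tenant and resource
    if used == 1:
        await redis.expire(key, period)  # the window starts on first use
    if used > limit:
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail=f"Daily {resource} quota of {limit} exhausted",
        )
    return {"used": used, "limit": limit, "remaining": limit - used}

A production limiter would wrap the INCR/EXPIRE pair in a Lua script so a crash between the two calls cannot leave a counter that never expires.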
@@ -26,7 +26,7 @@ from shared.monitoring.decorators import track_execution_time
from shared.monitoring.metrics import get_metrics_collector
from app.core.config import settings
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.auth.access_control import require_user_role, enterprise_tier_required

route_builder = RouteBuilder('forecasting')
logger = structlog.get_logger()
@@ -43,12 +43,14 @@ def get_enhanced_forecasting_service():
    route_builder.build_analytics_route("scenario-simulation"),
    response_model=ScenarioSimulationResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@require_user_role(['admin', 'owner'])
@enterprise_tier_required
@track_execution_time("scenario_simulation_duration_seconds", "forecasting-service")
async def simulate_scenario(
    request: ScenarioSimulationRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """
@@ -62,7 +64,7 @@ async def simulate_scenario(
    - Promotions
    - Supply disruptions

    **PROFESSIONAL/ENTERPRISE ONLY**
    **ENTERPRISE TIER ONLY - Admin+ role required**
    """
    metrics = get_metrics_collector(request_obj)
    start_time = datetime.now(timezone.utc)
@@ -4,6 +4,13 @@ Forecasting Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .forecasts import Forecast, PredictionBatch
from .predictions import ModelPerformanceMetric, PredictionCache
@@ -14,4 +21,5 @@ __all__ = [
    "PredictionBatch",
    "ModelPerformanceMetric",
    "PredictionCache",
    "AuditLog",
]
@@ -14,11 +14,11 @@ Cache Strategy:
"""

import json
import redis
from datetime import datetime, date, timedelta
from typing import Optional, Dict, Any, List
from uuid import UUID
import structlog
from shared.redis_utils import get_redis_client

logger = structlog.get_logger()

@@ -26,47 +26,20 @@ logger = structlog.get_logger()
class ForecastCacheService:
    """Service-level caching for forecast predictions"""

    def __init__(self, redis_url: str):
        """
        Initialize Redis connection for forecast caching
    def __init__(self):
        """Initialize forecast cache service"""
        pass

        Args:
            redis_url: Redis connection URL
        """
        self.redis_url = redis_url
        self._redis_client = None
        self._connect()
    async def _get_redis(self):
        """Get shared Redis client"""
        return await get_redis_client()

    def _connect(self):
        """Establish Redis connection with retry logic"""
        try:
            self._redis_client = redis.from_url(
                self.redis_url,
                decode_responses=True,
                socket_keepalive=True,
                socket_keepalive_options={1: 1, 3: 3, 5: 5},
                retry_on_timeout=True,
                max_connections=100,  # Higher limit for forecast service
                health_check_interval=30
            )
            # Test connection
            self._redis_client.ping()
            logger.info("Forecast cache Redis connection established")
        except Exception as e:
            logger.error("Failed to connect to forecast cache Redis", error=str(e))
            self._redis_client = None

    @property
    def redis(self):
        """Get Redis client with connection check"""
        if self._redis_client is None:
            self._connect()
        return self._redis_client

    def is_available(self) -> bool:
    async def is_available(self) -> bool:
        """Check if Redis cache is available"""
        try:
            return self.redis is not None and self.redis.ping()
            client = await self._get_redis()
            await client.ping()
            return True
        except Exception:
            return False

@@ -138,12 +111,13 @@ class ForecastCacheService:
        Returns:
            Cached forecast data or None if not found
        """
        if not self.is_available():
        if not await self.is_available():
            return None

        try:
            key = self._get_forecast_key(tenant_id, product_id, forecast_date)
            cached_data = self.redis.get(key)
            client = await self._get_redis()
            cached_data = await client.get(key)

            if cached_data:
                forecast_data = json.loads(cached_data)
@@ -188,7 +162,7 @@ class ForecastCacheService:
        Returns:
            True if cached successfully, False otherwise
        """
        if not self.is_available():
        if not await self.is_available():
            logger.warning("Redis not available, skipping forecast cache")
            return False

@@ -205,7 +179,8 @@ class ForecastCacheService:
            }

            # Serialize and cache
            self.redis.setex(
            client = await self._get_redis()
            await client.setex(
                key,
                ttl,
                json.dumps(cache_entry, default=str)
@@ -241,12 +216,13 @@ class ForecastCacheService:
        Returns:
            Cached batch forecast data or None
        """
        if not self.is_available():
        if not await self.is_available():
            return None

        try:
            key = self._get_batch_forecast_key(tenant_id, product_ids, forecast_date)
            cached_data = self.redis.get(key)
            client = await self._get_redis()
            cached_data = await client.get(key)

            if cached_data:
                forecast_data = json.loads(cached_data)
@@ -273,7 +249,7 @@ class ForecastCacheService:
        forecast_data: Dict[str, Any]
    ) -> bool:
        """Cache batch forecast result"""
        if not self.is_available():
        if not await self.is_available():
            return False

        try:
@@ -287,7 +263,8 @@ class ForecastCacheService:
                'ttl_seconds': ttl
            }

            self.redis.setex(key, ttl, json.dumps(cache_entry, default=str))
            client = await self._get_redis()
            await client.setex(key, ttl, json.dumps(cache_entry, default=str))

            logger.info("Batch forecast cached successfully",
                        tenant_id=str(tenant_id),
@@ -320,16 +297,17 @@ class ForecastCacheService:
        Returns:
            Number of cache entries invalidated
        """
        if not self.is_available():
        if not await self.is_available():
            return 0

        try:
            # Find all keys matching this product
            pattern = f"forecast:{tenant_id}:{product_id}:*"
            keys = self.redis.keys(pattern)
            client = await self._get_redis()
            keys = await client.keys(pattern)

            if keys:
                deleted = self.redis.delete(*keys)
                deleted = await client.delete(*keys)
                logger.info("Invalidated product forecast cache",
                            tenant_id=str(tenant_id),
                            product_id=str(product_id),
@@ -359,7 +337,7 @@ class ForecastCacheService:
        Returns:
            Number of cache entries invalidated
        """
        if not self.is_available():
        if not await self.is_available():
            return 0

        try:
@@ -368,10 +346,11 @@ class ForecastCacheService:
            else:
                pattern = f"forecast:{tenant_id}:*"

            keys = self.redis.keys(pattern)
            client = await self._get_redis()
            keys = await client.keys(pattern)

            if keys:
                deleted = self.redis.delete(*keys)
                deleted = await client.delete(*keys)
                logger.info("Invalidated tenant forecast cache",
                            tenant_id=str(tenant_id),
                            forecast_date=str(forecast_date) if forecast_date else "all",
@@ -391,15 +370,16 @@ class ForecastCacheService:
        Returns:
            Number of cache entries invalidated
        """
        if not self.is_available():
        if not await self.is_available():
            return 0

        try:
            pattern = "forecast:*"
            keys = self.redis.keys(pattern)
            client = await self._get_redis()
            keys = await client.keys(pattern)

            if keys:
                deleted = self.redis.delete(*keys)
                deleted = await client.delete(*keys)
                logger.warning("Invalidated ALL forecast cache", keys_deleted=deleted)
                return deleted

@@ -413,22 +393,23 @@ class ForecastCacheService:
    # ================================================================
    # CACHE STATISTICS & MONITORING
    # ================================================================

    def get_cache_stats(self) -> Dict[str, Any]:
    async def get_cache_stats(self) -> Dict[str, Any]:
        """
        Get cache statistics for monitoring

        Returns:
            Dictionary with cache metrics
        """
        if not self.is_available():
        if not await self.is_available():
            return {"available": False}

        try:
            info = self.redis.info()
            client = await self._get_redis()
            info = await client.info()

            # Get forecast-specific stats
            forecast_keys = self.redis.keys("forecast:*")
            batch_keys = self.redis.keys("forecast:batch:*")
            forecast_keys = await client.keys("forecast:*")
            batch_keys = await client.keys("forecast:batch:*")

            return {
                "available": True,
@@ -471,12 +452,13 @@ class ForecastCacheService:
        Returns:
            Cache metadata or None
        """
        if not self.is_available():
        if not await self.is_available():
            return None

        try:
            key = self._get_forecast_key(tenant_id, product_id, forecast_date)
            ttl = self.redis.ttl(key)
            client = await self._get_redis()
            ttl = await client.ttl(key)

            if ttl > 0:
                return {
@@ -498,21 +480,16 @@ class ForecastCacheService:
_cache_service = None


def get_forecast_cache_service(redis_url: Optional[str] = None) -> ForecastCacheService:
def get_forecast_cache_service() -> ForecastCacheService:
    """
    Get the global forecast cache service instance

    Args:
        redis_url: Redis connection URL (required for first call)

    Returns:
        ForecastCacheService instance
    """
    global _cache_service

    if _cache_service is None:
        if redis_url is None:
            raise ValueError("redis_url required for first initialization")
        _cache_service = ForecastCacheService(redis_url)
        _cache_service = ForecastCacheService()

    return _cache_service
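Editor's note: a short usage sketch of the refactored singleton; since the connection now comes from `shared.redis_utils`, callers no longer pass a URL, and every availability check is awaited. The getter method name below is assumed, not confirmed by this diff:

# Hedged usage sketch; assumes shared Redis was initialized at service startup
# and that a read method of this shape exists on the service.
from datetime import date

async def read_cached_forecast(tenant_id, product_id):
    cache = get_forecast_cache_service()   # no redis_url argument anymore
    if not await cache.is_available():     # availability checks are now async
        return None
    return await cache.get_cached_forecast(tenant_id, product_id, date.today())  # assumed name

Worth noting: the invalidation paths here still call `client.keys()`, unlike the `scan_iter` batching adopted in the external service's cache; `KEYS` blocks Redis on large keyspaces, so the same pattern could be applied here.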
@@ -1,18 +1,18 @@
"""initial_schema_20251009_2039
"""initial_schema_20251015_1230

Revision ID: cae963fbc2af
Revision ID: 301bc59f6dfb
Revises:
Create Date: 2025-10-09 20:39:42.106460+02:00
Create Date: 2025-10-15 12:30:42.311369+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'cae963fbc2af'
revision: str = '301bc59f6dfb'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('forecasts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),

@@ -125,4 +157,18 @@ def downgrade() -> None:
    op.drop_index(op.f('ix_forecasts_inventory_product_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_forecast_date'), table_name='forecasts')
    op.drop_table('forecasts')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -17,9 +17,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/inventory/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -209,7 +209,7 @@ async def update_compliance_record(

@router.delete(
    route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
    status_code=status.HTTP_204_NO_CONTENT
    status_code=status.HTTP_403_FORBIDDEN
)
@require_user_role(['admin', 'owner'])
async def delete_compliance_record(
@@ -218,7 +218,33 @@ async def delete_compliance_record(
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete (soft delete) compliance record"""
    """
    Compliance records CANNOT be deleted for regulatory compliance.
    Use the archive endpoint to mark records as inactive.
    """
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail={
            "error": "compliance_records_cannot_be_deleted",
            "message": "Compliance records cannot be deleted for regulatory compliance. Use PUT /food-safety/compliance/{id}/archive to archive records instead.",
            "reason": "Food safety compliance records must be retained for regulatory audits",
            "alternative_endpoint": f"/api/v1/tenants/{tenant_id}/inventory/food-safety/compliance/{compliance_id}/archive"
        }
    )


@router.put(
    route_builder.build_nested_resource_route("food-safety/compliance", "compliance_id", "archive"),
    response_model=dict
)
@require_user_role(['admin', 'owner'])
async def archive_compliance_record(
    compliance_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Archive (soft delete) compliance record - marks as inactive but retains for audit"""
    try:
        query = """
            UPDATE food_safety_compliance
@@ -228,7 +254,7 @@ async def delete_compliance_record(
        result = await db.execute(query, {
            "compliance_id": compliance_id,
            "tenant_id": tenant_id,
            "user_id": UUID(current_user["sub"])
            "user_id": UUID(current_user["user_id"])
        })

        if result.rowcount == 0:
@@ -238,13 +264,38 @@ async def delete_compliance_record(
            )

        await db.commit()
        return None

        # Log audit event for archiving compliance record
        try:
            from shared.security import create_audit_logger, AuditSeverity, AuditAction
            audit_logger = create_audit_logger("inventory-service")
            await audit_logger.log_event(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                action="archive",
                resource_type="compliance_record",
                resource_id=str(compliance_id),
                severity=AuditSeverity.HIGH.value,
                description=f"Archived compliance record (retained for regulatory compliance)",
                endpoint=f"/food-safety/compliance/{compliance_id}/archive",
                method="PUT"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        return {
            "message": "Compliance record archived successfully",
            "compliance_id": str(compliance_id),
            "archived": True,
            "note": "Record retained for regulatory compliance audits"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error deleting compliance record", error=str(e))
        logger.error("Error archiving compliance record", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete compliance record"
            detail="Failed to archive compliance record"
        )
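Editor's note: a hedged client-side sketch of the new contract, where DELETE now always returns 403 pointing at the archive route. The paths follow the `alternative_endpoint` field above; the host and token are placeholders:

# Illustrative client calls against the new compliance endpoints (httpx is pinned in requirements).
import asyncio
import httpx

BASE = "https://api.example.com/api/v1/tenants/{tenant_id}/inventory"  # placeholder host

async def archive_instead_of_delete(tenant_id: str, compliance_id: str, token: str):
    headers = {"Authorization": f"Bearer {token}"}
    async with httpx.AsyncClient(headers=headers) as client:
        url = BASE.format(tenant_id=tenant_id)
        # DELETE is now a hard 403 that explains the retention policy.
        resp = await client.delete(f"{url}/food-safety/compliance/{compliance_id}")
        assert resp.status_code == 403
        # The supported path: archive the record, retaining it for audits.
        resp = await client.put(f"{url}/food-safety/compliance/{compliance_id}/archive")
        resp.raise_for_status()
        return resp.json()  # {"message": ..., "archived": True, ...}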
@@ -22,12 +22,16 @@ from app.schemas.inventory import (
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required, owner_role_required
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')

router = APIRouter(tags=["ingredients"])

# Initialize audit logger
audit_logger = create_audit_logger("inventory-service")

# Helper function to extract user ID from user object
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
    """Extract user ID from current user context"""
@@ -264,6 +268,25 @@ async def hard_delete_ingredient(
    try:
        service = InventoryService()
        deletion_summary = await service.hard_delete_ingredient(ingredient_id, tenant_id)

        # Log audit event for hard deletion
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="ingredient",
                resource_id=str(ingredient_id),
                resource_data=deletion_summary,
                description=f"Hard deleted ingredient and all associated data",
                endpoint=f"/ingredients/{ingredient_id}/hard",
                method="DELETE"
            )
        except Exception as audit_error:
            import structlog
            logger = structlog.get_logger()
            logger.warning("Failed to log audit event", error=str(audit_error))

        return deletion_summary
    except ValueError as e:
        raise HTTPException(
@@ -4,6 +4,13 @@ Inventory Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .inventory import (
    Ingredient,
@@ -51,4 +58,5 @@ __all__ = [
    "FoodSafetyStandard",
    "ComplianceStatus",
    "FoodSafetyAlertType",
    "AuditLog",
]
@@ -1,8 +1,8 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1229

Revision ID: da978256de4a
Revision ID: e7fcea67bf4e
Revises:
Create Date: 2025-10-09 20:39:00.639427+02:00
Create Date: 2025-10-15 12:29:40.991849+02:00

"""
from typing import Sequence, Union

@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'da978256de4a'
revision: str = 'e7fcea67bf4e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('ingredients',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),

@@ -453,4 +485,18 @@ def downgrade() -> None:
    op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
    op.drop_index('idx_ingredients_barcode', table_name='ingredients')
    op.drop_table('ingredients')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/notification/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -22,8 +22,10 @@ from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing.route_builder import RouteBuilder
from shared.database.base import create_database_manager
from shared.monitoring.metrics import track_endpoint_metrics
from shared.security import create_audit_logger, AuditSeverity, AuditAction

logger = structlog.get_logger()
audit_logger = create_audit_logger("notification-service")
router = APIRouter()
route_builder = RouteBuilder("notification")

@@ -52,12 +54,25 @@ async def send_notification(
    """Send a single notification with enhanced validation and features"""

    try:
        # Check permissions for broadcast notifications
        if notification_data.get("broadcast", False) and current_user.get("role") not in ["admin", "manager"]:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Only admins and managers can send broadcast notifications"
            )
        # Check permissions for broadcast notifications (Admin+ only)
        if notification_data.get("broadcast", False):
            user_role = current_user.get("role", "").lower()
            if user_role not in ["admin", "owner"]:
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail="Only admins and owners can send broadcast notifications"
                )

            # Log HIGH severity audit event for broadcast notifications
            try:
                # Note: db session would need to be passed as dependency for full audit logging
                logger.info("Broadcast notification initiated",
                            tenant_id=current_user.get("tenant_id"),
                            user_id=current_user["user_id"],
                            notification_type=notification_data.get("type"),
                            severity="HIGH")
            except Exception as audit_error:
                logger.warning("Failed to log audit event", error=str(audit_error))

        # Validate required fields
        if not notification_data.get("message"):
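Editor's note: a hedged test sketch of the tightened broadcast rule; the route path and the fixture that overrides auth are assumptions, not part of this commit:

# Sketch of a role-gate test; route path and fixture names are illustrative.
import pytest
from httpx import AsyncClient

@pytest.mark.anyio
async def test_member_cannot_broadcast(app_with_member_user):  # assumed fixture
    async with AsyncClient(app=app_with_member_user, base_url="http://test") as client:
        resp = await client.post(
            "/api/v1/tenants/t1/notification/send",  # illustrative path
            json={"broadcast": True, "message": "hello all"},
        )
    assert resp.status_code == 403
    assert "admins and owners" in resp.json()["detail"]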
@@ -24,12 +24,7 @@ from shared.service_base import StandardFastAPIService
class NotificationService(StandardFastAPIService):
    """Notification Service with standardized setup"""

    expected_migration_version = "00001"

    async def on_startup(self, app):
        """Custom startup logic including migration verification"""
        await self.verify_migrations()
        await super().on_startup(app)
    expected_migration_version = "359991e24ea2"

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations."""
@@ -166,13 +161,19 @@ class NotificationService(StandardFastAPIService):

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for notification service"""
        # Verify migrations first
        await self.verify_migrations()

        # Call parent startup (includes database, messaging, etc.)
        await super().on_startup(app)

        # Initialize services
        self.email_service = EmailService()
        self.whatsapp_service = WhatsAppService()

        # Initialize SSE service
        self.sse_service = SSEService(settings.REDIS_URL)
        await self.sse_service.initialize()
        self.sse_service = SSEService()
        await self.sse_service.initialize(settings.REDIS_URL)
        self.logger.info("SSE service initialized")

        # Create orchestrator
@@ -257,4 +258,4 @@ service.add_router(analytics_router, tags=["notifications-analytics"])

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
    uvicorn.run(app, host="0.0.0.0", port=8000)
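Editor's note: `verify_migrations` itself is unchanged by this hunk; only the pinned revision moves from the placeholder "00001" to the real Alembic revision created below. A hedged sketch of what such a check typically looks like (the session helper is assumed; the `alembic_version` table and `version_num` column are standard Alembic):

# Sketch of an Alembic version check against the pinned revision.
from sqlalchemy import text

async def verify_migrations(self):
    """Fail fast if the database is not at the expected Alembic revision."""
    async with self.db_manager.session() as session:  # assumed session helper
        row = (await session.execute(
            text("SELECT version_num FROM alembic_version")
        )).first()
    current = row[0] if row else None
    if current != self.expected_migration_version:
        raise RuntimeError(
            f"Database at revision {current!r}, "
            f"expected {self.expected_migration_version!r}; run alembic upgrade head"
        )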
@@ -4,6 +4,13 @@ Notification Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .notifications import (
    Notification,
@@ -30,4 +37,5 @@ __all__ = [
    "NotificationLog",
    "EmailTemplate",
    "WhatsAppTemplate",
    "AuditLog",
]
@@ -5,11 +5,11 @@ Integrated within the notification service for alerts and recommendations
"""

import asyncio
from redis.asyncio import Redis
import json
from typing import Dict, Set, Any
from datetime import datetime
import structlog
from shared.redis_utils import initialize_redis, get_redis_client, close_redis

logger = structlog.get_logger()

@@ -18,18 +18,21 @@ class SSEService:
    Server-Sent Events service for real-time notifications
    Handles both alerts and recommendations through unified SSE streams
    """

    def __init__(self, redis_url: str):
        self.redis_url = redis_url

    def __init__(self):
        self.redis = None
        self.redis_url = None
        self.active_connections: Dict[str, Set[asyncio.Queue]] = {}
        self.pubsub_tasks: Dict[str, asyncio.Task] = {}

    async def initialize(self):

    async def initialize(self, redis_url: str):
        """Initialize Redis connection"""
        try:
            self.redis = Redis.from_url(self.redis_url)
            logger.info("SSE Service initialized with Redis connection")
            self.redis_url = redis_url
            # Initialize shared Redis connection for SSE
            await initialize_redis(redis_url, db=0, max_connections=30)
            self.redis = await get_redis_client()
            logger.info("SSE Service initialized with shared Redis connection")
        except Exception as e:
            logger.error("Failed to initialize SSE service", error=str(e))
            raise
@@ -45,7 +48,7 @@ class SSEService:
                await task
            except asyncio.CancelledError:
                pass

        # Close all client connections
        for tenant_id, connections in self.active_connections.items():
            for queue in connections.copy():
@@ -53,13 +56,12 @@ class SSEService:
                    await queue.put({"event": "shutdown", "data": json.dumps({"status": "server_shutdown"})})
                except:
                    pass

            # Close Redis connection
            if self.redis:
                await self.redis.close()

            # Close shared Redis connection
            await close_redis()

            logger.info("SSE Service shutdown completed")

        except Exception as e:
            logger.error("Error during SSE shutdown", error=str(e))

@@ -124,32 +126,33 @@ class SSEService:

    async def _listen_to_tenant_channel(self, tenant_id: str):
        """Listen to Redis channel for tenant-specific items"""
        pubsub = None
        try:
            # Create a separate Redis connection for pubsub
            pubsub_redis = Redis.from_url(self.redis_url)
            pubsub = pubsub_redis.pubsub()
            # Use the shared Redis client for pubsub
            pubsub = self.redis.pubsub()
            channel = f"alerts:{tenant_id}"
            await pubsub.subscribe(channel)

            logger.info("Started listening to tenant channel",
                        tenant_id=tenant_id,

            logger.info("Started listening to tenant channel",
                        tenant_id=tenant_id,
                        channel=channel)

            async for message in pubsub.listen():
                if message["type"] == "message":
                    # Broadcast to all connected clients for this tenant
                    await self.broadcast_to_tenant(tenant_id, message["data"])

        except asyncio.CancelledError:
            logger.info("Stopped listening to tenant channel", tenant_id=tenant_id)
        except Exception as e:
            logger.error("Error in pubsub listener", tenant_id=tenant_id, error=str(e))
        finally:
            try:
                await pubsub.unsubscribe(channel)
                await pubsub_redis.close()
            except:
                pass
            if pubsub:
                try:
                    await pubsub.unsubscribe(channel)
                    await pubsub.close()
                except:
                    pass

    async def broadcast_to_tenant(self, tenant_id: str, message: str):
        """Broadcast message to all connected clients of a tenant"""
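Editor's note: on the consuming side, each connected client typically holds an asyncio.Queue registered in `active_connections` while a streaming response drains it. A hedged sketch of such an endpoint; the register/unregister helpers are assumptions, and only the SSE wire format is standard:

# Sketch of an SSE endpoint draining a per-client queue; helper names assumed.
import asyncio
from fastapi import Request
from fastapi.responses import StreamingResponse

async def stream_alerts(request: Request, tenant_id: str, sse_service):
    queue: asyncio.Queue = await sse_service.register_client(tenant_id)  # assumed helper

    async def event_source():
        try:
            while not await request.is_disconnected():
                item = await queue.get()
                # SSE wire format: "event: <name>\ndata: <payload>\n\n"
                yield f"event: {item['event']}\ndata: {item['data']}\n\n"
        finally:
            await sse_service.unregister_client(tenant_id, queue)  # assumed helper

    return StreamingResponse(event_source(), media_type="text/event-stream")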
@@ -1,18 +1,18 @@
|
||||
"""initial_schema_20251009_2039
|
||||
"""initial_schema_20251015_1230
|
||||
|
||||
Revision ID: c27e2b79f787
|
||||
Revision ID: 359991e24ea2
|
||||
Revises:
|
||||
Create Date: 2025-10-09 20:39:25.955986+02:00
|
||||
Create Date: 2025-10-15 12:30:17.568404+02:00
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'c27e2b79f787'
|
||||
revision: str = '359991e24ea2'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('email_templates',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
@@ -181,4 +213,18 @@ def downgrade() -> None:
    op.drop_table('notification_logs')
    op.drop_index(op.f('ix_email_templates_tenant_id'), table_name='email_templates')
    op.drop_table('email_templates')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
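The regenerated per-service migrations can be exercised end to end with Alembic's programmatic API. A minimal sketch, assuming each service keeps its own alembic.ini next to its migrations directory; the helper name and path are illustrative, not part of this commit:

# Round-trip check for a service's initial schema migration.
from alembic import command
from alembic.config import Config

def rebuild_schema(ini_path: str = "alembic.ini") -> None:
    cfg = Config(ini_path)          # service-local Alembic config (assumed path)
    command.downgrade(cfg, "base")  # runs downgrade(): audit_logs is dropped last
    command.upgrade(cfg, "head")    # runs upgrade(): recreates audit_logs and its indexes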
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/orders/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -13,6 +13,7 @@ import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from app.core.database import get_db
from app.services.orders_service import OrdersService
from app.schemas.order_schemas import (
@@ -22,6 +23,7 @@ from app.schemas.order_schemas import (
)

logger = structlog.get_logger()
audit_logger = create_audit_logger("orders-service")

# Create route builder for consistent URL structure
route_builder = RouteBuilder('orders')
@@ -236,7 +238,10 @@ async def delete_customer(
    orders_service: OrdersService = Depends(get_orders_service),
    db = Depends(get_db)
):
    """Delete a customer (soft delete)"""
    """
    Delete a customer (Admin+ only, GDPR-compliant soft delete)
    Removes PII while maintaining referential integrity
    """
    try:
        customer = await orders_service.customer_repo.get(db, customer_id, tenant_id)
        if not customer:
@@ -245,10 +250,39 @@ async def delete_customer(
                detail="Customer not found"
            )

        # Capture customer data before deletion (for audit trail)
        # Note: This is anonymized after retention period in compliance with GDPR
        customer_data = {
            "customer_code": customer.customer_code,
            "customer_name": customer.customer_name,
            "email": customer.email,
            "phone": customer.phone,
            "business_type": customer.business_type if hasattr(customer, 'business_type') else None
        }

        await orders_service.customer_repo.delete(db, customer_id, tenant_id)

        logger.info("Customer deleted successfully",
                    customer_id=str(customer_id))
        # Log HIGH severity audit event for customer deletion (GDPR compliance)
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="customer",
                resource_id=str(customer_id),
                resource_data=customer_data,
                description=f"Admin {current_user.get('email', 'unknown')} deleted customer {customer_data['customer_code']} (GDPR-compliant soft delete)",
                endpoint=f"/customers/{customer_id}",
                method="DELETE",
                severity=AuditSeverity.HIGH.value
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Customer deleted successfully (GDPR-compliant)",
                    customer_id=str(customer_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

    except HTTPException:
        raise
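Every delete endpoint in this commit repeats the same shape: capture the resource, delete it, then write the audit record inside a try/except so that audit failures never fail the request. A hypothetical helper, not part of this commit, that factors the wrapper out; it simply forwards keyword arguments to audit_logger.log_deletion:

# Best-effort audit logging: failures are logged as warnings, never raised.
async def log_deletion_best_effort(audit_logger, logger, **kwargs) -> None:
    try:
        await audit_logger.log_deletion(**kwargs)
    except Exception as audit_error:
        logger.warning("Failed to log audit event", error=str(audit_error))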
@@ -14,6 +14,7 @@ import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from app.core.database import get_db
from app.services.orders_service import OrdersService
from app.schemas.order_schemas import (
@@ -23,6 +24,7 @@ from app.schemas.order_schemas import (
)

logger = structlog.get_logger()
audit_logger = create_audit_logger("orders-service")

# Create route builder for consistent URL structure
route_builder = RouteBuilder('orders')
@@ -238,7 +240,7 @@ async def delete_order(
    orders_service: OrdersService = Depends(get_orders_service),
    db = Depends(get_db)
):
    """Delete an order (soft delete)"""
    """Delete an order (Admin+ only, soft delete)"""
    try:
        order = await orders_service.order_repo.get(db, order_id, tenant_id)
        if not order:
@@ -247,10 +249,37 @@ async def delete_order(
                detail="Order not found"
            )

        # Capture order data before deletion
        order_data = {
            "order_number": order.order_number,
            "customer_id": str(order.customer_id) if order.customer_id else None,
            "order_status": order.order_status,
            "total_amount": float(order.total_amount) if order.total_amount else 0.0,
            "order_date": order.order_date.isoformat() if order.order_date else None
        }

        await orders_service.order_repo.delete(db, order_id, tenant_id)

        # Log audit event for order deletion
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="order",
                resource_id=str(order_id),
                resource_data=order_data,
                description=f"Admin {current_user.get('email', 'unknown')} deleted order {order_data['order_number']}",
                endpoint=f"/orders/{order_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Order deleted successfully",
                    order_id=str(order_id))
                    order_id=str(order_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

    except HTTPException:
        raise
@@ -4,6 +4,13 @@ Orders Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .customer import Customer, CustomerContact
from .order import CustomerOrder, OrderItem, OrderStatusHistory
@@ -60,4 +67,5 @@ __all__ = [
    "PriorityLevel",
    "RequirementStatus",
    "RiskLevel",
    "AuditLog",
]
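The shared factory's implementation is not shown in this commit, but the audit_logs table the migrations create pins down its shape. A plausible sketch of create_audit_log_model, offered only as a reading aid; the real shared.security version may differ:

import uuid
from sqlalchemy import Column, DateTime, String, Text, func
from sqlalchemy.dialects.postgresql import JSON, UUID

def create_audit_log_model(base):
    # Mirrors the audit_logs columns created by the per-service migrations.
    class AuditLog(base):
        __tablename__ = "audit_logs"
        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        ip_address = Column(String(45))
        user_agent = Column(Text)
        endpoint = Column(String(255))
        method = Column(String(10))
        created_at = Column(DateTime(timezone=True), nullable=False,
                            server_default=func.now(), index=True)
    return AuditLog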
@@ -9,9 +9,9 @@ import json
import uuid
from datetime import datetime, date, timedelta
from typing import Optional, Dict, Any, List
import redis
import structlog
from pydantic import BaseModel
from shared.redis_utils import get_redis_client

from app.core.config import settings
from app.models.procurement import ProcurementPlan
@@ -22,31 +22,17 @@ logger = structlog.get_logger()

class CacheService:
    """Service for managing Redis cache operations"""

    def __init__(self, redis_url: Optional[str] = None):
        """Initialize Redis connection"""
        self.redis_url = redis_url or settings.REDIS_URL

    def __init__(self):
        """Initialize cache service"""
        self._redis_client = None
        self._connect()

    def _connect(self):
        """Connect to Redis"""
        try:
            self._redis_client = redis.from_url(
                self.redis_url,
                decode_responses=True,
                socket_keepalive=True,
                socket_keepalive_options={1: 1, 3: 3, 5: 5},  # Use integer keys
                retry_on_timeout=True,
                max_connections=50
            )
            # Test connection
            self._redis_client.ping()
            logger.info("Redis connection established")
        except Exception as e:
            logger.error("Failed to connect to Redis", error=str(e))
            self._redis_client = None

    async def _get_redis(self):
        """Get shared Redis client"""
        if self._redis_client is None:
            self._redis_client = await get_redis_client()
        return self._redis_client

    @property
    def redis(self):
        """Get Redis client with connection check"""
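The refactor replaces a per-service, eagerly opened Redis connection with a lazily fetched client from the shared pool. A minimal caller sketch, assuming get_redis_client() returns a ready redis.asyncio client as its awaited use above implies; the key and TTL are illustrative:

from shared.redis_utils import get_redis_client

async def cache_plan_summary(plan_id: str, payload: str, ttl_seconds: int = 300) -> None:
    client = await get_redis_client()  # shared connection pool, no per-service connect
    await client.set(f"procurement:plan:{plan_id}", payload, ex=ttl_seconds)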
@@ -1,8 +1,8 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1229

Revision ID: 2f48673b672c
Revision ID: 7f882c2ca25c
Revises:
Create Date: 2025-10-09 20:38:51.897501+02:00
Create Date: 2025-10-15 12:29:27.201743+02:00

"""
from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '2f48673b672c'
revision: str = '7f882c2ca25c'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('customers',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -352,4 +384,18 @@ def downgrade() -> None:
    op.drop_index(op.f('ix_customers_tenant_id'), table_name='customers')
    op.drop_index(op.f('ix_customers_customer_code'), table_name='customers')
    op.drop_table('customers')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/pos/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -12,9 +12,11 @@ from app.core.database import get_db
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

router = APIRouter()
logger = structlog.get_logger()
audit_logger = create_audit_logger("pos-service")
route_builder = RouteBuilder('pos')


@@ -110,6 +112,29 @@ async def update_pos_configuration(
):
    """Update a POS configuration (Admin/Owner only)"""
    try:
        # Log HIGH severity audit event for configuration changes
        try:
            await audit_logger.log_event(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                action=AuditAction.UPDATE.value,
                resource_type="pos_configuration",
                resource_id=str(config_id),
                severity=AuditSeverity.HIGH.value,
                description=f"Admin {current_user.get('email', 'unknown')} updated POS configuration",
                changes={"configuration_updates": configuration_data},
                endpoint=f"/configurations/{config_id}",
                method="PUT"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("POS configuration updated",
                    config_id=str(config_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

        return {"message": "Configuration updated successfully", "id": str(config_id)}
    except Exception as e:
        logger.error("Failed to update POS configuration", error=str(e),
@@ -130,6 +155,27 @@ async def delete_pos_configuration(
):
    """Delete a POS configuration (Owner only)"""
    try:
        # Log CRITICAL severity audit event for configuration deletion
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="pos_configuration",
                resource_id=str(config_id),
                severity=AuditSeverity.CRITICAL.value,
                description=f"Owner {current_user.get('email', 'unknown')} deleted POS configuration",
                endpoint=f"/configurations/{config_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("POS configuration deleted",
                    config_id=str(config_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

        return {"message": "Configuration deleted successfully"}
    except Exception as e:
        logger.error("Failed to delete POS configuration", error=str(e),
@@ -2,6 +2,13 @@
Database models for POS Integration Service
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

from .pos_config import POSConfiguration
from .pos_transaction import POSTransaction, POSTransactionItem
from .pos_webhook import POSWebhookLog
@@ -12,5 +19,6 @@ __all__ = [
    "POSTransaction",
    "POSTransactionItem",
    "POSWebhookLog",
    "POSSyncLog"
    "POSSyncLog",
    "AuditLog"
]
@@ -1,18 +1,18 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1228

Revision ID: 65eda9df893b
Revision ID: e9976ec9fe9e
Revises:
Create Date: 2025-10-09 20:38:17.435929+02:00
Create Date: 2025-10-15 12:28:31.849997+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '65eda9df893b'
revision: str = 'e9976ec9fe9e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('pos_configurations',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -389,4 +421,18 @@ def downgrade() -> None:
    op.drop_index('idx_pos_config_connected', table_name='pos_configurations')
    op.drop_index('idx_pos_config_active', table_name='pos_configurations')
    op.drop_table('pos_configurations')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/production/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -10,7 +10,9 @@ from uuid import UUID
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from app.core.database import get_db
from app.services.production_service import ProductionService
from app.schemas.production import (
@@ -27,6 +29,9 @@ logger = structlog.get_logger()
route_builder = RouteBuilder('production')
router = APIRouter(tags=["production-batches"])

# Initialize audit logger
audit_logger = create_audit_logger("production-service")


def get_production_service() -> ProductionService:
    """Dependency injection for production service"""
@@ -229,16 +234,33 @@ async def update_production_batch(
@router.delete(
    route_builder.build_resource_detail_route("batches", "batch_id")
)
@require_user_role(['admin', 'owner'])
async def delete_production_batch(
    tenant_id: UUID = Path(...),
    batch_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Cancel/delete draft batch (soft delete preferred)"""
    """Cancel/delete draft batch (Admin+ only, soft delete preferred)"""
    try:
        await production_service.delete_production_batch(tenant_id, batch_id)

        # Log audit event for batch deletion
        try:
            db = next(get_db())
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="production_batch",
                resource_id=str(batch_id),
                description="Deleted production batch",
                endpoint=f"/batches/{batch_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted production batch",
                    batch_id=str(batch_id), tenant_id=str(tenant_id))
@@ -10,7 +10,9 @@ from uuid import UUID
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from app.core.database import get_db
from app.services.production_service import ProductionService
from app.schemas.production import (
@@ -24,6 +26,9 @@ logger = structlog.get_logger()
route_builder = RouteBuilder('production')
router = APIRouter(tags=["production-schedules"])

# Initialize audit logger
audit_logger = create_audit_logger("production-service")


def get_production_service() -> ProductionService:
    """Dependency injection for production service"""
@@ -125,13 +130,14 @@ async def get_production_schedule_details(
    route_builder.build_base_route("schedules"),
    response_model=ProductionScheduleResponse
)
@require_user_role(['admin', 'owner'])
async def create_production_schedule(
    schedule_data: ProductionScheduleCreate,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Generate or manually create a daily/shift schedule"""
    """Generate or manually create a daily/shift schedule (Admin+ only)"""
    try:
        schedule = await production_service.create_production_schedule(tenant_id, schedule_data)

@@ -153,6 +159,7 @@ async def create_production_schedule(
    route_builder.build_resource_detail_route("schedules", "schedule_id"),
    response_model=ProductionScheduleResponse
)
@require_user_role(['admin', 'owner'])
async def update_production_schedule(
    schedule_update: ProductionScheduleUpdate,
    tenant_id: UUID = Path(...),
@@ -160,7 +167,7 @@ async def update_production_schedule(
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Edit schedule before finalizing"""
    """Edit schedule before finalizing (Admin+ only)"""
    try:
        schedule = await production_service.update_production_schedule(tenant_id, schedule_id, schedule_update)
@@ -5,6 +5,13 @@
Production service models
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

from .production import (
    ProductionBatch,
    ProductionSchedule,
@@ -31,4 +38,5 @@ __all__ = [
    "EquipmentStatus",
    "ProcessStage",
    "EquipmentType",
    "AuditLog",
]
@@ -1,18 +1,18 @@
"""initial_schema_20251009_2039
"""initial_schema_20251015_1231

Revision ID: ff7cc8350951
Revision ID: 42a9c1fd8fec
Revises:
Create Date: 2025-10-09 20:39:57.570220+02:00
Create Date: 2025-10-15 12:31:07.740405+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'ff7cc8350951'
revision: str = '42a9c1fd8fec'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('equipment',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -255,4 +287,18 @@ def downgrade() -> None:
    op.drop_table('production_batches')
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/recipes/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -17,7 +17,7 @@ from ..schemas.recipes import (
    RecipeStatisticsResponse,
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.auth.decorators import get_current_user_dep

route_builder = RouteBuilder('recipes')
@@ -114,13 +114,18 @@ async def activate_recipe(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "feasibility"]),
    response_model=RecipeFeasibilityResponse
)
@analytics_tier_required
async def check_recipe_feasibility(
    tenant_id: UUID,
    recipe_id: UUID,
    batch_multiplier: float = Query(1.0, gt=0),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Check if recipe can be produced with current inventory"""
    """
    Check if recipe can be produced with current inventory (Professional+ tier)
    Supports batch scaling for production planning
    """
    try:
        recipe_service = RecipeService(db)

@@ -187,3 +192,30 @@ async def get_recipe_categories(
    except Exception as e:
        logger.error(f"Error getting recipe categories: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
    route_builder.build_custom_route(RouteCategory.BASE, ["count"])
)
async def get_recipe_count(
    tenant_id: UUID,
    x_internal_request: str = Header(None),
    db: AsyncSession = Depends(get_db)
):
    """
    Get total count of recipes for a tenant
    Internal endpoint for subscription usage tracking
    """
    if x_internal_request != "true":
        raise HTTPException(status_code=403, detail="Internal endpoint only")

    try:
        recipe_service = RecipeService(db)
        recipes = await recipe_service.search_recipes(tenant_id, limit=10000)
        count = len(recipes)

        return {"count": count}

    except Exception as e:
        logger.error(f"Error getting recipe count: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
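Counting by fetching up to 10,000 rows keeps the endpoint simple but scales with table size. A leaner variant, not in this commit, would push the count into SQL; it assumes a Recipe ORM model with a tenant_id column and a hypothetical import path:

from sqlalchemy import func, select
from ..models import Recipe  # hypothetical import path

async def count_recipes(db, tenant_id) -> int:
    # Let the database count instead of materializing every row.
    result = await db.execute(
        select(func.count()).select_from(Recipe).where(Recipe.tenant_id == tenant_id)
    )
    return int(result.scalar_one())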
@@ -18,9 +18,11 @@ from ..schemas.recipes import (
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, AuditSeverity, AuditAction

route_builder = RouteBuilder('recipes')
logger = logging.getLogger(__name__)
audit_logger = create_audit_logger("recipes-service")
router = APIRouter(tags=["recipes"])


@@ -193,9 +195,10 @@ async def update_recipe(
async def delete_recipe(
    tenant_id: UUID,
    recipe_id: UUID,
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Delete a recipe"""
    """Delete a recipe (Admin+ only)"""
    try:
        recipe_service = RecipeService(db)

@@ -206,10 +209,43 @@ async def delete_recipe(
        if existing_recipe["tenant_id"] != str(tenant_id):
            raise HTTPException(status_code=403, detail="Access denied")

        # Capture recipe data before deletion
        recipe_data = {
            "recipe_name": existing_recipe.get("name"),
            "category": existing_recipe.get("category"),
            "difficulty_level": existing_recipe.get("difficulty_level"),
            "ingredient_count": len(existing_recipe.get("ingredients", []))
        }

        success = await recipe_service.delete_recipe(recipe_id)
        if not success:
            raise HTTPException(status_code=404, detail="Recipe not found")

        # Log audit event for recipe deletion
        try:
            # Get sync db for audit logging
            from ..core.database import SessionLocal
            sync_db = SessionLocal()
            try:
                await audit_logger.log_deletion(
                    db_session=sync_db,
                    tenant_id=str(tenant_id),
                    user_id=str(user_id),
                    resource_type="recipe",
                    resource_id=str(recipe_id),
                    resource_data=recipe_data,
                    description=f"Admin deleted recipe {recipe_data['recipe_name']}",
                    endpoint=f"/recipes/{recipe_id}",
                    method="DELETE"
                )
                sync_db.commit()
            finally:
                sync_db.close()
        except Exception as audit_error:
            logger.warning(f"Failed to log audit event: {audit_error}")

        logger.info(f"Deleted recipe {recipe_id} by user {user_id}")

        return {"message": "Recipe deleted successfully"}

    except HTTPException:
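The sync-session dance above (create, commit, close) could be wrapped once. A hypothetical context manager, not part of this commit, assuming SessionLocal is an ordinary sessionmaker:

from contextlib import contextmanager

@contextmanager
def audit_session(session_factory):
    session = session_factory()
    try:
        yield session
        session.commit()  # persist the audit row on success
    finally:
        session.close()   # always release the connection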
@@ -1,3 +1,10 @@

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
# services/recipes/app/models/__init__.py

from .recipes import (
@@ -21,5 +28,6 @@ __all__ = [
    "RecipeStatus",
    "ProductionStatus",
    "MeasurementUnit",
    "ProductionPriority"
    "ProductionPriority",
    "AuditLog"
]
@@ -1,8 +1,8 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1228

Revision ID: a89b48099599
Revision ID: 3c4d0f57a312
Revises:
Create Date: 2025-10-09 20:38:32.626427+02:00
Create Date: 2025-10-15 12:28:57.066635+02:00

"""
from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'a89b48099599'
revision: str = '3c4d0f57a312'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('production_schedules',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -285,4 +317,18 @@ def downgrade() -> None:
    op.drop_index('idx_production_schedules_published', table_name='production_schedules')
    op.drop_index('idx_production_schedules_completed', table_name='production_schedules')
    op.drop_table('production_schedules')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -17,9 +17,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/sales/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -11,6 +11,7 @@ import structlog

from app.services.sales_service import SalesService
from shared.routing import RouteBuilder
from shared.auth.access_control import analytics_tier_required

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-analytics"])
@@ -25,13 +26,14 @@ def get_sales_service():
@router.get(
    route_builder.build_analytics_route("summary")
)
@analytics_tier_required
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales analytics summary for a tenant"""
    """Get sales analytics summary for a tenant (Professional+ tier required)"""
    try:
        analytics = await sales_service.get_sales_analytics(tenant_id, start_date, end_date)
@@ -19,11 +19,15 @@ from app.services.sales_service import SalesService
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-records"])
logger = structlog.get_logger()

# Initialize audit logger
audit_logger = create_audit_logger("sales-service")


def get_sales_service():
    """Dependency injection for SalesService"""
@@ -169,24 +173,53 @@ async def update_sales_record(
@router.delete(
    route_builder.build_resource_detail_route("sales", "record_id")
)
@require_user_role(['admin', 'owner'])
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Delete a sales record"""
    """Delete a sales record (Admin+ only)"""
    try:
        # Get record details before deletion for audit log
        record = await sales_service.get_sales_record(record_id, tenant_id)

        success = await sales_service.delete_sales_record(record_id, tenant_id)

        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        # Log audit event for sales record deletion
        try:
            from app.core.database import get_db
            db = next(get_db())
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="sales_record",
                resource_id=str(record_id),
                resource_data={
                    "product_name": record.product_name if record else None,
                    "quantity_sold": record.quantity_sold if record else None,
                    "sale_date": record.date.isoformat() if record and record.date else None
                } if record else None,
                description=f"Deleted sales record for {record.product_name if record else 'unknown product'}",
                endpoint=f"/sales/{record_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)
        return {"message": "Sales record deleted successfully"}

    except ValueError as ve:
        logger.warning("Error deleting sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
@@ -1,5 +1,12 @@

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
# services/sales/app/models/__init__.py

from .sales import SalesData, SalesImportJob

__all__ = ["SalesData", "SalesImportJob"]
__all__ = ["SalesData", "SalesImportJob", "AuditLog"]
@@ -1,18 +1,18 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1228

Revision ID: ccb1465b527e
Revision ID: 1949ed96e20e
Revises:
Create Date: 2025-10-09 20:38:25.308184+02:00
Create Date: 2025-10-15 12:28:44.373103+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'ccb1465b527e'
revision: str = '1949ed96e20e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('sales_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -100,4 +132,18 @@ def downgrade() -> None:
    op.drop_index('idx_sales_date_range', table_name='sales_data')
    op.drop_index('idx_sales_channel_date', table_name='sales_data')
    op.drop_table('sales_data')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
@@ -36,4 +36,5 @@ aio-pika==9.3.1
# Note: pytest and testing dependencies are in tests/requirements.txt

# Development
python-multipart==0.0.6
python-multipart==0.0.6
redis==5.0.1
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/suppliers/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
@@ -19,6 +19,7 @@ from app.models.suppliers import PurchaseOrderStatus
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, AuditSeverity, AuditAction

# Create route builder for consistent URL structure
route_builder = RouteBuilder('suppliers')
@@ -26,6 +27,7 @@ route_builder = RouteBuilder('suppliers')

router = APIRouter(tags=["purchase-orders"])
logger = structlog.get_logger()
audit_logger = create_audit_logger("suppliers-service")


@router.post(route_builder.build_base_route("purchase-orders"), response_model=PurchaseOrderResponse)
@@ -158,26 +160,26 @@ async def update_purchase_order(
):
    """Update purchase order information"""
    # require_permissions(current_user, ["purchase_orders:update"])

    try:
        service = PurchaseOrderService(db)

        # Check order exists and belongs to tenant
        existing_order = await service.get_purchase_order(po_id)
        if not existing_order:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        if existing_order.tenant_id != current_user.tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        purchase_order = await service.update_purchase_order(
            po_id=po_id,
            po_data=po_data,
            updated_by=current_user.user_id
        )

        if not purchase_order:
            raise HTTPException(status_code=404, detail="Purchase order not found")

        return PurchaseOrderResponse.from_orm(purchase_order)
    except HTTPException:
        raise
@@ -188,3 +190,65 @@ async def update_purchase_order(
        raise HTTPException(status_code=500, detail="Failed to update purchase order")

@router.delete(route_builder.build_resource_detail_route("purchase-orders", "po_id"))
@require_user_role(['admin', 'owner'])
async def delete_purchase_order(
    po_id: UUID = Path(..., description="Purchase order ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: Session = Depends(get_db)
):
    """Delete purchase order (soft delete, Admin+ only)"""
    try:
        service = PurchaseOrderService(db)

        # Check order exists and belongs to tenant
        existing_order = await service.get_purchase_order(po_id)
        if not existing_order:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        if existing_order.tenant_id != current_user.tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Capture PO data before deletion
        po_data = {
            "po_number": existing_order.order_number,
            "supplier_id": str(existing_order.supplier_id),
            "status": existing_order.status.value if existing_order.status else None,
            "total_amount": float(existing_order.total_amount) if existing_order.total_amount else 0.0,
            "expected_delivery_date": existing_order.expected_delivery_date.isoformat() if existing_order.expected_delivery_date else None
        }

        # Soft delete via the service layer (only draft/cancelled orders are deletable)
        success = await service.delete_purchase_order(po_id)
        if not success:
            raise HTTPException(status_code=404, detail="Purchase order not found")

        # Log audit event for purchase order deletion
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=tenant_id,
                user_id=current_user["user_id"],
                resource_type="purchase_order",
                resource_id=str(po_id),
                resource_data=po_data,
                description=f"Admin {current_user.get('email', 'unknown')} deleted purchase order {po_data['po_number']}",
                endpoint=f"/purchase-orders/{po_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted purchase order",
                    po_id=str(po_id),
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        return {"message": "Purchase order deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error deleting purchase order", po_id=str(po_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to delete purchase order")

@@ -4,7 +4,7 @@ Supplier Business Operations API endpoints (BUSINESS)
Handles approvals, status updates, active/top suppliers, and delivery/PO operations
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path
from fastapi import APIRouter, Depends, HTTPException, Query, Path, Header
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
@@ -25,6 +25,7 @@ from app.models.suppliers import SupplierType
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, AuditSeverity, AuditAction

# Create route builder for consistent URL structure
route_builder = RouteBuilder('suppliers')
@@ -32,6 +33,7 @@ route_builder = RouteBuilder('suppliers')

router = APIRouter(tags=["supplier-operations"])
logger = structlog.get_logger()
audit_logger = create_audit_logger("suppliers-service")


# ===== Supplier Operations =====
@@ -441,7 +443,7 @@ async def update_purchase_order_status(


@router.post(route_builder.build_nested_resource_route("purchase-orders", "po_id", "approve"), response_model=PurchaseOrderResponse)
@require_user_role(['admin', 'owner', 'member'])
@require_user_role(['admin', 'owner'])
async def approve_purchase_order(
    approval_data: PurchaseOrderApproval,
    po_id: UUID = Path(..., description="Purchase order ID"),
@@ -449,7 +451,7 @@ async def approve_purchase_order(
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: Session = Depends(get_db)
):
    """Approve or reject a purchase order"""
    """Approve or reject a purchase order (Admin+ only)"""
    try:
        service = PurchaseOrderService(db)

@@ -460,12 +462,22 @@ async def approve_purchase_order(
        if existing_order.tenant_id != current_user.tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Capture PO details for audit
        po_details = {
            "po_number": existing_order.order_number,
            "supplier_id": str(existing_order.supplier_id),
            "total_amount": float(existing_order.total_amount) if existing_order.total_amount else 0.0,
            "expected_delivery_date": existing_order.expected_delivery_date.isoformat() if existing_order.expected_delivery_date else None
        }

        if approval_data.action == "approve":
            purchase_order = await service.approve_purchase_order(
                po_id=po_id,
                approved_by=current_user.user_id,
                approval_notes=approval_data.notes
            )
            action = "approve"
            description = f"Admin {current_user.get('email', 'unknown')} approved purchase order {po_details['po_number']}"
        elif approval_data.action == "reject":
            if not approval_data.notes:
                raise HTTPException(status_code=400, detail="Rejection reason is required")
@@ -474,6 +486,8 @@ async def approve_purchase_order(
                rejection_reason=approval_data.notes,
                rejected_by=current_user.user_id
            )
            action = "reject"
            description = f"Admin {current_user.get('email', 'unknown')} rejected purchase order {po_details['po_number']}"
        else:
            raise HTTPException(status_code=400, detail="Invalid action")

@@ -483,6 +497,34 @@ async def approve_purchase_order(
                detail="Purchase order is not in pending approval status"
            )

        # Log HIGH severity audit event for purchase order approval/rejection
        try:
            await audit_logger.log_event(
                db_session=db,
                tenant_id=tenant_id,
                user_id=current_user["user_id"],
                action=action,
                resource_type="purchase_order",
                resource_id=str(po_id),
                severity=AuditSeverity.HIGH.value,
                description=description,
                changes={
                    "action": approval_data.action,
                    "notes": approval_data.notes,
                    "po_details": po_details
                },
                endpoint=f"/purchase-orders/{po_id}/approve",
                method="POST"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Purchase order approval processed",
                    po_id=str(po_id),
                    action=approval_data.action,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        return PurchaseOrderResponse.from_orm(purchase_order)
    except HTTPException:
        raise
@@ -672,3 +714,29 @@ async def get_top_purchased_inventory_products(
    except Exception as e:
        logger.error("Error getting top purchased inventory products", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve top purchased inventory products")


@router.get(route_builder.build_operations_route("count"))
async def get_supplier_count(
    tenant_id: str = Path(..., description="Tenant ID"),
    x_internal_request: str = Header(None),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: Session = Depends(get_db)
):
    """
    Get total count of suppliers for a tenant
    Internal endpoint for subscription usage tracking
    """
    if x_internal_request != "true":
        raise HTTPException(status_code=403, detail="Internal endpoint only")

    try:
        service = SupplierService(db)
        suppliers = await service.get_suppliers(tenant_id=current_user.tenant_id)
        count = len(suppliers)

        return {"count": count}

    except Exception as e:
        logger.error("Error getting supplier count", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve supplier count")
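
The header gate above means this count route is only reachable by sibling services that send `X-Internal-Request: true`. A minimal sketch of such a caller, mirroring the `_get_supplier_count` helper added to the tenant service later in this commit (the base URL parameter is an assumption; in the tenant service it comes from `settings.SUPPLIERS_SERVICE_URL`):

```python
import httpx

async def fetch_supplier_count(base_url: str, tenant_id: str) -> int:
    # Internal-only call: the endpoint returns 403 without this header.
    async with httpx.AsyncClient(timeout=10.0) as client:
        response = await client.get(
            f"{base_url}/api/v1/tenants/{tenant_id}/suppliers/count",
            headers={"X-Internal-Request": "true"},
        )
        response.raise_for_status()
        return response.json().get("count", 0)
```
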
@@ -18,6 +18,7 @@ from app.schemas.suppliers import (
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, AuditSeverity, AuditAction

# Create route builder for consistent URL structure
route_builder = RouteBuilder('suppliers')
@@ -25,6 +26,7 @@ route_builder = RouteBuilder('suppliers')

router = APIRouter(tags=["suppliers"])
logger = structlog.get_logger()
audit_logger = create_audit_logger("suppliers-service")

@router.post(route_builder.build_base_route("suppliers"), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
@@ -142,9 +144,11 @@ async def update_supplier(
@require_user_role(['admin', 'owner'])
async def delete_supplier(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete supplier (soft delete)"""
    """Delete supplier (soft delete, Admin+ only)"""
    try:
        service = SupplierService(db)

@@ -153,10 +157,46 @@ async def delete_supplier(
        if not existing_supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        # Capture supplier data before deletion
        supplier_data = {
            "supplier_name": existing_supplier.name,
            "supplier_type": existing_supplier.supplier_type,
            "contact_person": existing_supplier.contact_person,
            "email": existing_supplier.email
        }

        success = await service.delete_supplier(supplier_id)
        if not success:
            raise HTTPException(status_code=404, detail="Supplier not found")

        # Log audit event for supplier deletion
        try:
            # Get sync db session for audit logging
            from app.core.database import SessionLocal
            sync_db = SessionLocal()
            try:
                await audit_logger.log_deletion(
                    db_session=sync_db,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"],
                    resource_type="supplier",
                    resource_id=str(supplier_id),
                    resource_data=supplier_data,
                    description=f"Admin {current_user.get('email', 'unknown')} deleted supplier",
                    endpoint=f"/suppliers/{supplier_id}",
                    method="DELETE"
                )
                sync_db.commit()
            finally:
                sync_db.close()
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted supplier",
                    supplier_id=str(supplier_id),
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        return {"message": "Supplier deleted successfully"}
    except HTTPException:
        raise

@@ -3,6 +3,13 @@
Models package for the Supplier service
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

from .suppliers import (
    Supplier, SupplierPriceList, PurchaseOrder, PurchaseOrderItem,
    Delivery, DeliveryItem, SupplierQualityReview, SupplierInvoice,
@@ -49,5 +56,6 @@ __all__ = [
    'AlertType',
    'AlertStatus',
    'PerformanceMetricType',
    'PerformancePeriod'
    'PerformancePeriod',
    "AuditLog"
]
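
Both this package and the tenant service's models package (later in this commit) bind a per-service `AuditLog` to their own declarative `Base`, so each service database gets its own `audit_logs` table. A rough sketch of what such a factory might look like; column names and types are taken from the `audit_logs` migrations in this commit, but the real `shared.security` implementation may differ:

```python
import uuid

from sqlalchemy import JSON, Column, DateTime, String, Text, func
from sqlalchemy.dialects.postgresql import UUID


def create_audit_log_model(base):
    """Build an AuditLog model bound to the caller's declarative Base.

    Hypothetical reconstruction: columns mirror the audit_logs tables
    created by the migrations in this commit.
    """
    class AuditLog(base):
        __tablename__ = "audit_logs"

        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        ip_address = Column(String(45))
        user_agent = Column(Text)
        endpoint = Column(String(255))
        method = Column(String(10))
        created_at = Column(DateTime(timezone=True), nullable=False,
                            server_default=func.now(), index=True)

    return AuditLog
```
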
@@ -464,4 +464,33 @@ class PurchaseOrderService:
        """Get most purchased inventory products"""
        return self.item_repository.get_top_purchased_inventory_products(
            tenant_id, days_back, limit
        )

    async def delete_purchase_order(self, po_id: UUID) -> bool:
        """
        Delete (soft delete) a purchase order
        Only draft and cancelled orders can be deleted
        """
        logger.info("Deleting purchase order", po_id=str(po_id))

        po = self.repository.get_by_id(po_id)
        if not po:
            return False

        # Only allow deletion of draft or cancelled orders
        if po.status not in [PurchaseOrderStatus.DRAFT, PurchaseOrderStatus.CANCELLED]:
            raise ValueError(
                f"Cannot delete purchase order with status {po.status.value}. "
                "Only draft and cancelled orders can be deleted."
            )

        # Perform soft delete
        try:
            self.repository.delete(po_id)
            self.db.commit()
            logger.info("Purchase order deleted successfully", po_id=str(po_id))
            return True
        except Exception as e:
            self.db.rollback()
            logger.error("Failed to delete purchase order", po_id=str(po_id), error=str(e))
            raise
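
Note that the `ValueError` raised above for non-draft orders bubbles up to the DELETE route earlier in this commit, whose generic handler turns it into a 500. A hedged sketch of mapping the business rule to a client error instead (illustrative only, not part of this commit):

```python
from uuid import UUID

from fastapi import HTTPException

async def delete_po_or_400(service, po_id: UUID) -> None:
    """Translate the service's business-rule ValueError into a 400 response."""
    try:
        deleted = await service.delete_purchase_order(po_id)
    except ValueError as exc:  # non-draft/cancelled order
        raise HTTPException(status_code=400, detail=str(exc))
    if not deleted:
        raise HTTPException(status_code=404, detail="Purchase order not found")
```
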
@@ -1,8 +1,8 @@
"""initial_schema_20251009_2039
"""initial_schema_20251015_1229

Revision ID: 52c9e6461ed9
Revision ID: 93d6ea3dc888
Revises:
Create Date: 2025-10-09 20:39:09.709448+02:00
Create Date: 2025-10-15 12:29:52.767171+02:00

"""
from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '52c9e6461ed9'
revision: str = '93d6ea3dc888'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -62,6 +62,38 @@ def upgrade() -> None:
    op.create_index('ix_alert_rules_tenant_active', 'alert_rules', ['tenant_id', 'is_active'], unique=False)
    op.create_index(op.f('ix_alert_rules_tenant_id'), 'alert_rules', ['tenant_id'], unique=False)
    op.create_index('ix_alert_rules_type_severity', 'alert_rules', ['alert_type', 'severity'], unique=False)
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('supplier_benchmarks',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -655,6 +687,20 @@ def downgrade() -> None:
    op.drop_index('ix_benchmarks_category', table_name='supplier_benchmarks')
    op.drop_index('ix_benchmarks_active', table_name='supplier_benchmarks')
    op.drop_table('supplier_benchmarks')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    op.drop_index('ix_alert_rules_type_severity', table_name='alert_rules')
    op.drop_index(op.f('ix_alert_rules_tenant_id'), table_name='alert_rules')
    op.drop_index('ix_alert_rules_tenant_active', table_name='alert_rules')
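
Both regenerated initial-schema revisions in this commit start from an empty history (`down_revision = None`), so databases stamped with the old revision IDs need a rebuild or re-stamp. A minimal sketch of driving the upgrade programmatically with Alembic's command API (the ini path is an assumption):

```python
from alembic import command
from alembic.config import Config

# Point at the service's Alembic configuration (path is an assumption).
cfg = Config("services/suppliers/alembic.ini")

# Apply all migrations up to the newest revision (93d6ea3dc888 here).
command.upgrade(cfg, "head")

# To revert everything, e.g. in a disposable test database:
# command.downgrade(cfg, "base")
```
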
@@ -39,3 +39,4 @@ email-validator==2.1.0

# Development
python-multipart==0.0.6
redis==5.0.1
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/tenant/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
services/tenant/app/api/plans.py (new file, 286 lines)
@@ -0,0 +1,286 @@
"""
Subscription Plans API
Public endpoint for fetching available subscription plans
"""

from fastapi import APIRouter, HTTPException
from typing import Dict, Any
import structlog

from shared.subscription.plans import (
    SubscriptionTier,
    SubscriptionPlanMetadata,
    PlanPricing,
    QuotaLimits,
    PlanFeatures
)

logger = structlog.get_logger()
router = APIRouter(prefix="/plans", tags=["subscription-plans"])


@router.get("", response_model=Dict[str, Any])
async def get_available_plans():
    """
    Get all available subscription plans with complete metadata

    **Public endpoint** - No authentication required

    Returns:
        Dictionary containing plan metadata for all tiers

    Example Response:
    ```json
    {
        "plans": {
            "starter": {
                "name": "Starter",
                "description": "Perfect for small bakeries getting started",
                "monthly_price": 49.00,
                "yearly_price": 490.00,
                "features": [...],
                "limits": {...}
            },
            ...
        }
    }
    ```
    """
    try:
        plans_data = {}

        for tier in SubscriptionTier:
            metadata = SubscriptionPlanMetadata.PLANS[tier]

            # Convert Decimal to float for JSON serialization
            plans_data[tier.value] = {
                "name": metadata["name"],
                "description": metadata["description"],
                "tagline": metadata["tagline"],
                "popular": metadata["popular"],
                "monthly_price": float(metadata["monthly_price"]),
                "yearly_price": float(metadata["yearly_price"]),
                "trial_days": metadata["trial_days"],
                "features": metadata["features"],
                "limits": {
                    "users": metadata["limits"]["users"],
                    "locations": metadata["limits"]["locations"],
                    "products": metadata["limits"]["products"],
                    "forecasts_per_day": metadata["limits"]["forecasts_per_day"],
                },
                "support": metadata["support"],
                "recommended_for": metadata["recommended_for"],
                "contact_sales": metadata.get("contact_sales", False),
            }

        logger.info("subscription_plans_fetched", tier_count=len(plans_data))

        return {"plans": plans_data}

    except Exception as e:
        logger.error("failed_to_fetch_plans", error=str(e))
        raise HTTPException(
            status_code=500,
            detail="Failed to fetch subscription plans"
        )


@router.get("/{tier}", response_model=Dict[str, Any])
async def get_plan_by_tier(tier: str):
    """
    Get metadata for a specific subscription tier

    **Public endpoint** - No authentication required

    Args:
        tier: Subscription tier (starter, professional, enterprise)

    Returns:
        Plan metadata for the specified tier

    Raises:
        404: If tier is not found
    """
    try:
        # Validate tier
        tier_enum = SubscriptionTier(tier.lower())

        metadata = SubscriptionPlanMetadata.PLANS[tier_enum]

        plan_data = {
            "tier": tier_enum.value,
            "name": metadata["name"],
            "description": metadata["description"],
            "tagline": metadata["tagline"],
            "popular": metadata["popular"],
            "monthly_price": float(metadata["monthly_price"]),
            "yearly_price": float(metadata["yearly_price"]),
            "trial_days": metadata["trial_days"],
            "features": metadata["features"],
            "limits": {
                "users": metadata["limits"]["users"],
                "locations": metadata["limits"]["locations"],
                "products": metadata["limits"]["products"],
                "forecasts_per_day": metadata["limits"]["forecasts_per_day"],
            },
            "support": metadata["support"],
            "recommended_for": metadata["recommended_for"],
            "contact_sales": metadata.get("contact_sales", False),
        }

        logger.info("subscription_plan_fetched", tier=tier)

        return plan_data

    except ValueError:
        raise HTTPException(
            status_code=404,
            detail=f"Subscription tier '{tier}' not found"
        )
    except Exception as e:
        logger.error("failed_to_fetch_plan", tier=tier, error=str(e))
        raise HTTPException(
            status_code=500,
            detail="Failed to fetch subscription plan"
        )


@router.get("/{tier}/features")
async def get_plan_features(tier: str):
    """
    Get all features available in a subscription tier

    **Public endpoint** - No authentication required

    Args:
        tier: Subscription tier (starter, professional, enterprise)

    Returns:
        List of feature keys available in the tier
    """
    try:
        tier_enum = SubscriptionTier(tier.lower())
        features = PlanFeatures.get_features(tier_enum.value)

        return {
            "tier": tier_enum.value,
            "features": features,
            "feature_count": len(features)
        }

    except ValueError:
        raise HTTPException(
            status_code=404,
            detail=f"Subscription tier '{tier}' not found"
        )


@router.get("/{tier}/limits")
async def get_plan_limits(tier: str):
    """
    Get all quota limits for a subscription tier

    **Public endpoint** - No authentication required

    Args:
        tier: Subscription tier (starter, professional, enterprise)

    Returns:
        All quota limits for the tier
    """
    try:
        tier_enum = SubscriptionTier(tier.lower())

        limits = {
            "tier": tier_enum.value,
            "team_and_organization": {
                "max_users": QuotaLimits.MAX_USERS[tier_enum],
                "max_locations": QuotaLimits.MAX_LOCATIONS[tier_enum],
            },
            "product_and_inventory": {
                "max_products": QuotaLimits.MAX_PRODUCTS[tier_enum],
                "max_recipes": QuotaLimits.MAX_RECIPES[tier_enum],
                "max_suppliers": QuotaLimits.MAX_SUPPLIERS[tier_enum],
            },
            "ml_and_analytics": {
                "training_jobs_per_day": QuotaLimits.TRAINING_JOBS_PER_DAY[tier_enum],
                "forecast_generation_per_day": QuotaLimits.FORECAST_GENERATION_PER_DAY[tier_enum],
                "dataset_size_rows": QuotaLimits.DATASET_SIZE_ROWS[tier_enum],
                "forecast_horizon_days": QuotaLimits.FORECAST_HORIZON_DAYS[tier_enum],
                "historical_data_access_days": QuotaLimits.HISTORICAL_DATA_ACCESS_DAYS[tier_enum],
            },
            "import_export": {
                "bulk_import_rows": QuotaLimits.BULK_IMPORT_ROWS[tier_enum],
                "bulk_export_rows": QuotaLimits.BULK_EXPORT_ROWS[tier_enum],
            },
            "integrations": {
                "pos_sync_interval_minutes": QuotaLimits.POS_SYNC_INTERVAL_MINUTES[tier_enum],
                "api_calls_per_hour": QuotaLimits.API_CALLS_PER_HOUR[tier_enum],
                "webhook_endpoints": QuotaLimits.WEBHOOK_ENDPOINTS[tier_enum],
            },
            "storage": {
                "file_storage_gb": QuotaLimits.FILE_STORAGE_GB[tier_enum],
                "report_retention_days": QuotaLimits.REPORT_RETENTION_DAYS[tier_enum],
            }
        }

        return limits

    except ValueError:
        raise HTTPException(
            status_code=404,
            detail=f"Subscription tier '{tier}' not found"
        )


@router.get("/compare")
async def compare_plans():
    """
    Get plan comparison data for all tiers

    **Public endpoint** - No authentication required

    Returns:
        Comparison matrix of all plans with key features and limits
    """
    try:
        comparison = {
            "tiers": ["starter", "professional", "enterprise"],
            "pricing": {},
            "key_features": {},
            "key_limits": {}
        }

        for tier in SubscriptionTier:
            metadata = SubscriptionPlanMetadata.PLANS[tier]

            # Pricing
            comparison["pricing"][tier.value] = {
                "monthly": float(metadata["monthly_price"]),
                "yearly": float(metadata["yearly_price"]),
                "savings_percentage": round(
                    ((float(metadata["monthly_price"]) * 12) - float(metadata["yearly_price"])) /
                    (float(metadata["monthly_price"]) * 12) * 100
                )
            }

            # Key features (first 10)
            comparison["key_features"][tier.value] = metadata["features"][:10]

            # Key limits
            comparison["key_limits"][tier.value] = {
                "users": metadata["limits"]["users"],
                "locations": metadata["limits"]["locations"],
                "products": metadata["limits"]["products"],
                "forecasts_per_day": metadata["limits"]["forecasts_per_day"],
                "training_jobs_per_day": QuotaLimits.TRAINING_JOBS_PER_DAY[tier],
            }

        return comparison

    except Exception as e:
        logger.error("failed_to_compare_plans", error=str(e))
        raise HTTPException(
            status_code=500,
            detail="Failed to generate plan comparison"
        )
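
A quick way to exercise the new public router once the tenant service is running (the base URL is an assumption for local testing; no auth header is needed):

```python
import asyncio

import httpx

async def main():
    # Assumed local address of the tenant service.
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        plans = (await client.get("/plans")).json()["plans"]
        print(sorted(plans))  # e.g. ['enterprise', 'professional', 'starter']

        pro = (await client.get("/plans/professional")).json()
        print(pro["monthly_price"], pro["limits"]["users"])

        limits = (await client.get("/plans/professional/limits")).json()
        print(limits["ml_and_analytics"]["training_jobs_per_day"])

asyncio.run(main())
```

One caveat: FastAPI matches routes in declaration order, so `GET /plans/compare` is captured by the `/{tier}` route declared above it and answers 404 for the unknown tier "compare"; registering the static `/compare` path before the dynamic `/{tier}` one avoids this.
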
@@ -8,6 +8,7 @@ from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, status, Path, Query
from typing import List, Dict, Any, Optional
from uuid import UUID
import shared.redis_utils

from app.schemas.tenants import (
    BakeryRegistration, TenantResponse, TenantAccessResponse,
@@ -20,14 +21,22 @@ from shared.auth.decorators import (
    get_current_user_dep,
    require_admin_role_dep
)
from shared.auth.access_control import owner_role_required, admin_role_required
from shared.routing.route_builder import RouteBuilder
from shared.database.base import create_database_manager
from shared.monitoring.metrics import track_endpoint_metrics
from shared.security import create_audit_logger, AuditSeverity, AuditAction

logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder("tenants")

# Initialize audit logger
audit_logger = create_audit_logger("tenant-service")

# Global Redis client
_redis_client = None

# Dependency injection for enhanced tenant service
def get_enhanced_tenant_service():
    try:
@@ -38,11 +47,25 @@ def get_enhanced_tenant_service():
        logger.error("Failed to create enhanced tenant service", error=str(e))
        raise HTTPException(status_code=500, detail="Service initialization failed")

async def get_tenant_redis_client():
    """Get or create Redis client"""
    global _redis_client
    try:
        if _redis_client is None:
            from app.core.config import settings
            _redis_client = await shared.redis_utils.initialize_redis(settings.REDIS_URL)
            logger.info("Redis client initialized using shared utilities")
        return _redis_client
    except Exception as e:
        logger.warning("Failed to initialize Redis client, service will work with limited functionality", error=str(e))
        return None

def get_subscription_limit_service():
    try:
        from app.core.config import settings
        database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
        return SubscriptionLimitService(database_manager)
        redis_client = get_tenant_redis_client()
        return SubscriptionLimitService(database_manager, redis_client)
    except Exception as e:
        logger.error("Failed to create subscription limit service", error=str(e))
        raise HTTPException(status_code=500, detail="Service initialization failed")
@@ -325,6 +348,7 @@ async def update_tenant_model_status(

@router.post(route_builder.build_base_route("{tenant_id}/deactivate", include_tenant_prefix=False))
@track_endpoint_metrics("tenant_deactivate")
@owner_role_required
async def deactivate_tenant(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -339,6 +363,25 @@ async def deactivate_tenant(
        )

        if success:
            # Log audit event for tenant deactivation
            try:
                from app.core.database import get_db_session
                async with get_db_session() as db:
                    await audit_logger.log_event(
                        db_session=db,
                        tenant_id=str(tenant_id),
                        user_id=current_user["user_id"],
                        action=AuditAction.DEACTIVATE.value,
                        resource_type="tenant",
                        resource_id=str(tenant_id),
                        severity=AuditSeverity.CRITICAL.value,
                        description=f"Owner {current_user.get('email', current_user['user_id'])} deactivated tenant",
                        endpoint="/{tenant_id}/deactivate",
                        method="POST"
                    )
            except Exception as audit_error:
                logger.warning("Failed to log audit event", error=str(audit_error))

            return {"success": True, "message": "Tenant deactivated successfully"}
        else:
            raise HTTPException(
@@ -359,6 +402,7 @@ async def deactivate_tenant(

@router.post(route_builder.build_base_route("{tenant_id}/activate", include_tenant_prefix=False))
@track_endpoint_metrics("tenant_activate")
@owner_role_required
async def activate_tenant(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -373,6 +417,25 @@ async def activate_tenant(
        )

        if success:
            # Log audit event for tenant activation
            try:
                from app.core.database import get_db_session
                async with get_db_session() as db:
                    await audit_logger.log_event(
                        db_session=db,
                        tenant_id=str(tenant_id),
                        user_id=current_user["user_id"],
                        action=AuditAction.ACTIVATE.value,
                        resource_type="tenant",
                        resource_id=str(tenant_id),
                        severity=AuditSeverity.HIGH.value,
                        description=f"Owner {current_user.get('email', current_user['user_id'])} activated tenant",
                        endpoint="/{tenant_id}/activate",
                        method="POST"
                    )
            except Exception as audit_error:
                logger.warning("Failed to log audit event", error=str(audit_error))

            return {"success": True, "message": "Tenant activated successfully"}
        else:
            raise HTTPException(
|
||||
detail="Failed to upgrade subscription plan"
|
||||
)
|
||||
|
||||
@router.get("/api/v1/plans")
|
||||
async def get_available_plans():
|
||||
"""Get all available subscription plans with features and pricing - Public endpoint"""
|
||||
|
||||
try:
|
||||
# This could be moved to a config service or database
|
||||
plans = {
|
||||
"starter": {
|
||||
"name": "Starter",
|
||||
"description": "Ideal para panaderías pequeñas o nuevas",
|
||||
"monthly_price": 49.0,
|
||||
"max_users": 5,
|
||||
"max_locations": 1,
|
||||
"max_products": 50,
|
||||
"features": {
|
||||
"inventory_management": "basic",
|
||||
"demand_prediction": "basic",
|
||||
"production_reports": "basic",
|
||||
"analytics": "basic",
|
||||
"support": "email",
|
||||
"trial_days": 14,
|
||||
"locations": "1_location",
|
||||
"ai_model_configuration": "basic"
|
||||
},
|
||||
"trial_available": True
|
||||
},
|
||||
"professional": {
|
||||
"name": "Professional",
|
||||
"description": "Ideal para panaderías y cadenas en crecimiento",
|
||||
"monthly_price": 129.0,
|
||||
"max_users": 15,
|
||||
"max_locations": 2,
|
||||
"max_products": -1, # Unlimited
|
||||
"features": {
|
||||
"inventory_management": "advanced",
|
||||
"demand_prediction": "ai_92_percent",
|
||||
"production_management": "complete",
|
||||
"pos_integrated": True,
|
||||
"logistics": "basic",
|
||||
"analytics": "advanced",
|
||||
"support": "priority_24_7",
|
||||
"trial_days": 14,
|
||||
"locations": "1_2_locations",
|
||||
"ai_model_configuration": "advanced"
|
||||
},
|
||||
"trial_available": True,
|
||||
"popular": True
|
||||
},
|
||||
"enterprise": {
|
||||
"name": "Enterprise",
|
||||
"description": "Ideal para cadenas con obradores centrales",
|
||||
"monthly_price": 399.0,
|
||||
"max_users": -1, # Unlimited
|
||||
"max_locations": -1, # Unlimited
|
||||
"max_products": -1, # Unlimited
|
||||
"features": {
|
||||
"inventory_management": "multi_location",
|
||||
"demand_prediction": "ai_personalized",
|
||||
"production_optimization": "capacity",
|
||||
"erp_integration": True,
|
||||
"logistics": "advanced",
|
||||
"analytics": "predictive",
|
||||
"api_access": "personalized",
|
||||
"account_manager": True,
|
||||
"demo": "personalized",
|
||||
"locations": "unlimited_obradores",
|
||||
"ai_model_configuration": "enterprise"
|
||||
},
|
||||
"trial_available": False,
|
||||
"contact_sales": True
|
||||
}
|
||||
}
|
||||
|
||||
return {"plans": plans}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get available plans", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to get available plans"
|
||||
)
|
||||
|
||||
# ============================================================================
|
||||
# PAYMENT OPERATIONS
|
||||
# ============================================================================
|
||||
# Note: /plans endpoint moved to app/api/plans.py for better organization
|
||||
|
||||
@router.post(route_builder.build_base_route("subscriptions/register-with-subscription", include_tenant_prefix=False))
|
||||
async def register_with_subscription(
|
||||
|
||||
@@ -11,6 +11,7 @@ from uuid import UUID
from app.schemas.tenants import TenantResponse, TenantUpdate
from app.services.tenant_service import EnhancedTenantService
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import admin_role_required
from shared.routing.route_builder import RouteBuilder
from shared.database.base import create_database_manager
from shared.monitoring.metrics import track_endpoint_metrics
@@ -48,13 +49,14 @@ async def get_tenant(
    return tenant

@router.put(route_builder.build_base_route("{tenant_id}", include_tenant_prefix=False), response_model=TenantResponse)
@admin_role_required
async def update_tenant(
    update_data: TenantUpdate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    tenant_service: EnhancedTenantService = Depends(get_enhanced_tenant_service)
):
    """Update tenant information - ATOMIC operation"""
    """Update tenant information - ATOMIC operation (Admin+ only)"""

    try:
        result = await tenant_service.update_tenant(

@@ -39,7 +39,11 @@ class TenantSettings(BaseServiceSettings):

    # Redis Database (dedicated for tenant data)
    REDIS_DB: int = 4

    # Service URLs for usage tracking
    RECIPES_SERVICE_URL: str = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8004")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8005")

    # Subscription Plans
    DEFAULT_PLAN: str = os.getenv("DEFAULT_PLAN", "basic")
    TRIAL_PERIOD_DAYS: int = int(os.getenv("TRIAL_PERIOD_DAYS", "14"))

@@ -7,7 +7,7 @@ from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.api import tenants, tenant_members, tenant_operations, webhooks, internal_demo
from app.api import tenants, tenant_members, tenant_operations, webhooks, internal_demo, plans
from shared.service_base import StandardFastAPIService


@@ -111,6 +111,7 @@ service.setup_standard_endpoints()
service.setup_custom_endpoints()

# Include routers
service.add_router(plans.router, tags=["subscription-plans"])  # Public endpoint
service.add_router(tenants.router, tags=["tenants"])
service.add_router(tenant_members.router, tags=["tenant-members"])
service.add_router(tenant_operations.router, tags=["tenant-operations"])

@@ -4,6 +4,13 @@ Tenant Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .tenants import Tenant, TenantMember, Subscription

@@ -12,4 +19,5 @@ __all__ = [
    "Tenant",
    "TenantMember",
    "Subscription",
    "AuditLog",
]

@@ -7,21 +7,24 @@ import structlog
from typing import Dict, Any, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from fastapi import HTTPException, status
from datetime import datetime, timezone
import httpx

from app.repositories import SubscriptionRepository, TenantRepository, TenantMemberRepository
from app.models.tenants import Subscription, Tenant, TenantMember
from shared.database.exceptions import DatabaseError
from shared.database.base import create_database_manager
from shared.subscription.plans import SubscriptionPlanMetadata, get_training_job_quota, get_forecast_quota

logger = structlog.get_logger()


class SubscriptionLimitService:
    """Service for validating subscription limits and features"""

    def __init__(self, database_manager=None):
    def __init__(self, database_manager=None, redis_client=None):
        self.database_manager = database_manager or create_database_manager()
        self.redis = redis_client

    async def _init_repositories(self, session):
        """Initialize repositories with session"""
@@ -277,19 +280,19 @@ class SubscriptionLimitService:
            return {"can_upgrade": False, "reason": "Error validating upgrade"}

    async def get_usage_summary(self, tenant_id: str) -> Dict[str, Any]:
        """Get a summary of current usage vs limits for a tenant"""
        """Get a summary of current usage vs limits for a tenant - ALL 9 METRICS"""
        try:
            async with self.database_manager.get_session() as db_session:
                await self._init_repositories(db_session)

                subscription = await self.subscription_repo.get_active_subscription(tenant_id)
                if not subscription:
                    # FIX: Return mock subscription for demo tenants instead of error
                    logger.info("No subscription found, returning mock data", tenant_id=tenant_id)
                    return {
                        "plan": "demo",
                        "monthly_price": 0,
                        "status": "active",
                        "billing_cycle": "monthly",
                        "usage": {
                            "users": {
                                "current": 1,
@@ -308,52 +311,121 @@ class SubscriptionLimitService:
                                "limit": 50,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "recipes": {
                                "current": 0,
                                "limit": 100,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "suppliers": {
                                "current": 0,
                                "limit": 20,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "training_jobs_today": {
                                "current": 0,
                                "limit": 2,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "forecasts_today": {
                                "current": 0,
                                "limit": 10,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "api_calls_this_hour": {
                                "current": 0,
                                "limit": 100,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            },
                            "file_storage_used_gb": {
                                "current": 0.0,
                                "limit": 1.0,
                                "unlimited": False,
                                "usage_percentage": 0.0
                            }
                        },
                        "features": {},
                        "next_billing_date": None,
                        "trial_ends_at": None
                    }

                # Get current usage
                # Get current usage - Team & Organization
                members = await self.member_repo.get_tenant_members(tenant_id, active_only=True)
                current_users = len(members)
                current_locations = 1  # TODO: Implement actual location count from locations service

                # Get actual ingredient/product count from inventory service
                # Get current usage - Products & Inventory
                current_products = await self._get_ingredient_count(tenant_id)
                current_recipes = await self._get_recipe_count(tenant_id)
                current_suppliers = await self._get_supplier_count(tenant_id)

                # Get current usage - AI & Analytics (Redis-based daily quotas)
                training_jobs_usage = await self._get_training_jobs_today(tenant_id, subscription.plan)
                forecasts_usage = await self._get_forecasts_today(tenant_id, subscription.plan)

                # Get current usage - API & Storage (Redis-based)
                api_calls_usage = await self._get_api_calls_this_hour(tenant_id, subscription.plan)
                storage_usage = await self._get_file_storage_usage_gb(tenant_id, subscription.plan)

                # Get limits from subscription
                recipes_limit = await self._get_limit_from_plan(subscription.plan, 'recipes')
                suppliers_limit = await self._get_limit_from_plan(subscription.plan, 'suppliers')

                # TODO: Implement actual location count
                current_locations = 1

                return {
                    "plan": subscription.plan,
                    "monthly_price": subscription.monthly_price,
                    "status": subscription.status,
                    "billing_cycle": subscription.billing_cycle or "monthly",
                    "usage": {
                        # Team & Organization
                        "users": {
                            "current": current_users,
                            "limit": subscription.max_users,
                            "unlimited": subscription.max_users == -1,
                            "usage_percentage": 0 if subscription.max_users == -1 else (current_users / subscription.max_users) * 100
                            "usage_percentage": 0 if subscription.max_users == -1 else self._calculate_percentage(current_users, subscription.max_users)
                        },
                        "locations": {
                            "current": current_locations,
                            "limit": subscription.max_locations,
                            "unlimited": subscription.max_locations == -1,
                            "usage_percentage": 0 if subscription.max_locations == -1 else (current_locations / subscription.max_locations) * 100
                            "usage_percentage": 0 if subscription.max_locations == -1 else self._calculate_percentage(current_locations, subscription.max_locations)
                        },
                        # Products & Inventory
                        "products": {
                            "current": current_products,
                            "limit": subscription.max_products,
                            "unlimited": subscription.max_products == -1,
                            "usage_percentage": 0 if subscription.max_products == -1 else (current_products / subscription.max_products) * 100 if subscription.max_products > 0 else 0
                        }
                            "usage_percentage": 0 if subscription.max_products == -1 else self._calculate_percentage(current_products, subscription.max_products)
                        },
                        "recipes": {
                            "current": current_recipes,
                            "limit": recipes_limit,
                            "unlimited": recipes_limit is None,
                            "usage_percentage": self._calculate_percentage(current_recipes, recipes_limit)
                        },
                        "suppliers": {
                            "current": current_suppliers,
                            "limit": suppliers_limit,
                            "unlimited": suppliers_limit is None,
                            "usage_percentage": self._calculate_percentage(current_suppliers, suppliers_limit)
                        },
                        # AI & Analytics (Daily quotas)
                        "training_jobs_today": training_jobs_usage,
                        "forecasts_today": forecasts_usage,
                        # API & Storage
                        "api_calls_this_hour": api_calls_usage,
                        "file_storage_used_gb": storage_usage
                    },
                    "features": subscription.features or {},
                    "next_billing_date": subscription.next_billing_date.isoformat() if subscription.next_billing_date else None,
                    "trial_ends_at": subscription.trial_ends_at.isoformat() if subscription.trial_ends_at else None
                }

        except Exception as e:
            logger.error("Failed to get usage summary",
                         tenant_id=tenant_id,
@@ -386,6 +458,153 @@ class SubscriptionLimitService:
            # Return 0 as fallback to avoid breaking subscription display
            return 0

    async def _get_recipe_count(self, tenant_id: str) -> int:
        """Get recipe count from recipes service"""
        try:
            from app.core.config import settings

            async with httpx.AsyncClient(timeout=10.0) as client:
                response = await client.get(
                    f"{settings.RECIPES_SERVICE_URL}/api/v1/tenants/{tenant_id}/recipes/count",
                    headers={"X-Internal-Request": "true"}
                )
                response.raise_for_status()
                data = response.json()
                count = data.get("count", 0)

                logger.info("Retrieved recipe count", tenant_id=tenant_id, count=count)
                return count

        except Exception as e:
            logger.error("Error getting recipe count", tenant_id=tenant_id, error=str(e))
            return 0

    async def _get_supplier_count(self, tenant_id: str) -> int:
        """Get supplier count from suppliers service"""
        try:
            from app.core.config import settings

            async with httpx.AsyncClient(timeout=10.0) as client:
                response = await client.get(
                    f"{settings.SUPPLIERS_SERVICE_URL}/api/v1/tenants/{tenant_id}/suppliers/count",
                    headers={"X-Internal-Request": "true"}
                )
                response.raise_for_status()
                data = response.json()
                count = data.get("count", 0)

                logger.info("Retrieved supplier count", tenant_id=tenant_id, count=count)
                return count

        except Exception as e:
            logger.error("Error getting supplier count", tenant_id=tenant_id, error=str(e))
            return 0

    async def _get_redis_quota(self, quota_key: str) -> int:
        """Get current count from Redis quota key"""
        try:
            if not self.redis:
                return 0

            current = await self.redis.get(quota_key)
            return int(current) if current else 0

        except Exception as e:
            logger.error("Error getting Redis quota", key=quota_key, error=str(e))
            return 0

    async def _get_training_jobs_today(self, tenant_id: str, plan: str) -> Dict[str, Any]:
        """Get training jobs usage for today"""
        try:
            date_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
            quota_key = f"quota:daily:training_jobs:{tenant_id}:{date_str}"
            current_count = await self._get_redis_quota(quota_key)

            limit = get_training_job_quota(plan)

            return {
                "current": current_count,
                "limit": limit,
                "unlimited": limit is None,
                "usage_percentage": self._calculate_percentage(current_count, limit)
            }

        except Exception as e:
            logger.error("Error getting training jobs today", tenant_id=tenant_id, error=str(e))
            return {"current": 0, "limit": None, "unlimited": True, "usage_percentage": 0.0}

    async def _get_forecasts_today(self, tenant_id: str, plan: str) -> Dict[str, Any]:
        """Get forecast generation usage for today"""
        try:
            date_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
            quota_key = f"quota:daily:forecast_generation:{tenant_id}:{date_str}"
            current_count = await self._get_redis_quota(quota_key)

            limit = get_forecast_quota(plan)

            return {
                "current": current_count,
                "limit": limit,
                "unlimited": limit is None,
                "usage_percentage": self._calculate_percentage(current_count, limit)
            }

        except Exception as e:
            logger.error("Error getting forecasts today", tenant_id=tenant_id, error=str(e))
            return {"current": 0, "limit": None, "unlimited": True, "usage_percentage": 0.0}

    async def _get_api_calls_this_hour(self, tenant_id: str, plan: str) -> Dict[str, Any]:
        """Get API calls usage for current hour"""
        try:
            hour_str = datetime.now(timezone.utc).strftime('%Y-%m-%d-%H')
            quota_key = f"quota:hourly:api_calls:{tenant_id}:{hour_str}"
            current_count = await self._get_redis_quota(quota_key)

            plan_metadata = SubscriptionPlanMetadata.PLANS.get(plan, {})
            limit = plan_metadata.get('limits', {}).get('api_calls_per_hour')

            return {
                "current": current_count,
                "limit": limit,
                "unlimited": limit is None,
                "usage_percentage": self._calculate_percentage(current_count, limit)
            }

        except Exception as e:
            logger.error("Error getting API calls this hour", tenant_id=tenant_id, error=str(e))
            return {"current": 0, "limit": None, "unlimited": True, "usage_percentage": 0.0}

    async def _get_file_storage_usage_gb(self, tenant_id: str, plan: str) -> Dict[str, Any]:
        """Get file storage usage in GB"""
        try:
            storage_key = f"storage:total_bytes:{tenant_id}"
            total_bytes = await self._get_redis_quota(storage_key)
            total_gb = round(total_bytes / (1024 ** 3), 2) if total_bytes > 0 else 0.0

            plan_metadata = SubscriptionPlanMetadata.PLANS.get(plan, {})
            limit = plan_metadata.get('limits', {}).get('file_storage_gb')

            return {
                "current": total_gb,
                "limit": limit,
                "unlimited": limit is None,
                "usage_percentage": self._calculate_percentage(total_gb, limit)
            }

        except Exception as e:
            logger.error("Error getting file storage usage", tenant_id=tenant_id, error=str(e))
            return {"current": 0.0, "limit": None, "unlimited": True, "usage_percentage": 0.0}

    def _calculate_percentage(self, current: float, limit: Optional[int]) -> float:
        """Calculate usage percentage"""
        if limit is None or limit == -1:
            return 0.0
        if limit == 0:
            return 0.0
        return round((current / limit) * 100, 1)

    async def _get_limit_from_plan(self, plan: str, limit_key: str) -> Optional[int]:
        """Get limit value from plan metadata"""
        plan_metadata = SubscriptionPlanMetadata.PLANS.get(plan, {})
        limit = plan_metadata.get('limits', {}).get(limit_key)
        return limit if limit != -1 else None


# Legacy alias for backward compatibility
SubscriptionService = SubscriptionLimitService
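
These helpers only read the quota counters; they assume some producer increments the same keys. A minimal sketch of that write side, reusing the key shapes above and letting Redis expire the counters (where the increments actually live, such as shared.redis_utils or the consuming services, is an assumption):

```python
from datetime import datetime, timezone

import redis.asyncio as aioredis

DAY_SECONDS = 60 * 60 * 24

async def increment_daily_quota(redis: aioredis.Redis, metric: str, tenant_id: str) -> int:
    """Increment today's counter for a tenant, e.g. metric='training_jobs'.

    Key shape matches the readers above:
    quota:daily:<metric>:<tenant_id>:<YYYY-MM-DD>
    """
    date_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    key = f"quota:daily:{metric}:{tenant_id}:{date_str}"
    count = await redis.incr(key)
    if count == 1:
        # First hit today: expire after ~2 days as a safety margin.
        await redis.expire(key, 2 * DAY_SECONDS)
    return count
```
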

@@ -19,6 +19,7 @@ from app.services.messaging import publish_tenant_created, publish_member_added
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
from shared.database.base import create_database_manager
from shared.database.unit_of_work import UnitOfWork
from shared.clients.nominatim_client import NominatimClient

logger = structlog.get_logger()

@@ -55,7 +56,51 @@ class EnhancedTenantService:
        tenant_repo = uow.register_repository("tenants", TenantRepository, Tenant)
        member_repo = uow.register_repository("members", TenantMemberRepository, TenantMember)
        subscription_repo = uow.register_repository("subscriptions", SubscriptionRepository, Subscription)

        # Geocode address using Nominatim
        latitude = getattr(bakery_data, 'latitude', None)
        longitude = getattr(bakery_data, 'longitude', None)

        if not latitude or not longitude:
            try:
                from app.core.config import settings
                nominatim_client = NominatimClient(settings)

                location = await nominatim_client.geocode_address(
                    street=bakery_data.address,
                    city=bakery_data.city,
                    postal_code=bakery_data.postal_code,
                    country="Spain"
                )

                if location:
                    latitude = float(location["lat"])
                    longitude = float(location["lon"])
                    logger.info(
                        "Address geocoded successfully",
                        address=bakery_data.address,
                        city=bakery_data.city,
                        latitude=latitude,
                        longitude=longitude
                    )
                else:
                    logger.warning(
                        "Could not geocode address, using default Madrid coordinates",
                        address=bakery_data.address,
                        city=bakery_data.city
                    )
                    latitude = 40.4168
                    longitude = -3.7038

            except Exception as e:
                logger.warning(
                    "Geocoding failed, using default coordinates",
                    address=bakery_data.address,
                    error=str(e)
                )
                latitude = 40.4168
                longitude = -3.7038

        # Prepare tenant data
        tenant_data = {
            "name": bakery_data.name,
@@ -66,8 +111,8 @@ class EnhancedTenantService:
            "phone": bakery_data.phone,
            "owner_id": owner_id,
            "email": getattr(bakery_data, 'email', None),
            "latitude": getattr(bakery_data, 'latitude', None),
            "longitude": getattr(bakery_data, 'longitude', None),
            "latitude": latitude,
            "longitude": longitude,
            "is_active": True
        }
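
The geocode-with-fallback flow above can also be read as one small helper. A minimal sketch, assuming only a client exposing the geocode_address coroutine shown above (the helper name and tuple return are illustrative, not part of the commit):

# Illustrative condensation of the fallback logic above; MADRID mirrors the
# hard-coded default coordinates used in the commit.
MADRID = (40.4168, -3.7038)

async def geocode_or_default(client, street, city, postal_code, country="Spain"):
    try:
        location = await client.geocode_address(
            street=street, city=city, postal_code=postal_code, country=country
        )
        if location:
            return float(location["lat"]), float(location["lon"])
    except Exception:
        pass  # network/parse errors fall through to the default
    return MADRID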

@@ -1,28 +0,0 @@
"""add_metadata_column_to_tenants

Revision ID: 865dc00c1244
Revises: 44b6798d898c
Create Date: 2025-10-11 12:47:19.499034+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '865dc00c1244'
down_revision: Union[str, None] = '44b6798d898c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add metadata_ JSON column to tenants table
    op.add_column('tenants', sa.Column('metadata_', sa.JSON(), nullable=True))


def downgrade() -> None:
    # Remove metadata_ column from tenants table
    op.drop_column('tenants', 'metadata_')

@@ -1,18 +1,18 @@
"""initial_schema_20251009_2039
"""initial_schema_20251015_1230

Revision ID: 44b6798d898c
Revision ID: 4e1ddc13dd0f
Revises:
Create Date: 2025-10-09 20:39:18.137489+02:00
Create Date: 2025-10-15 12:30:04.847858+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '44b6798d898c'
revision: str = '4e1ddc13dd0f'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('tenants',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=200), nullable=False),
@@ -43,6 +75,7 @@ def upgrade() -> None:
    sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
    sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('metadata_', sa.JSON(), nullable=True),
    sa.Column('owner_id', sa.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
@@ -101,4 +134,18 @@ def downgrade() -> None:
    op.drop_index(op.f('ix_tenants_demo_session_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_base_demo_tenant_id'), table_name='tenants')
    op.drop_table('tenants')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
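
The composite indexes above match the audit queries the services are expected to run. As one hedged example, a tenant timeline lookup that idx_audit_tenant_created can serve (the helper and async session usage are illustrative, not from this commit):

# Illustrative query shape for idx_audit_tenant_created: filter on tenant_id,
# order on created_at, i.e. both columns of the composite index.
from sqlalchemy import select

async def recent_audit_events(session, AuditLog, tenant_id, limit=50):
    stmt = (
        select(AuditLog)
        .where(AuditLog.tenant_id == tenant_id)
        .order_by(AuditLog.created_at.desc())
        .limit(limit)
    )
    result = await session.execute(stmt)
    return result.scalars().all()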

@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/

COPY services/training/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt

RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage

@@ -8,11 +8,19 @@ from typing import Optional, Dict, Any
import structlog
from datetime import datetime, timezone
import uuid
import shared.redis_utils

from shared.routing import RouteBuilder
from shared.monitoring.decorators import track_execution_time
from shared.monitoring.metrics import get_metrics_collector
from shared.database.base import create_database_manager
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.security import create_audit_logger, create_rate_limiter, AuditSeverity, AuditAction
from shared.subscription.plans import (
    get_training_job_quota,
    get_dataset_size_limit
)

from app.services.training_service import EnhancedTrainingService
from app.schemas.training import (
@@ -20,6 +28,11 @@ from app.schemas.training import (
    SingleProductTrainingRequest,
    TrainingJobResponse
)
from app.utils.time_estimation import (
    calculate_initial_estimate,
    calculate_estimated_completion_time,
    get_historical_average_estimate
)
from app.services.training_events import (
    publish_training_started,
    publish_training_completed,

@@ -32,6 +45,30 @@ route_builder = RouteBuilder('training')

router = APIRouter(tags=["training-operations"])

# Initialize audit logger
audit_logger = create_audit_logger("training-service")

# Redis client for rate limiting
_redis_client = None

async def get_training_redis_client():
    """Get or create Redis client for rate limiting"""
    global _redis_client
    if _redis_client is None:
        # Initialize Redis if not already done
        try:
            from app.core.config import settings
            _redis_client = await shared.redis_utils.initialize_redis(settings.REDIS_URL)
        except Exception:
            # Fallback to getting the client directly (if already initialized elsewhere)
            _redis_client = await shared.redis_utils.get_redis_client()
    return _redis_client

async def get_rate_limiter():
    """Dependency for rate limiter"""
    redis_client = await get_training_redis_client()
    return create_rate_limiter(redis_client)

def get_enhanced_training_service():
    """Dependency injection for EnhancedTrainingService"""
    database_manager = create_database_manager(settings.DATABASE_URL, "training-service")
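
create_rate_limiter and check_and_increment_quota come from shared.security, whose internals are not shown in this diff. As a mental model only, the quota call used below behaves roughly like a Redis fixed-window counter; this sketch is an assumption, not the shared implementation:

# Assumed behavior of check_and_increment_quota: a fixed-window counter keyed
# per tenant and resource, raising 429 once the window's limit is exceeded.
from fastapi import HTTPException

async def check_and_increment_quota(redis, tenant_id, resource, limit, period=86400):
    if limit is None:  # unlimited tier: skip counting entirely
        return {"current": 0, "limit": None}
    key = f"quota:{resource}:{tenant_id}"
    current = await redis.incr(key)
    if current == 1:
        await redis.expire(key, period)  # open the window on first use
    if current > limit:
        raise HTTPException(status_code=429, detail=f"{resource} quota exceeded")
    return {"current": current, "limit": limit}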

@@ -40,31 +77,82 @@ def get_enhanced_training_service():

@router.post(
    route_builder.build_base_route("jobs"), response_model=TrainingJobResponse)
@require_user_role(['admin', 'owner'])
@track_execution_time("enhanced_training_job_duration_seconds", "training-service")
async def start_training_job(
    request: TrainingJobRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    background_tasks: BackgroundTasks = BackgroundTasks(),
    request_obj: Request = None,
    enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service)
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service),
    rate_limiter = Depends(get_rate_limiter)
):
    """
    Start a new training job for all tenant products using repository pattern.
    Start a new training job for all tenant products (Admin+ only, quota enforced).

    **RBAC:** Admin or Owner role required
    **Quotas:**
    - Starter: 1 training job/day, max 1,000 rows
    - Professional: 5 training jobs/day, max 10,000 rows
    - Enterprise: Unlimited jobs, unlimited rows

    Enhanced immediate response pattern:
    1. Validate request with enhanced validation
    2. Create job record using repository pattern
    3. Return 200 with enhanced job details
    4. Execute enhanced training in background with repository tracking
    1. Validate subscription tier and quotas
    2. Validate request with enhanced validation
    3. Create job record using repository pattern
    4. Return 200 with enhanced job details
    5. Execute enhanced training in background with repository tracking

    Enhanced features:
    - Repository pattern for data access
    - Quota enforcement by subscription tier
    - Audit logging for all operations
    - Enhanced error handling and logging
    - Metrics tracking and monitoring
    - Transactional operations
    """
    metrics = get_metrics_collector(request_obj)

    # Get subscription tier and enforce quotas
    tier = current_user.get('subscription_tier', 'starter')

    # Estimate dataset size (this should come from the request or be calculated)
    # For now, we'll assume a reasonable estimate
    estimated_dataset_size = request.estimated_rows if hasattr(request, 'estimated_rows') else 500

    # Initialize variables for later use
    quota_result = None
    quota_limit = None

    try:
        # Validate dataset size limits
        await rate_limiter.validate_dataset_size(
            tenant_id, estimated_dataset_size, tier
        )

        # Check daily training job quota
        quota_limit = get_training_job_quota(tier)
        quota_result = await rate_limiter.check_and_increment_quota(
            tenant_id,
            "training_jobs",
            quota_limit,
            period=86400  # 24 hours
        )

        logger.info("Training job quota check passed",
                    tenant_id=tenant_id,
                    tier=tier,
                    current_usage=quota_result.get('current', 0) if quota_result else 0,
                    limit=quota_limit)

    except HTTPException:
        # Quota or validation error - re-raise
        raise
    except Exception as quota_error:
        logger.error("Quota validation failed", error=str(quota_error))
        # Continue with job creation but log the error

    try:
        # Generate enhanced job ID
        job_id = f"enhanced_training_{tenant_id}_{uuid.uuid4().hex[:8]}"
@@ -85,6 +173,25 @@ async def start_training_job(
            total_products=0  # Will be updated when actual training starts
        )

        # Calculate intelligent time estimate
        # We don't know exact product count yet, so use historical average or estimate
        try:
            # Try to get historical average for this tenant
            from app.core.database import get_db
            db = next(get_db())
            historical_avg = get_historical_average_estimate(db, tenant_id)

            # If no historical data, estimate based on typical product count (10-20 products)
            estimated_products = 15  # Conservative estimate
            estimated_duration_minutes = calculate_initial_estimate(
                total_products=estimated_products,
                avg_training_time_per_product=historical_avg if historical_avg else 60.0
            )
        except Exception as est_error:
            logger.warning("Could not calculate intelligent estimate, using default",
                           error=str(est_error))
            estimated_duration_minutes = 15  # Default fallback
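            # Worked example of the estimate (illustrative numbers): with no
            # history, calculate_initial_estimate is called with 15 products at
            # 60.0 s each, which presumably lands around 15 * 60 s = 900 s =
            # 15 minutes, matching the default fallback above.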

        # Add enhanced background task
        background_tasks.add_task(
            execute_training_job_background,
@@ -92,7 +199,8 @@ async def start_training_job(
            job_id=job_id,
            bakery_location=(40.4168, -3.7038),
            requested_start=request.start_date,
            requested_end=request.end_date
            requested_end=request.end_date,
            estimated_duration_minutes=estimated_duration_minutes
        )

        # Return enhanced immediate success response
@@ -102,7 +210,7 @@ async def start_training_job(
            "status": "pending",
            "message": "Enhanced training job started successfully using repository pattern",
            "created_at": datetime.now(timezone.utc),
            "estimated_duration_minutes": 18,
            "estimated_duration_minutes": estimated_duration_minutes,
            "training_results": {
                "total_products": 0,
                "successful_trainings": 0,
@@ -126,6 +234,32 @@ async def start_training_job(
                    job_id=job_id,
                    features=["repository-pattern", "dependency-injection", "enhanced-tracking"])

        # Log audit event for training job creation
        try:
            from app.core.database import get_db
            db = next(get_db())
            await audit_logger.log_event(
                db_session=db,
                tenant_id=tenant_id,
                user_id=current_user["user_id"],
                action=AuditAction.CREATE.value,
                resource_type="training_job",
                resource_id=job_id,
                severity=AuditSeverity.MEDIUM.value,
                description=f"Started training job (tier: {tier})",
                metadata={
                    "job_id": job_id,
                    "tier": tier,
                    "estimated_dataset_size": estimated_dataset_size,
                    "quota_usage": quota_result.get('current', 0) if quota_result else 0,
                    "quota_limit": quota_limit if quota_limit else "unlimited"
                },
                endpoint="/jobs",
                method="POST"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        return TrainingJobResponse(**response_data)

    except HTTPException:

@@ -157,7 +291,8 @@ async def execute_training_job_background(
    job_id: str,
    bakery_location: tuple,
    requested_start: Optional[datetime] = None,
    requested_end: Optional[datetime] = None
    requested_end: Optional[datetime] = None,
    estimated_duration_minutes: int = 15
):
    """
    Enhanced background task that executes the training job using repository pattern.
@@ -202,7 +337,7 @@ async def execute_training_job_background(
        },
        "requested_start": requested_start.isoformat() if requested_start else None,
        "requested_end": requested_end.isoformat() if requested_end else None,
        "estimated_duration_minutes": 18,
        "estimated_duration_minutes": estimated_duration_minutes,
        "background_execution": True,
        "enhanced_features": True,
        "repository_pattern": True,
@@ -278,16 +413,20 @@ async def execute_training_job_background(

@router.post(
    route_builder.build_resource_detail_route("products", "inventory_product_id"), response_model=TrainingJobResponse)
@require_user_role(['admin', 'owner'])
@track_execution_time("enhanced_single_product_training_duration_seconds", "training-service")
async def start_single_product_training(
    request: SingleProductTrainingRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    inventory_product_id: str = Path(..., description="Inventory product UUID"),
    request_obj: Request = None,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service)
):
    """
    Start enhanced training for a single product using repository pattern.
    Start enhanced training for a single product (Admin+ only).

    **RBAC:** Admin or Owner role required

    Enhanced features:
    - Repository pattern for data access
@@ -4,6 +4,13 @@ Training Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .training import (
    TrainedModel,
@@ -20,4 +27,5 @@ __all__ = [
    "ModelPerformanceMetric",
    "TrainingJobQueue",
    "ModelArtifact",
    "AuditLog",
]
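
create_audit_log_model itself lives in shared.security and is not part of this diff. A plausible sketch of the factory pattern, with the column set abbreviated from the audit_logs migration above (all details here are assumptions about the shared library):

# Assumed shape of the factory: build an AuditLog model against the calling
# service's declarative Base, so every service owns its own audit_logs table.
import uuid
from sqlalchemy import Column, DateTime, String, func
from sqlalchemy.dialects.postgresql import UUID

def create_audit_log_model(Base):
    class AuditLog(Base):
        __tablename__ = "audit_logs"
        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        created_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now())
    return AuditLog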

@@ -193,4 +193,59 @@ class TrainedModel(Base):
            "training_start_date": self.training_start_date.isoformat() if self.training_start_date else None,
            "training_end_date": self.training_end_date.isoformat() if self.training_end_date else None,
            "data_quality_score": self.data_quality_score
        }


class TrainingPerformanceMetrics(Base):
    """
    Table to track historical training performance for time estimation.
    Stores aggregated metrics from completed training jobs.
    """
    __tablename__ = "training_performance_metrics"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    job_id = Column(String(255), nullable=False, index=True)

    # Training job statistics
    total_products = Column(Integer, nullable=False)
    successful_products = Column(Integer, nullable=False)
    failed_products = Column(Integer, nullable=False)

    # Time metrics
    total_duration_seconds = Column(Float, nullable=False)
    avg_time_per_product = Column(Float, nullable=False)  # Key metric for estimation
    data_analysis_time_seconds = Column(Float, nullable=True)
    training_time_seconds = Column(Float, nullable=True)
    finalization_time_seconds = Column(Float, nullable=True)

    # Job metadata
    completed_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))

    def __repr__(self):
        return (
            f"<TrainingPerformanceMetrics("
            f"tenant_id={self.tenant_id}, "
            f"job_id={self.job_id}, "
            f"total_products={self.total_products}, "
            f"avg_time_per_product={self.avg_time_per_product:.2f}s"
            f")>"
        )

    def to_dict(self):
        return {
            "id": str(self.id),
            "tenant_id": str(self.tenant_id),
            "job_id": self.job_id,
            "total_products": self.total_products,
            "successful_products": self.successful_products,
            "failed_products": self.failed_products,
            "total_duration_seconds": self.total_duration_seconds,
            "avg_time_per_product": self.avg_time_per_product,
            "data_analysis_time_seconds": self.data_analysis_time_seconds,
            "training_time_seconds": self.training_time_seconds,
            "finalization_time_seconds": self.finalization_time_seconds,
            "completed_at": self.completed_at.isoformat() if self.completed_at else None,
            "created_at": self.created_at.isoformat() if self.created_at else None
        }
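
get_historical_average_estimate (imported by the routes above) is not shown in this diff. One plausible reading of how it would consume avg_time_per_product, where the query shape and window size are assumptions:

# Assumed lookup: average the per-product training time over a tenant's most
# recent completed jobs, returning None when there is no history yet.
def get_historical_average_estimate(db, tenant_id, recent_jobs=10):
    rows = (
        db.query(TrainingPerformanceMetrics.avg_time_per_product)
        .filter(TrainingPerformanceMetrics.tenant_id == tenant_id)
        .order_by(TrainingPerformanceMetrics.completed_at.desc())
        .limit(recent_jobs)
        .all()
    )
    if not rows:
        return None
    return sum(r[0] for r in rows) / len(rows)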

@@ -112,6 +112,8 @@ class TrainingJobStatus(BaseModel):
    products_completed: int = Field(0, description="Number of products completed")
    products_failed: int = Field(0, description="Number of products that failed")
    error_message: Optional[str] = Field(None, description="Error message if failed")
    estimated_time_remaining_seconds: Optional[int] = Field(None, description="Estimated time remaining in seconds")
    message: Optional[str] = Field(None, description="Optional status message")

    @validator('job_id', pre=True)
    def convert_uuid_to_string(cls, v):
@@ -38,10 +38,19 @@ async def cleanup_messaging():
async def publish_training_started(
    job_id: str,
    tenant_id: str,
    total_products: int
    total_products: int,
    estimated_duration_minutes: Optional[int] = None,
    estimated_completion_time: Optional[str] = None
) -> bool:
    """
    Event 1: Training Started (0% progress)

    Args:
        job_id: Training job identifier
        tenant_id: Tenant identifier
        total_products: Number of products to train
        estimated_duration_minutes: Estimated time to completion in minutes
        estimated_completion_time: ISO timestamp of estimated completion
    """
    event_data = {
        "service_name": "training-service",
@@ -53,7 +62,10 @@ async def publish_training_started(
            "progress": 0,
            "current_step": "Training Started",
            "step_details": f"Starting training for {total_products} products",
            "total_products": total_products
            "total_products": total_products,
            "estimated_duration_minutes": estimated_duration_minutes,
            "estimated_completion_time": estimated_completion_time,
            "estimated_time_remaining_seconds": estimated_duration_minutes * 60 if estimated_duration_minutes else None
        }
    }
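
For context, a call site passing the new estimate fields might look like this, inside the background task's coroutine (all argument values here are made up):

# Illustrative invocation with the new estimate fields (hypothetical values):
await publish_training_started(
    job_id="enhanced_training_tenant42_ab12cd34",
    tenant_id="tenant42",
    total_products=15,
    estimated_duration_minutes=15,
    estimated_completion_time="2025-10-15T13:00:00+00:00",
)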

@@ -67,7 +79,8 @@ async def publish_training_started(
        logger.info("Published training started event",
                    job_id=job_id,
                    tenant_id=tenant_id,
                    total_products=total_products)
                    total_products=total_products,
                    estimated_duration_minutes=estimated_duration_minutes)
    else:
        logger.error("Failed to publish training started event", job_id=job_id)

@@ -77,10 +90,17 @@ async def publish_training_started(
async def publish_data_analysis(
    job_id: str,
    tenant_id: str,
    analysis_details: Optional[str] = None
    analysis_details: Optional[str] = None,
    estimated_time_remaining_seconds: Optional[int] = None
) -> bool:
    """
    Event 2: Data Analysis (20% progress)

    Args:
        job_id: Training job identifier
        tenant_id: Tenant identifier
        analysis_details: Details about the analysis
        estimated_time_remaining_seconds: Estimated time remaining in seconds
    """
    event_data = {
        "service_name": "training-service",
@@ -91,7 +111,8 @@ async def publish_data_analysis(
            "tenant_id": tenant_id,
            "progress": 20,
            "current_step": "Data Analysis",
            "step_details": analysis_details or "Analyzing sales, weather, and traffic data"
            "step_details": analysis_details or "Analyzing sales, weather, and traffic data",
            "estimated_time_remaining_seconds": estimated_time_remaining_seconds
        }
    }

@@ -116,7 +137,8 @@ async def publish_product_training_completed(
    tenant_id: str,
    product_name: str,
    products_completed: int,
    total_products: int
    total_products: int,
    estimated_time_remaining_seconds: Optional[int] = None
) -> bool:
    """
    Event 3: Product Training Completed (contributes to 20-80% progress)
@@ -124,6 +146,14 @@ async def publish_product_training_completed(
    This event is published each time a product training completes.
    The frontend/consumer will calculate the progress as:
    progress = 20 + (products_completed / total_products) * 60

    Args:
        job_id: Training job identifier
        tenant_id: Tenant identifier
        product_name: Name of the product that was trained
        products_completed: Number of products completed so far
        total_products: Total number of products
        estimated_time_remaining_seconds: Estimated time remaining in seconds
    """
    event_data = {
        "service_name": "training-service",
@@ -136,7 +166,8 @@ async def publish_product_training_completed(
            "products_completed": products_completed,
            "total_products": total_products,
            "current_step": "Model Training",
            "step_details": f"Completed training for {product_name} ({products_completed}/{total_products})"
            "step_details": f"Completed training for {product_name} ({products_completed}/{total_products})",
            "estimated_time_remaining_seconds": estimated_time_remaining_seconds
        }
    }
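
A quick sanity check of the consumer-side progress formula documented above, with illustrative counts:

# Worked example (illustrative): 9 of 15 products done lands at 56% overall,
# i.e. the 20% floor plus 36 points of the 60-point training band.
products_completed, total_products = 9, 15
progress = 20 + (products_completed / total_products) * 60
assert progress == 56.0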