Add role-based filtering and improve code

Urtzi Alfaro
2025-10-15 16:12:49 +02:00
parent 96ad5c6692
commit 8f9e9a7edc
158 changed files with 11033 additions and 1544 deletions

View File

@@ -17,9 +17,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/inventory/requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt
# Copy shared libraries from the shared stage

View File

@@ -209,7 +209,7 @@ async def update_compliance_record(
@router.delete(
route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
status_code=status.HTTP_204_NO_CONTENT
status_code=status.HTTP_403_FORBIDDEN
)
@require_user_role(['admin', 'owner'])
async def delete_compliance_record(
@@ -218,7 +218,33 @@ async def delete_compliance_record(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete (soft delete) compliance record"""
"""
Compliance records CANNOT be deleted for regulatory compliance.
Use the archive endpoint to mark records as inactive.
"""
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail={
"error": "compliance_records_cannot_be_deleted",
"message": "Compliance records cannot be deleted for regulatory compliance. Use PUT /food-safety/compliance/{id}/archive to archive records instead.",
"reason": "Food safety compliance records must be retained for regulatory audits",
"alternative_endpoint": f"/api/v1/tenants/{tenant_id}/inventory/food-safety/compliance/{compliance_id}/archive"
}
)
@router.put(
route_builder.build_nested_resource_route("food-safety/compliance", "compliance_id", "archive"),
response_model=dict
)
@require_user_role(['admin', 'owner'])
async def archive_compliance_record(
compliance_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Archive (soft delete) compliance record - marks as inactive but retains for audit"""
try:
query = """
UPDATE food_safety_compliance
@@ -228,7 +254,7 @@ async def delete_compliance_record(
result = await db.execute(query, {
"compliance_id": compliance_id,
"tenant_id": tenant_id,
"user_id": UUID(current_user["sub"])
"user_id": UUID(current_user["user_id"])
})
if result.rowcount == 0:
@@ -238,13 +264,38 @@ async def delete_compliance_record(
)
await db.commit()
return None
# Log audit event for archiving compliance record
try:
from shared.security import create_audit_logger, AuditSeverity, AuditAction
audit_logger = create_audit_logger("inventory-service")
await audit_logger.log_event(
db_session=db,
tenant_id=str(tenant_id),
user_id=current_user["user_id"],
action="archive",
resource_type="compliance_record",
resource_id=str(compliance_id),
severity=AuditSeverity.HIGH.value,
description=f"Archived compliance record (retained for regulatory compliance)",
endpoint=f"/food-safety/compliance/{compliance_id}/archive",
method="PUT"
)
except Exception as audit_error:
logger.warning("Failed to log audit event", error=str(audit_error))
return {
"message": "Compliance record archived successfully",
"compliance_id": str(compliance_id),
"archived": True,
"note": "Record retained for regulatory compliance audits"
}
except HTTPException:
raise
except Exception as e:
logger.error("Error deleting compliance record", error=str(e))
logger.error("Error archiving compliance record", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to delete compliance record"
detail="Failed to archive compliance record"
)
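
Taken together, the compliance changes above replace deletion with archiving. The following is a minimal client-side sketch of the new contract, assuming a locally running inventory service, placeholder tenant and record IDs, and no authentication headers; the route prefix is taken from the alternative_endpoint value in the 403 payload.

```python
# Hypothetical client check of the new behaviour: DELETE is refused with 403,
# and the archive endpoint is used instead. Base URL and IDs are placeholders,
# and auth headers are omitted for brevity.
import asyncio
import uuid

import httpx

BASE_URL = "http://localhost:8000"   # assumption: inventory service running locally
TENANT_ID = uuid.uuid4()             # placeholder tenant id
COMPLIANCE_ID = uuid.uuid4()         # placeholder compliance record id


async def main() -> None:
    record = f"/api/v1/tenants/{TENANT_ID}/inventory/food-safety/compliance/{COMPLIANCE_ID}"
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # Deleting a compliance record is now always refused.
        resp = await client.delete(record)
        assert resp.status_code == 403
        print(resp.json()["detail"]["alternative_endpoint"])

        # Archiving (soft delete) is the supported path instead.
        resp = await client.put(f"{record}/archive")
        print(resp.status_code, resp.json().get("message"))


asyncio.run(main())
```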

View File

@@ -22,12 +22,16 @@ from app.schemas.inventory import (
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required, owner_role_required
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["ingredients"])
# Initialize audit logger
audit_logger = create_audit_logger("inventory-service")
# Helper function to extract user ID from user object
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
"""Extract user ID from current user context"""
@@ -264,6 +268,25 @@ async def hard_delete_ingredient(
try:
service = InventoryService()
deletion_summary = await service.hard_delete_ingredient(ingredient_id, tenant_id)
# Log audit event for hard deletion
try:
await audit_logger.log_deletion(
db_session=db,
tenant_id=str(tenant_id),
user_id=current_user["user_id"],
resource_type="ingredient",
resource_id=str(ingredient_id),
resource_data=deletion_summary,
description=f"Hard deleted ingredient and all associated data",
endpoint=f"/ingredients/{ingredient_id}/hard",
method="DELETE"
)
except Exception as audit_error:
import structlog
logger = structlog.get_logger()
logger.warning("Failed to log audit event", error=str(audit_error))
return deletion_summary
except ValueError as e:
raise HTTPException(
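
The routers above call create_audit_logger, log_event and log_deletion from shared.security, whose implementation is not part of this diff. The sketch below is an assumption about what that interface could look like, inferred from the call sites and from the audit_logs columns in the migration further down; it is not the actual shared library.

```python
# Assumed shape of the audit-logger interface used by the routers above.
# Column names mirror the audit_logs migration; severity string values are assumed.
import uuid
from datetime import datetime, timezone

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession


class AuditLogger:
    def __init__(self, service_name: str) -> None:
        self.service_name = service_name

    async def log_event(self, db_session: AsyncSession, tenant_id: str, user_id: str,
                        action: str, resource_type: str, resource_id: str,
                        severity: str, description: str, endpoint: str, method: str) -> None:
        # Insert one row into audit_logs; JSON columns (changes, audit_metadata)
        # are omitted here for brevity.
        await db_session.execute(
            text("""
                INSERT INTO audit_logs (id, tenant_id, user_id, action, resource_type,
                                        resource_id, severity, service_name, description,
                                        endpoint, method, created_at)
                VALUES (:id, :tenant_id, :user_id, :action, :resource_type, :resource_id,
                        :severity, :service_name, :description, :endpoint, :method, :created_at)
            """),
            {
                "id": uuid.uuid4(), "tenant_id": tenant_id, "user_id": user_id,
                "action": action, "resource_type": resource_type, "resource_id": resource_id,
                "severity": severity, "service_name": self.service_name,
                "description": description, "endpoint": endpoint, "method": method,
                "created_at": datetime.now(timezone.utc),
            },
        )

    async def log_deletion(self, db_session: AsyncSession, tenant_id: str, user_id: str,
                           resource_type: str, resource_id: str, resource_data: dict,
                           description: str, endpoint: str, method: str) -> None:
        # Deletions are recorded as high-severity "delete" events; resource_data
        # would go into the JSON "changes" column in a fuller implementation.
        await self.log_event(db_session, tenant_id, user_id, "delete", resource_type,
                             resource_id, "high", description, endpoint, method)


def create_audit_logger(service_name: str) -> AuditLogger:
    return AuditLogger(service_name)
```

Note the design choice visible in both routers: every audit call is wrapped in its own try/except so that a failure to write the audit row never blocks or rolls back the business operation itself.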

View File

@@ -4,6 +4,13 @@ Inventory Service Models Package
Import all models to ensure they are registered with SQLAlchemy Base.
"""
# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base
# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
# Import all models to register them with the Base metadata
from .inventory import (
Ingredient,
@@ -51,4 +58,5 @@ __all__ = [
"FoodSafetyStandard",
"ComplianceStatus",
"FoodSafetyAlertType",
"AuditLog",
]
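
create_audit_log_model(Base) registers a per-service AuditLog model on the service's own metadata, which is what lets Alembic pick up the audit_logs table in the migration below. A hypothetical sketch of such a factory, with columns mirroring that migration, might look like this (the real shared.security implementation is not shown in the diff):

```python
# Hypothetical factory returning an AuditLog model bound to the caller's Base.
# Columns and single-column indexes match the audit_logs migration; the composite
# idx_audit_* indexes are omitted for brevity.
import uuid
from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import DeclarativeBase


class Base(DeclarativeBase):  # stand-in for shared.database.base.Base
    pass


def create_audit_log_model(base):
    class AuditLog(base):
        __tablename__ = "audit_logs"

        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        ip_address = Column(String(45))
        user_agent = Column(Text)
        endpoint = Column(String(255))
        method = Column(String(10))
        created_at = Column(
            DateTime(timezone=True), nullable=False, index=True,
            default=lambda: datetime.now(timezone.utc),
        )

    return AuditLog


AuditLog = create_audit_log_model(Base)
```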

View File

@@ -1,8 +1,8 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1229
Revision ID: da978256de4a
Revision ID: e7fcea67bf4e
Revises:
Create Date: 2025-10-09 20:39:00.639427+02:00
Create Date: 2025-10-15 12:29:40.991849+02:00
"""
from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'da978256de4a'
revision: str = 'e7fcea67bf4e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('audit_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('action', sa.String(length=100), nullable=False),
sa.Column('resource_type', sa.String(length=100), nullable=False),
sa.Column('resource_id', sa.String(length=255), nullable=True),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('service_name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('endpoint', sa.String(length=255), nullable=True),
sa.Column('method', sa.String(length=10), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
op.create_table('ingredients',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -453,4 +485,18 @@ def downgrade() -> None:
op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
op.drop_index('idx_ingredients_barcode', table_name='ingredients')
op.drop_table('ingredients')
op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
op.drop_index('idx_audit_user_created', table_name='audit_logs')
op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
op.drop_index('idx_audit_severity_created', table_name='audit_logs')
op.drop_index('idx_audit_service_created', table_name='audit_logs')
op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
op.drop_table('audit_logs')
# ### end Alembic commands ###
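
The composite indexes created above (tenant_id + created_at, severity + created_at, and so on) are what make tenant-scoped, time-ordered reads of the audit trail cheap. A small illustrative query sketch follows, with a placeholder DSN and assumed lowercase severity values.

```python
# Illustrative read path over audit_logs: tenant-scoped, time-ordered queries hit
# idx_audit_tenant_created. DSN and severity values ('high', 'critical') are assumptions.
import asyncio
import uuid

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

DATABASE_URL = "postgresql+asyncpg://user:pass@localhost/inventory"  # placeholder DSN


async def recent_high_severity_events(tenant_id: uuid.UUID, limit: int = 50):
    engine = create_async_engine(DATABASE_URL)
    try:
        async with engine.connect() as conn:
            rows = await conn.execute(
                text("""
                    SELECT created_at, user_id, action, resource_type, resource_id, description
                    FROM audit_logs
                    WHERE tenant_id = :tenant_id
                      AND severity IN ('high', 'critical')
                    ORDER BY created_at DESC
                    LIMIT :limit
                """),
                {"tenant_id": tenant_id, "limit": limit},
            )
            return rows.fetchall()
    finally:
        await engine.dispose()


if __name__ == "__main__":
    for row in asyncio.run(recent_high_severity_events(uuid.uuid4())):
        print(row.created_at, row.action, row.resource_type, row.resource_id)
```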