Add role-based filtering and improve code

Urtzi Alfaro
2025-10-15 16:12:49 +02:00
parent 96ad5c6692
commit 8f9e9a7edc
158 changed files with 11033 additions and 1544 deletions

View File

@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/recipes/requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt
# Copy shared libraries from the shared stage

View File

@@ -17,7 +17,7 @@ from ..schemas.recipes import (
RecipeStatisticsResponse,
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.auth.decorators import get_current_user_dep
route_builder = RouteBuilder('recipes')
@@ -114,13 +114,18 @@ async def activate_recipe(
route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "feasibility"]),
response_model=RecipeFeasibilityResponse
)
@analytics_tier_required
async def check_recipe_feasibility(
tenant_id: UUID,
recipe_id: UUID,
batch_multiplier: float = Query(1.0, gt=0),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Check if recipe can be produced with current inventory"""
"""
Check if recipe can be produced with current inventory (Professional+ tier)
Supports batch scaling for production planning
"""
try:
recipe_service = RecipeService(db)
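The analytics_tier_required decorator itself is not part of this diff. As a minimal sketch of the pattern, assuming it reads a subscription_tier claim from the injected current_user (the claim name and tier names are hypothetical):

import functools
from fastapi import HTTPException

# Hypothetical tier names assumed to unlock analytics endpoints.
ANALYTICS_TIERS = {"professional", "enterprise"}

def analytics_tier_required(func):
    """Reject callers whose subscription tier does not include analytics."""
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        current_user = kwargs.get("current_user") or {}
        if current_user.get("subscription_tier") not in ANALYTICS_TIERS:
            raise HTTPException(
                status_code=403,
                detail="This endpoint requires a Professional or higher tier"
            )
        return await func(*args, **kwargs)
    return wrapper

Because FastAPI resolves dependencies from the wrapped function's signature, the decorator must preserve it (functools.wraps does that here) and must sit below the @router.get decorator, as in the hunk above.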
@@ -187,3 +192,30 @@ async def get_recipe_categories(
except Exception as e:
logger.error(f"Error getting recipe categories: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get(
route_builder.build_custom_route(RouteCategory.BASE, ["count"])
)
async def get_recipe_count(
tenant_id: UUID,
x_internal_request: str = Header(None),
db: AsyncSession = Depends(get_db)
):
"""
Get total count of recipes for a tenant
Internal endpoint for subscription usage tracking
"""
if x_internal_request != "true":
raise HTTPException(status_code=403, detail="Internal endpoint only")
try:
recipe_service = RecipeService(db)
recipes = await recipe_service.search_recipes(tenant_id, limit=10000)
count = len(recipes)
return {"count": count}
except Exception as e:
logger.error(f"Error getting recipe count: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
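As a usage sketch, an internal caller such as a subscription-usage job would set the X-Internal-Request header; the base URL and route path below are assumptions, since the real path is composed by RouteBuilder:

import httpx

async def fetch_recipe_count(tenant_id: str) -> int:
    # Service address and path are placeholders for illustration.
    async with httpx.AsyncClient(base_url="http://recipes:8000") as client:
        resp = await client.get(
            f"/tenants/{tenant_id}/recipes/count",
            headers={"X-Internal-Request": "true"},
        )
        resp.raise_for_status()
        return resp.json()["count"]

Note that a header check like this is only safe if the API gateway strips X-Internal-Request from external traffic before it reaches the service.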

View File

@@ -18,9 +18,11 @@ from ..schemas.recipes import (
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, AuditSeverity, AuditAction
route_builder = RouteBuilder('recipes')
logger = logging.getLogger(__name__)
audit_logger = create_audit_logger("recipes-service")
router = APIRouter(tags=["recipes"])
@@ -193,9 +195,10 @@ async def update_recipe(
async def delete_recipe(
tenant_id: UUID,
recipe_id: UUID,
user_id: UUID = Depends(get_user_id),
db: AsyncSession = Depends(get_db)
):
"""Delete a recipe"""
"""Delete a recipe (Admin+ only)"""
try:
recipe_service = RecipeService(db)
@@ -206,10 +209,43 @@ async def delete_recipe(
if existing_recipe["tenant_id"] != str(tenant_id):
raise HTTPException(status_code=403, detail="Access denied")
# Capture recipe data before deletion
recipe_data = {
"recipe_name": existing_recipe.get("name"),
"category": existing_recipe.get("category"),
"difficulty_level": existing_recipe.get("difficulty_level"),
"ingredient_count": len(existing_recipe.get("ingredients", []))
}
success = await recipe_service.delete_recipe(recipe_id)
if not success:
raise HTTPException(status_code=404, detail="Recipe not found")
# Log audit event for recipe deletion
try:
# Get sync db for audit logging
from ..core.database import SessionLocal
sync_db = SessionLocal()
try:
await audit_logger.log_deletion(
db_session=sync_db,
tenant_id=str(tenant_id),
user_id=str(user_id),
resource_type="recipe",
resource_id=str(recipe_id),
resource_data=recipe_data,
description=f"Admin deleted recipe {recipe_data['recipe_name']}",
endpoint=f"/recipes/{recipe_id}",
method="DELETE"
)
sync_db.commit()
finally:
sync_db.close()
except Exception as audit_error:
logger.warning(f"Failed to log audit event: {audit_error}")
logger.info(f"Deleted recipe {recipe_id} by user {user_id}")
return {"message": "Recipe deleted successfully"}
except HTTPException:
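create_audit_logger and its log_deletion method come from shared.security and are not shown in this diff; a rough stand-in, assuming log_deletion writes one row whose fields mirror the audit_logs columns created in the migration below:

from datetime import datetime, timezone
from uuid import uuid4

class AuditLoggerSketch:
    """Illustrative stand-in for the shared audit logger, not its real API."""
    def __init__(self, service_name: str):
        self.service_name = service_name

    async def log_deletion(self, db_session, tenant_id, user_id, resource_type,
                           resource_id, resource_data, description, endpoint, method):
        # Field names follow the audit_logs columns in the Alembic migration.
        db_session.add(AuditLog(
            id=uuid4(),
            tenant_id=tenant_id,
            user_id=user_id,
            action="delete",
            resource_type=resource_type,
            resource_id=resource_id,
            severity="warning",  # assumed default severity for deletions
            service_name=self.service_name,
            description=description,
            changes=resource_data,
            endpoint=endpoint,
            method=method,
            created_at=datetime.now(timezone.utc),
        ))

The caller commits and closes the session itself, which matches the sync_db.commit() / sync_db.close() handling in the hunk above.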

View File

@@ -1,3 +1,10 @@
# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base
# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
# services/recipes/app/models/__init__.py
from .recipes import (
@@ -21,5 +28,6 @@ __all__ = [
"RecipeStatus",
"ProductionStatus",
"MeasurementUnit",
"ProductionPriority"
"ProductionPriority",
"AuditLog"
]
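create_audit_log_model is a shared factory; a sketch of the likely pattern, assuming it declares the model against the service's own Base so Alembic autogeneration picks it up (column set abbreviated from the migration below):

import uuid
from sqlalchemy import Column, DateTime, String, Text, func
from sqlalchemy.dialects.postgresql import JSON, UUID

def create_audit_log_model(base):
    """Bind a per-service AuditLog model to the given declarative Base."""
    class AuditLog(base):
        __tablename__ = "audit_logs"
        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        created_at = Column(DateTime(timezone=True), nullable=False,
                            server_default=func.now(), index=True)
    return AuditLog

Generating the class per service keeps each service's metadata self-contained, so every service migrates and owns its own audit_logs table independently.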

View File

@@ -1,8 +1,8 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1228
Revision ID: a89b48099599
Revision ID: 3c4d0f57a312
Revises:
Create Date: 2025-10-09 20:38:32.626427+02:00
Create Date: 2025-10-15 12:28:57.066635+02:00
"""
from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'a89b48099599'
revision: str = '3c4d0f57a312'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('audit_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('action', sa.String(length=100), nullable=False),
sa.Column('resource_type', sa.String(length=100), nullable=False),
sa.Column('resource_id', sa.String(length=255), nullable=True),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('service_name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('endpoint', sa.String(length=255), nullable=True),
sa.Column('method', sa.String(length=10), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
op.create_table('production_schedules',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
@@ -285,4 +317,18 @@ def downgrade() -> None:
op.drop_index('idx_production_schedules_published', table_name='production_schedules')
op.drop_index('idx_production_schedules_completed', table_name='production_schedules')
op.drop_table('production_schedules')
op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
op.drop_index('idx_audit_user_created', table_name='audit_logs')
op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
op.drop_index('idx_audit_severity_created', table_name='audit_logs')
op.drop_index('idx_audit_service_created', table_name='audit_logs')
op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
op.drop_table('audit_logs')
# ### end Alembic commands ###
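The composite indexes target the expected query shapes. For example, a recent-activity lookup per tenant filters on tenant_id and sorts by created_at, which idx_audit_tenant_created serves directly (a sketch reusing the AuditLog model above with an async session):

from sqlalchemy import select

async def recent_tenant_activity(db, tenant_id, limit=50):
    # Matches idx_audit_tenant_created (tenant_id, created_at).
    stmt = (
        select(AuditLog)
        .where(AuditLog.tenant_id == tenant_id)
        .order_by(AuditLog.created_at.desc())
        .limit(limit)
    )
    result = await db.execute(stmt)
    return result.scalars().all()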