Improve the frontend modals

Urtzi Alfaro
2025-10-27 16:33:26 +01:00
parent 61376b7a9f
commit 858d985c92
143 changed files with 9289 additions and 2306 deletions

View File

@@ -10,7 +10,6 @@ from datetime import datetime, timezone
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.admin_delete import AdminUserDeleteService
from app.models.users import User
@@ -21,7 +20,6 @@ import httpx
logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder('auth')
class AccountDeletionRequest(BaseModel):
@@ -39,7 +37,7 @@ class DeletionScheduleResponse(BaseModel):
grace_period_days: int = 30
@router.post("/api/v1/users/me/delete/request")
@router.delete("/api/v1/auth/me/account")
async def request_account_deletion(
deletion_request: AccountDeletionRequest,
request: Request,
@@ -62,7 +60,7 @@ async def request_account_deletion(
- Current password verification
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
user_email = current_user.get("email")
if deletion_request.confirm_email.lower() != user_email.lower():
@@ -149,7 +147,7 @@ async def request_account_deletion(
except Exception as e:
logger.error(
"account_deletion_failed",
user_id=current_user.get("sub"),
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
@@ -158,7 +156,7 @@ async def request_account_deletion(
)
@router.get("/api/v1/users/me/delete/info")
@router.get("/api/v1/auth/me/account/deletion-info")
async def get_deletion_info(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
@@ -170,7 +168,7 @@ async def get_deletion_info(
account deletion. Transparency requirement under GDPR.
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
deletion_service = AdminUserDeleteService(db)
preview = await deletion_service.preview_user_deletion(str(user_id))
@@ -207,7 +205,7 @@ async def get_deletion_info(
except Exception as e:
logger.error(
"deletion_info_failed",
user_id=current_user.get("sub"),
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(

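The recurring edit in this file (and in the consent, export, and deletion routes below) replaces the JWT "sub" claim with the user_id key supplied by get_current_user_dep. A minimal sketch of that lookup, assuming the dependency yields a plain dict; the helper name is illustrative and not part of the commit:

from uuid import UUID
from fastapi import HTTPException, status

def extract_user_id(current_user: dict) -> UUID:
    """Read the user id from the gateway-provided context dict.

    Assumes the context carries a 'user_id' key (the convention this commit
    standardizes on) rather than the raw JWT 'sub' claim.
    """
    raw_id = current_user.get("user_id")
    if not raw_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid user context",
        )
    try:
        return UUID(raw_id)
    except (ValueError, TypeError):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid user context",
        )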
View File

@@ -5,6 +5,8 @@ Business logic for login, register, token refresh, password reset, and email ver
from fastapi import APIRouter, Depends, HTTPException, status, Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from typing import Dict, Any
import structlog
from app.schemas.auth import (
@@ -12,16 +14,17 @@ from app.schemas.auth import (
PasswordChange, PasswordReset, UserResponse
)
from app.services.auth_service import EnhancedAuthService
from app.models.users import User
from app.core.database import get_db
from shared.database.base import create_database_manager
from shared.monitoring.decorators import track_execution_time
from shared.monitoring.metrics import get_metrics_collector
from shared.routing import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(tags=["auth-operations"])
security = HTTPBearer()
route_builder = RouteBuilder('auth')
def get_auth_service():
@@ -30,7 +33,7 @@ def get_auth_service():
return EnhancedAuthService(database_manager)
@router.post(route_builder.build_base_route("register", include_tenant_prefix=False), response_model=TokenResponse)
@router.post("/api/v1/auth/register", response_model=TokenResponse)
@track_execution_time("enhanced_registration_duration_seconds", "auth-service")
async def register(
user_data: UserRegistration,
@@ -100,7 +103,7 @@ async def register(
)
@router.post(route_builder.build_base_route("login", include_tenant_prefix=False), response_model=TokenResponse)
@router.post("/api/v1/auth/login", response_model=TokenResponse)
@track_execution_time("enhanced_login_duration_seconds", "auth-service")
async def login(
login_data: UserLogin,
@@ -164,7 +167,7 @@ async def login(
)
@router.post(route_builder.build_base_route("refresh", include_tenant_prefix=False))
@router.post("/api/v1/auth/refresh")
@track_execution_time("enhanced_token_refresh_duration_seconds", "auth-service")
async def refresh_token(
refresh_data: RefreshTokenRequest,
@@ -201,7 +204,7 @@ async def refresh_token(
)
@router.post(route_builder.build_base_route("verify", include_tenant_prefix=False))
@router.post("/api/v1/auth/verify")
@track_execution_time("enhanced_token_verify_duration_seconds", "auth-service")
async def verify_token(
credentials: HTTPAuthorizationCredentials = Depends(security),
@@ -249,7 +252,7 @@ async def verify_token(
)
@router.post(route_builder.build_base_route("logout", include_tenant_prefix=False))
@router.post("/api/v1/auth/logout")
@track_execution_time("enhanced_logout_duration_seconds", "auth-service")
async def logout(
refresh_data: RefreshTokenRequest,
@@ -295,7 +298,7 @@ async def logout(
)
@router.post(route_builder.build_base_route("change-password", include_tenant_prefix=False))
@router.post("/api/v1/auth/change-password")
async def change_password(
password_data: PasswordChange,
credentials: HTTPAuthorizationCredentials = Depends(security),
@@ -358,98 +361,116 @@ async def change_password(
)
@router.get(route_builder.build_base_route("profile", include_tenant_prefix=False), response_model=UserResponse)
@router.get("/api/v1/auth/me", response_model=UserResponse)
async def get_profile(
credentials: HTTPAuthorizationCredentials = Depends(security),
auth_service: EnhancedAuthService = Depends(get_auth_service)
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get user profile using repository pattern"""
"""Get user profile - works for JWT auth AND demo sessions"""
try:
if not credentials:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Authentication required"
)
# Verify token and get user_id
payload = await auth_service.verify_user_token(credentials.credentials)
user_id = payload.get("user_id")
user_id = current_user.get("user_id")
if not user_id:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid token"
detail="Invalid user context"
)
# Get user profile using enhanced service
profile = await auth_service.get_user_profile(user_id)
if not profile:
# Fetch user from database
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
user = await user_repo.get_by_id(user_id)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User profile not found"
)
return profile
return UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
phone=user.phone,
language=user.language or "es",
timezone=user.timezone or "Europe/Madrid",
created_at=user.created_at,
last_login=user.last_login,
role=user.role,
tenant_id=current_user.get("tenant_id")
)
except HTTPException:
raise
except Exception as e:
logger.error("Get profile error using repository pattern", error=str(e))
logger.error("Get profile error", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get profile"
)
@router.put(route_builder.build_base_route("profile", include_tenant_prefix=False), response_model=UserResponse)
@router.put("/api/v1/auth/me", response_model=UserResponse)
async def update_profile(
update_data: dict,
credentials: HTTPAuthorizationCredentials = Depends(security),
auth_service: EnhancedAuthService = Depends(get_auth_service)
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update user profile using repository pattern"""
"""Update user profile - works for JWT auth AND demo sessions"""
try:
if not credentials:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Authentication required"
)
# Verify token and get user_id
payload = await auth_service.verify_user_token(credentials.credentials)
user_id = payload.get("user_id")
user_id = current_user.get("user_id")
if not user_id:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid token"
detail="Invalid user context"
)
# Update profile using enhanced service
updated_profile = await auth_service.update_user_profile(user_id, update_data)
if not updated_profile:
# Prepare update data - filter out read-only fields
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
# Update user profile
updated_user = await user_repo.update(user_id, update_data)
if not updated_user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
logger.info("Profile updated using repository pattern",
logger.info("Profile updated",
user_id=user_id,
updated_fields=list(update_data.keys()))
return updated_profile
return UserResponse(
id=str(updated_user.id),
email=updated_user.email,
full_name=updated_user.full_name,
is_active=updated_user.is_active,
is_verified=updated_user.is_verified,
phone=updated_user.phone,
language=updated_user.language,
timezone=updated_user.timezone,
created_at=updated_user.created_at,
last_login=updated_user.last_login,
role=updated_user.role,
tenant_id=current_user.get("tenant_id")
)
except HTTPException:
raise
except Exception as e:
logger.error("Update profile error using repository pattern", error=str(e))
logger.error("Update profile error", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update profile"
)
@router.post(route_builder.build_base_route("verify-email", include_tenant_prefix=False))
@router.post("/api/v1/auth/verify-email")
async def verify_email(
user_id: str,
verification_token: str,
@@ -473,7 +494,7 @@ async def verify_email(
)
@router.post(route_builder.build_base_route("reset-password", include_tenant_prefix=False))
@router.post("/api/v1/auth/reset-password")
async def reset_password(
reset_data: PasswordReset,
request: Request,
@@ -504,7 +525,7 @@ async def reset_password(
)
@router.get(route_builder.build_base_route("health", include_tenant_prefix=False))
@router.get("/api/v1/auth/health")
async def health_check():
"""Health check endpoint for enhanced auth service"""
return {

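The profile endpoints no longer verify the bearer token themselves; they take the user context injected by get_current_user_dep and read the row from the database, so the response reflects the stored role rather than header claims. A rough sketch of that shape, using a stub dependency and an in-memory lookup purely for illustration:

from typing import Any, Dict
from fastapi import Depends, FastAPI, HTTPException

app = FastAPI()

async def current_user_stub() -> Dict[str, Any]:
    # Stand-in for get_current_user_dep: the gateway normally populates this dict.
    return {"user_id": "user-123", "tenant_id": "tenant-1"}

FAKE_USERS = {"user-123": {"email": "demo@example.com", "role": "owner"}}

@app.get("/api/v1/auth/me")
async def get_profile(current_user: Dict[str, Any] = Depends(current_user_stub)):
    user = FAKE_USERS.get(current_user["user_id"])
    if not user:
        raise HTTPException(status_code=404, detail="User profile not found")
    # Profile fields come from the stored record, tenant_id from the request context.
    return {**user, "tenant_id": current_user.get("tenant_id")}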
View File

@@ -59,7 +59,7 @@ def hash_text(text: str) -> str:
return hashlib.sha256(text.encode()).hexdigest()
@router.post("/consent", response_model=ConsentResponse, status_code=status.HTTP_201_CREATED)
@router.post("/api/v1/auth/me/consent", response_model=ConsentResponse, status_code=status.HTTP_201_CREATED)
async def record_consent(
consent_data: ConsentRequest,
request: Request,
@@ -71,7 +71,7 @@ async def record_consent(
GDPR Article 7 - Conditions for consent
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
ip_address = request.client.host if request.client else None
user_agent = request.headers.get("user-agent")
@@ -129,14 +129,14 @@ async def record_consent(
except Exception as e:
await db.rollback()
logger.error("error_recording_consent", error=str(e), user_id=current_user.get("sub"))
logger.error("error_recording_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to record consent"
)
@router.get("/consent/current", response_model=Optional[ConsentResponse])
@router.get("/api/v1/auth/me/consent/current", response_model=Optional[ConsentResponse])
async def get_current_consent(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
@@ -145,7 +145,7 @@ async def get_current_consent(
Get current active consent for user
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
@@ -174,14 +174,14 @@ async def get_current_consent(
)
except Exception as e:
logger.error("error_getting_consent", error=str(e), user_id=current_user.get("sub"))
logger.error("error_getting_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve consent"
)
@router.get("/consent/history", response_model=List[ConsentHistoryResponse])
@router.get("/api/v1/auth/me/consent/history", response_model=List[ConsentHistoryResponse])
async def get_consent_history(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
@@ -191,7 +191,7 @@ async def get_consent_history(
GDPR Article 7(1) - Demonstrating consent
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
query = select(ConsentHistory).where(
ConsentHistory.user_id == user_id
@@ -212,14 +212,14 @@ async def get_consent_history(
]
except Exception as e:
logger.error("error_getting_consent_history", error=str(e), user_id=current_user.get("sub"))
logger.error("error_getting_consent_history", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve consent history"
)
@router.put("/consent", response_model=ConsentResponse)
@router.put("/api/v1/auth/me/consent", response_model=ConsentResponse)
async def update_consent(
consent_data: ConsentRequest,
request: Request,
@@ -231,7 +231,7 @@ async def update_consent(
GDPR Article 7(3) - Withdrawal of consent
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
@@ -309,14 +309,14 @@ async def update_consent(
except Exception as e:
await db.rollback()
logger.error("error_updating_consent", error=str(e), user_id=current_user.get("sub"))
logger.error("error_updating_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update consent"
)
@router.post("/consent/withdraw", status_code=status.HTTP_200_OK)
@router.post("/api/v1/auth/me/consent/withdraw", status_code=status.HTTP_200_OK)
async def withdraw_consent(
request: Request,
current_user: dict = Depends(get_current_user_dep),
@@ -327,7 +327,7 @@ async def withdraw_consent(
GDPR Article 7(3) - Right to withdraw consent
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
@@ -365,7 +365,7 @@ async def withdraw_consent(
except Exception as e:
await db.rollback()
logger.error("error_withdrawing_consent", error=str(e), user_id=current_user.get("sub"))
logger.error("error_withdrawing_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to withdraw consent"

View File

@@ -9,17 +9,15 @@ from fastapi.responses import JSONResponse
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.data_export_service import DataExportService
logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder('auth')
@router.get("/api/v1/users/me/export")
@router.get("/api/v1/auth/me/export")
async def export_my_data(
current_user: dict = Depends(get_current_user_dep),
db = Depends(get_db)
@@ -40,7 +38,7 @@ async def export_my_data(
Response is provided in JSON format for easy data portability.
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
export_service = DataExportService(db)
data = await export_service.export_user_data(user_id)
@@ -63,7 +61,7 @@ async def export_my_data(
except Exception as e:
logger.error(
"data_export_failed",
user_id=current_user.get("sub"),
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
@@ -72,7 +70,7 @@ async def export_my_data(
)
@router.get("/api/v1/users/me/export/summary")
@router.get("/api/v1/auth/me/export/summary")
async def get_export_summary(
current_user: dict = Depends(get_current_user_dep),
db = Depends(get_db)
@@ -84,7 +82,7 @@ async def get_export_summary(
before they request full export.
"""
try:
user_id = UUID(current_user["sub"])
user_id = UUID(current_user["user_id"])
export_service = DataExportService(db)
data = await export_service.export_user_data(user_id)
@@ -114,7 +112,7 @@ async def get_export_summary(
except Exception as e:
logger.error(
"export_summary_failed",
user_id=current_user.get("sub"),
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(

View File

@@ -13,11 +13,9 @@ from app.core.database import get_db
from app.services.user_service import UserService
from app.repositories.onboarding_repository import OnboardingRepository
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
logger = structlog.get_logger()
router = APIRouter(tags=["onboarding"])
route_builder = RouteBuilder('auth')
# Request/Response Models
class OnboardingStepStatus(BaseModel):
@@ -356,7 +354,7 @@ class OnboardingService:
# API Routes
@router.get(route_builder.build_base_route("me/onboarding/progress", include_tenant_prefix=False), response_model=UserProgress)
@router.get("/api/v1/auth/me/onboarding/progress", response_model=UserProgress)
async def get_user_progress(
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
@@ -375,7 +373,7 @@ async def get_user_progress(
detail="Failed to get onboarding progress"
)
@router.get(route_builder.build_base_route("{user_id}/onboarding/progress", include_tenant_prefix=False), response_model=UserProgress)
@router.get("/api/v1/auth/users/{user_id}/onboarding/progress", response_model=UserProgress)
async def get_user_progress_by_id(
user_id: str,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -408,7 +406,7 @@ async def get_user_progress_by_id(
detail="Failed to get onboarding progress"
)
@router.put(route_builder.build_base_route("me/onboarding/step", include_tenant_prefix=False), response_model=UserProgress)
@router.put("/api/v1/auth/me/onboarding/step", response_model=UserProgress)
async def update_onboarding_step(
update_request: UpdateStepRequest,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -433,7 +431,7 @@ async def update_onboarding_step(
detail="Failed to update onboarding step"
)
@router.get(route_builder.build_base_route("me/onboarding/next-step", include_tenant_prefix=False))
@router.get("/api/v1/auth/me/onboarding/next-step")
async def get_next_step(
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
@@ -452,7 +450,7 @@ async def get_next_step(
detail="Failed to get next step"
)
@router.get(route_builder.build_base_route("me/onboarding/can-access/{step_name}", include_tenant_prefix=False))
@router.get("/api/v1/auth/me/onboarding/can-access/{step_name}")
async def can_access_step(
step_name: str,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -475,7 +473,7 @@ async def can_access_step(
detail="Failed to check step access"
)
@router.post(route_builder.build_base_route("me/onboarding/complete", include_tenant_prefix=False))
@router.post("/api/v1/auth/me/onboarding/complete")
async def complete_onboarding(
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)

View File

@@ -12,7 +12,7 @@ from datetime import datetime, timezone
from app.core.database import get_db, get_background_db_session
from app.schemas.auth import UserResponse, PasswordChange
from app.schemas.users import UserUpdate, BatchUserRequest, OwnerUserCreate
from app.services.user_service import UserService
from app.services.user_service import UserService, EnhancedUserService
from app.models.users import User
from sqlalchemy.ext.asyncio import AsyncSession
@@ -24,133 +24,15 @@ from shared.auth.decorators import (
get_current_user_dep,
require_admin_role_dep
)
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
logger = structlog.get_logger()
router = APIRouter(tags=["users"])
route_builder = RouteBuilder('auth')
# Initialize audit logger
audit_logger = create_audit_logger("auth-service")
@router.get(route_builder.build_base_route("me", include_tenant_prefix=False), response_model=UserResponse)
async def get_current_user_info(
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get current user information - FIXED VERSION"""
try:
logger.debug(f"Getting user info for: {current_user}")
# Handle both User object (direct auth) and dict (from gateway headers)
if isinstance(current_user, dict):
# Coming from gateway headers - need to fetch user from DB
user_id = current_user.get("user_id")
if not user_id:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid user context"
)
# ✅ FIX: Fetch full user from database to get the real role
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
user = await user_repo.get_by_id(user_id)
logger.debug(f"Fetched user from DB - Role: {user.role}, Email: {user.email}")
# ✅ FIX: Return role from database, not from JWT headers
return UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
phone=user.phone,
language=user.language or "es",
timezone=user.timezone or "Europe/Madrid",
created_at=user.created_at,
last_login=user.last_login,
role=user.role, # ✅ CRITICAL: Use role from database, not headers
tenant_id=current_user.get("tenant_id")
)
else:
# Direct User object (shouldn't happen in microservice architecture)
logger.debug(f"Direct user object received - Role: {current_user.role}")
return UserResponse(
id=str(current_user.id),
email=current_user.email,
full_name=current_user.full_name,
is_active=current_user.is_active,
is_verified=current_user.is_verified,
phone=current_user.phone,
language=current_user.language or "es",
timezone=current_user.timezone or "Europe/Madrid",
created_at=current_user.created_at,
last_login=current_user.last_login,
role=current_user.role, # ✅ Use role from database
tenant_id=None
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Get user info error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get user information"
)
@router.put(route_builder.build_base_route("me", include_tenant_prefix=False), response_model=UserResponse)
async def update_current_user(
user_update: UserUpdate,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update current user information"""
try:
user_id = current_user.get("user_id") if isinstance(current_user, dict) else current_user.id
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
# Prepare update data
update_data = {}
if user_update.full_name is not None:
update_data["full_name"] = user_update.full_name
if user_update.phone is not None:
update_data["phone"] = user_update.phone
if user_update.language is not None:
update_data["language"] = user_update.language
if user_update.timezone is not None:
update_data["timezone"] = user_update.timezone
updated_user = await user_repo.update(user_id, update_data)
return UserResponse(
id=str(updated_user.id),
email=updated_user.email,
full_name=updated_user.full_name,
is_active=updated_user.is_active,
is_verified=updated_user.is_verified,
phone=updated_user.phone,
language=updated_user.language,
timezone=updated_user.timezone,
created_at=updated_user.created_at,
last_login=updated_user.last_login,
role=updated_user.role, # ✅ Include role
tenant_id=current_user.get("tenant_id") if isinstance(current_user, dict) else None
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Update user error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update user"
)
@router.delete(route_builder.build_base_route("delete/{user_id}", include_tenant_prefix=False))
@router.delete("/api/v1/auth/users/{user_id}")
async def delete_admin_user(
background_tasks: BackgroundTasks,
user_id: str = Path(..., description="User ID"),
@@ -244,7 +126,7 @@ async def execute_admin_user_deletion(user_id: str, requesting_user_id: str):
result=result)
@router.get(route_builder.build_base_route("delete/{user_id}/deletion-preview", include_tenant_prefix=False))
@router.get("/api/v1/auth/users/{user_id}/deletion-preview")
async def preview_user_deletion(
user_id: str = Path(..., description="User ID"),
db: AsyncSession = Depends(get_db)
@@ -294,7 +176,7 @@ async def preview_user_deletion(
return preview
@router.get(route_builder.build_base_route("users/{user_id}", include_tenant_prefix=False), response_model=UserResponse)
@router.get("/api/v1/auth/users/{user_id}", response_model=UserResponse)
async def get_user_by_id(
user_id: str = Path(..., description="User ID"),
db: AsyncSession = Depends(get_db)
@@ -353,7 +235,7 @@ async def get_user_by_id(
)
@router.post(route_builder.build_base_route("users/create-by-owner", include_tenant_prefix=False), response_model=UserResponse)
@router.post("/api/v1/auth/users/create-by-owner", response_model=UserResponse)
async def create_user_by_owner(
user_data: OwnerUserCreate,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
@@ -448,7 +330,7 @@ async def create_user_by_owner(
)
@router.post(route_builder.build_base_route("users/batch", include_tenant_prefix=False), response_model=Dict[str, Any])
@router.post("/api/v1/auth/users/batch", response_model=Dict[str, Any])
async def get_users_batch(
request: BatchUserRequest,
db: AsyncSession = Depends(get_db)
@@ -526,3 +408,75 @@ async def get_users_batch(
detail="Failed to fetch users"
)
@router.get("/api/v1/auth/users/{user_id}/activity")
async def get_user_activity(
user_id: str = Path(..., description="User ID"),
current_user = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get user activity information.
This endpoint returns detailed activity information for a user including:
- Last login timestamp
- Account creation date
- Active session count
- Last activity timestamp
- User status information
**Permissions:** User can view their own activity, admins can view any user's activity
"""
try:
# Validate UUID format
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
# Check permissions - user can view their own activity, admins can view any
if current_user["user_id"] != user_id:
# Check if current user has admin privileges
user_role = current_user.get("role", "user")
if user_role not in ["admin", "super_admin", "manager"]:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Insufficient permissions to view this user's activity"
)
# Initialize enhanced user service
from app.core.config import settings
from shared.database.base import create_database_manager
database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
user_service = EnhancedUserService(database_manager)
# Get user activity data
activity_data = await user_service.get_user_activity(user_id)
if "error" in activity_data:
if activity_data["error"] == "User not found":
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
else:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to get user activity: {activity_data['error']}"
)
logger.debug("Retrieved user activity", user_id=user_id)
return activity_data
except HTTPException:
raise
except Exception as e:
logger.error("Get user activity error", user_id=user_id, error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get user activity information"
)

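The new activity endpoint enforces a simple "self or elevated role" rule. The same check as a standalone sketch (role names taken from the handler above; the helper itself is not part of the commit):

from fastapi import HTTPException, status

ELEVATED_ROLES = {"admin", "super_admin", "manager"}

def ensure_can_view_activity(current_user: dict, target_user_id: str) -> None:
    """Allow users to see their own activity; elevated roles may see anyone's."""
    if current_user.get("user_id") == target_user_id:
        return
    if current_user.get("role", "user") not in ELEVATED_ROLES:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions to view this user's activity",
        )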
View File

@@ -86,12 +86,12 @@ class DataExportService:
active_sessions = []
for token in tokens:
if token.expires_at > datetime.now(timezone.utc) and not token.revoked:
if token.expires_at > datetime.now(timezone.utc) and not token.is_revoked:
active_sessions.append({
"token_id": str(token.id),
"created_at": token.created_at.isoformat() if token.created_at else None,
"expires_at": token.expires_at.isoformat() if token.expires_at else None,
"device_info": token.device_info
"is_revoked": token.is_revoked
})
return {
@@ -118,9 +118,22 @@ class DataExportService:
async def _export_security_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export security-related data"""
# First get user email
user_query = select(User).where(User.id == user_id)
user_result = await self.db.execute(user_query)
user = user_result.scalar_one_or_none()
if not user:
return {
"recent_login_attempts": [],
"total_attempts_exported": 0,
"note": "User not found"
}
# LoginAttempt uses email, not user_id
query = select(LoginAttempt).where(
LoginAttempt.user_id == user_id
).order_by(LoginAttempt.attempted_at.desc()).limit(50)
LoginAttempt.email == user.email
).order_by(LoginAttempt.created_at.desc()).limit(50)
result = await self.db.execute(query)
attempts = result.scalars().all()
@@ -128,7 +141,7 @@ class DataExportService:
login_attempts = []
for attempt in attempts:
login_attempts.append({
"attempted_at": attempt.attempted_at.isoformat() if attempt.attempted_at else None,
"attempted_at": attempt.created_at.isoformat() if attempt.created_at else None,
"success": attempt.success,
"ip_address": attempt.ip_address,
"user_agent": attempt.user_agent,

View File

@@ -463,7 +463,7 @@ class EnhancedUserService:
return {"error": "User not found"}
# Get token activity
active_tokens = await token_repo.get_user_active_tokens(user_id)
active_tokens = await token_repo.get_active_tokens_for_user(user_id)
return {
"user_id": user_id,
@@ -483,4 +483,4 @@ class EnhancedUserService:
# Legacy compatibility - alias EnhancedUserService as UserService
UserService = EnhancedUserService
UserService = EnhancedUserService

View File

@@ -398,11 +398,80 @@ class IngredientRepository(BaseRepository[Ingredient, IngredientCreate, Ingredie
from app.schemas.inventory import IngredientUpdate
update_data = IngredientUpdate(last_purchase_price=price)
return await self.update(ingredient_id, update_data)
except Exception as e:
logger.error("Failed to update last purchase price", error=str(e), ingredient_id=ingredient_id)
raise
async def update_weighted_average_cost(
self,
ingredient_id: UUID,
current_stock_quantity: float,
new_purchase_quantity: float,
new_unit_cost: float
) -> Optional[Ingredient]:
"""
Update the average cost using weighted average calculation.
Formula:
new_average_cost = (current_stock_qty × current_avg_cost + new_qty × new_cost) / (current_stock_qty + new_qty)
Args:
ingredient_id: ID of the ingredient
current_stock_quantity: Current stock quantity before this purchase
new_purchase_quantity: Quantity being purchased
new_unit_cost: Unit cost of the new purchase
Returns:
Updated ingredient or None if not found
"""
try:
# Get current ingredient data
ingredient = await self.get_by_id(ingredient_id)
if not ingredient:
logger.warning("Ingredient not found for average cost update", ingredient_id=ingredient_id)
return None
from decimal import Decimal
# Get current average cost (default to new cost if not set)
current_avg_cost = float(ingredient.average_cost) if ingredient.average_cost else float(new_unit_cost)
# Calculate weighted average
# If no current stock, just use the new purchase price
if current_stock_quantity <= 0:
new_average_cost = Decimal(str(new_unit_cost))
else:
# Weighted average formula
total_cost = (current_stock_quantity * current_avg_cost) + (new_purchase_quantity * new_unit_cost)
total_quantity = current_stock_quantity + new_purchase_quantity
new_average_cost = Decimal(str(total_cost / total_quantity))
# Update the ingredient
from app.schemas.inventory import IngredientUpdate
update_data = IngredientUpdate(average_cost=new_average_cost)
updated_ingredient = await self.update(ingredient_id, update_data)
logger.info(
"Updated weighted average cost",
ingredient_id=ingredient_id,
old_average_cost=current_avg_cost,
new_average_cost=float(new_average_cost),
current_stock_qty=current_stock_quantity,
new_purchase_qty=new_purchase_quantity,
new_unit_cost=new_unit_cost
)
return updated_ingredient
except Exception as e:
logger.error(
"Failed to update weighted average cost",
error=str(e),
ingredient_id=ingredient_id
)
raise
async def get_ingredients_by_category(self, tenant_id: UUID, category: str) -> List[Ingredient]:
"""Get all ingredients in a specific category"""
try:

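For the weighted-average method added above, a quick worked example: with 150 kg on hand at an average cost of 5.00 €/kg, a 50 kg purchase at 6.00 €/kg gives (150 × 5.00 + 50 × 6.00) / 200 = 5.25 €/kg, the same arithmetic the test script later in this commit exercises as its price-increase case.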
View File

@@ -28,7 +28,9 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
self,
movement_data: StockMovementCreate,
tenant_id: UUID,
created_by: Optional[UUID] = None
created_by: Optional[UUID] = None,
quantity_before: Optional[float] = None,
quantity_after: Optional[float] = None
) -> StockMovement:
"""Create a new stock movement record"""
try:
@@ -37,6 +39,12 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
create_data['tenant_id'] = tenant_id
create_data['created_by'] = created_by
# Add quantity_before and quantity_after if provided
if quantity_before is not None:
create_data['quantity_before'] = quantity_before
if quantity_after is not None:
create_data['quantity_after'] = quantity_after
# Ensure movement_type is properly converted to enum value
if 'movement_type' in create_data:
movement_type = create_data['movement_type']
@@ -65,6 +73,8 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
ingredient_id=record.ingredient_id,
movement_type=record.movement_type if record.movement_type else None,
quantity=record.quantity,
quantity_before=record.quantity_before,
quantity_after=record.quantity_after,
tenant_id=tenant_id
)
return record
@@ -453,7 +463,7 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
# Generate reference number
reference_number = f"AUTO-EXPIRE-{batch_number or stock_id}"
# Create movement data
# Create movement data (without quantity_before/quantity_after - these will be calculated by the caller)
movement_data = {
'tenant_id': tenant_id,
'ingredient_id': ingredient_id,
@@ -462,8 +472,6 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
'quantity': quantity,
'unit_cost': Decimal(str(unit_cost)) if unit_cost else None,
'total_cost': total_cost,
'quantity_before': quantity,
'quantity_after': 0,
'reference_number': reference_number,
'reason_code': 'expired',
'notes': f"Lote automáticamente marcado como caducado. Vencimiento: {expiration_date.strftime('%Y-%m-%d')}",
@@ -536,4 +544,4 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
except Exception as e:
logger.error("Failed to get inventory waste total", error=str(e), tenant_id=str(tenant_id))
raise
raise

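With the two new optional parameters, callers are expected to compute the stock level before and after each movement and pass both through to create_movement. A minimal sketch of that bookkeeping, with an illustrative helper name:

def movement_window(total_available: float, quantity: float, is_addition: bool):
    """Return (quantity_before, quantity_after) for a single stock movement."""
    before = total_available
    after = before + quantity if is_addition else before - quantity
    return before, after

# Consuming 30 units from 120 available:
# movement_window(120, 30, is_addition=False) -> (120, 90)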
View File

@@ -43,10 +43,10 @@ class IngredientCreate(InventoryBaseSchema):
brand: Optional[str] = Field(None, max_length=100, description="Brand name")
unit_of_measure: UnitOfMeasure = Field(..., description="Unit of measure")
package_size: Optional[float] = Field(None, gt=0, description="Package size")
# Pricing
average_cost: Optional[Decimal] = Field(None, ge=0, description="Average cost per unit")
standard_cost: Optional[Decimal] = Field(None, ge=0, description="Standard cost per unit")
# Note: average_cost is calculated automatically from purchases (not set on create)
standard_cost: Optional[Decimal] = Field(None, ge=0, description="Standard/target cost per unit for budgeting")
# Stock management
low_stock_threshold: float = Field(10.0, ge=0, description="Low stock alert threshold")
@@ -187,6 +187,13 @@ class StockCreate(InventoryBaseSchema):
shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")
@validator('supplier_id', pre=True)
def validate_supplier_id(cls, v):
"""Convert empty string to None for optional UUID field"""
if v == '' or (isinstance(v, str) and v.strip() == ''):
return None
return v
@validator('storage_temperature_max')
def validate_temperature_range(cls, v, values):
min_temp = values.get('storage_temperature_min')
@@ -233,6 +240,13 @@ class StockUpdate(InventoryBaseSchema):
shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")
@validator('supplier_id', pre=True)
def validate_supplier_id(cls, v):
"""Convert empty string to None for optional UUID field"""
if v == '' or (isinstance(v, str) and v.strip() == ''):
return None
return v
class StockResponse(InventoryBaseSchema):
"""Schema for stock API responses"""

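The pre-validator converts the empty string an HTML form posts into None so the optional UUID field validates cleanly. A self-contained sketch of the same idea on a stripped-down model, written in the Pydantic v1 style used by these schemas:

from typing import Optional
from uuid import UUID
from pydantic import BaseModel, validator

class StockForm(BaseModel):
    supplier_id: Optional[UUID] = None

    @validator("supplier_id", pre=True)
    def blank_to_none(cls, v):
        # HTML selects often post '' when nothing is chosen; treat it as "not set".
        if isinstance(v, str) and v.strip() == "":
            return None
        return v

print(StockForm(supplier_id="").supplier_id)  # None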
View File

@@ -10,6 +10,7 @@ import uuid
from typing import List, Dict, Any, Optional
from uuid import UUID
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import structlog
from apscheduler.triggers.cron import CronTrigger
from sqlalchemy import text
@@ -19,6 +20,8 @@ from shared.alerts.templates import format_item_message
from app.repositories.stock_repository import StockRepository
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.inventory_alert_repository import InventoryAlertRepository
from app.schemas.inventory import StockMovementCreate
from app.models.inventory import StockMovementType
logger = structlog.get_logger()
@@ -780,18 +783,35 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
# 1. Mark the stock batch as expired
await stock_repo.mark_batch_as_expired(stock.id, tenant_id)
# 2. Create waste stock movement
await movement_repo.create_automatic_waste_movement(
# 2. Get current stock level before this movement
current_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, stock.ingredient_id)
quantity_before = current_stock['total_available']
quantity_after = quantity_before - stock.current_quantity
# 3. Create waste stock movement with proper quantity tracking
await movement_repo.create_movement(
movement_data=StockMovementCreate(
tenant_id=tenant_id,
ingredient_id=stock.ingredient_id,
stock_id=stock.id,
movement_type=StockMovementType.WASTE,
quantity=stock.current_quantity,
unit_cost=Decimal(str(stock.unit_cost)) if stock.unit_cost else None,
quantity_before=quantity_before,
quantity_after=quantity_after,
reference_number=f"AUTO-EXPIRE-{stock.batch_number or stock.id}",
reason_code='expired',
notes=f"Lote automáticamente marcado como caducado. Vencimiento: {effective_expiration_date.strftime('%Y-%m-%d')}",
movement_date=datetime.now(),
created_by=None
),
tenant_id=tenant_id,
ingredient_id=stock.ingredient_id,
stock_id=stock.id,
quantity=stock.current_quantity,
unit_cost=float(stock.unit_cost) if stock.unit_cost else None,
batch_number=stock.batch_number,
expiration_date=effective_expiration_date,
created_by=None # Automatic system operation
created_by=None
)
# 4. Update the stock quantity to 0 (moved to waste)
await stock_repo.update_stock_to_zero(stock.id, tenant_id)
# 3. Update the stock quantity to 0 (moved to waste)
await stock_repo.update_stock_to_zero(stock.id, tenant_id)

View File

@@ -280,6 +280,11 @@ class InventoryService:
# Create stock entry
stock = await stock_repo.create_stock_entry(stock_data, tenant_id)
# Get current stock level before this movement
current_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(stock_data.ingredient_id))
quantity_before = current_stock['total_available']
quantity_after = quantity_before + stock_data.current_quantity
# Create stock movement record
movement_data = StockMovementCreate(
ingredient_id=stock_data.ingredient_id,
@@ -289,14 +294,22 @@ class InventoryService:
unit_cost=stock_data.unit_cost,
notes=f"Initial stock entry - Batch: {stock_data.batch_number or 'N/A'}"
)
await movement_repo.create_movement(movement_data, tenant_id, user_id)
# Update ingredient's last purchase price
await movement_repo.create_movement(movement_data, tenant_id, user_id, quantity_before, quantity_after)
# Update ingredient's last purchase price and weighted average cost
if stock_data.unit_cost:
await ingredient_repo.update_last_purchase_price(
UUID(stock_data.ingredient_id),
UUID(stock_data.ingredient_id),
float(stock_data.unit_cost)
)
# Calculate and update weighted average cost
await ingredient_repo.update_weighted_average_cost(
ingredient_id=UUID(stock_data.ingredient_id),
current_stock_quantity=quantity_before,
new_purchase_quantity=stock_data.current_quantity,
new_unit_cost=float(stock_data.unit_cost)
)
# Convert to response schema
response = StockResponse(**stock.to_dict())
@@ -333,19 +346,28 @@ class InventoryService:
# Reserve stock first
reservations = await stock_repo.reserve_stock(tenant_id, ingredient_id, quantity, fifo)
if not reservations:
raise ValueError("Insufficient stock available")
# Get current stock level before this consumption
current_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, ingredient_id)
running_stock_level = current_stock['total_available']
consumed_items = []
for reservation in reservations:
stock_id = UUID(reservation['stock_id'])
reserved_qty = reservation['reserved_quantity']
# Calculate before/after for this specific batch
batch_quantity_before = running_stock_level
batch_quantity_after = running_stock_level - reserved_qty
running_stock_level = batch_quantity_after # Update for next iteration
# Consume from reserved stock
consumed_stock = await stock_repo.consume_stock(stock_id, reserved_qty, from_reserved=True)
# Create movement record
# Create movement record with progressive tracking
movement_data = StockMovementCreate(
ingredient_id=str(ingredient_id),
stock_id=str(stock_id),
@@ -354,7 +376,7 @@ class InventoryService:
reference_number=reference_number,
notes=notes or f"Stock consumption - Batch: {reservation.get('batch_number', 'N/A')}"
)
await movement_repo.create_movement(movement_data, tenant_id, user_id)
await movement_repo.create_movement(movement_data, tenant_id, user_id, batch_quantity_before, batch_quantity_after)
consumed_items.append({
'stock_id': str(stock_id),
@@ -650,6 +672,187 @@ class InventoryService:
logger.error("Failed to get stock entries", error=str(e), tenant_id=tenant_id)
raise
async def create_stock_movement(
self,
movement_data: StockMovementCreate,
tenant_id: UUID,
user_id: Optional[UUID] = None
) -> StockMovementResponse:
"""Create a stock movement record with proper quantity tracking"""
try:
async with get_db_transaction() as db:
movement_repo = StockMovementRepository(db)
ingredient_repo = IngredientRepository(db)
stock_repo = StockRepository(db)
# Validate ingredient exists
ingredient = await ingredient_repo.get_by_id(UUID(movement_data.ingredient_id))
if not ingredient or ingredient.tenant_id != tenant_id:
raise ValueError("Ingredient not found")
# Get current stock level before this movement
current_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(movement_data.ingredient_id))
quantity_before = current_stock['total_available']
# Calculate quantity_after based on movement type
movement_quantity = movement_data.quantity or 0
if movement_data.movement_type in [StockMovementType.PURCHASE, StockMovementType.TRANSFORMATION, StockMovementType.INITIAL_STOCK]:
# These are additions to stock
quantity_after = quantity_before + movement_quantity
else:
# These are subtractions from stock (PRODUCTION_USE, WASTE, ADJUSTMENT)
quantity_after = quantity_before - movement_quantity
# Create stock movement record
movement = await movement_repo.create_movement(
movement_data,
tenant_id,
user_id,
quantity_before,
quantity_after
)
# Convert to response schema
response = StockMovementResponse(**movement.to_dict())
response.ingredient = IngredientResponse(**ingredient.to_dict())
logger.info(
"Stock movement created successfully",
movement_id=movement.id,
ingredient_id=movement.ingredient_id,
quantity=movement.quantity,
quantity_before=quantity_before,
quantity_after=quantity_after
)
return response
except Exception as e:
logger.error("Failed to create stock movement", error=str(e), tenant_id=tenant_id)
raise
async def get_stock_entry(
self,
stock_id: UUID,
tenant_id: UUID
) -> Optional[StockResponse]:
"""Get a single stock entry by ID"""
try:
async with get_db_transaction() as db:
stock_repo = StockRepository(db)
ingredient_repo = IngredientRepository(db)
# Get stock entry
stock = await stock_repo.get_by_id(stock_id)
# Check if stock exists and belongs to tenant
if not stock or stock.tenant_id != tenant_id:
return None
# Get ingredient information
ingredient = await ingredient_repo.get_by_id(stock.ingredient_id)
response = StockResponse(**stock.to_dict())
if ingredient:
ingredient_dict = ingredient.to_dict()
# Map category field based on product type
if ingredient.product_type and ingredient.product_type.value == 'finished_product':
ingredient_dict['category'] = ingredient.product_category.value if ingredient.product_category else None
else:
ingredient_dict['category'] = ingredient.ingredient_category.value if ingredient.ingredient_category else None
response.ingredient = IngredientResponse(**ingredient_dict)
return response
except Exception as e:
logger.error("Failed to get stock entry", error=str(e), stock_id=stock_id, tenant_id=tenant_id)
raise
async def update_stock(
self,
stock_id: UUID,
stock_data: StockUpdate,
tenant_id: UUID
) -> Optional[StockResponse]:
"""Update a stock entry"""
try:
async with get_db_transaction() as db:
stock_repo = StockRepository(db)
ingredient_repo = IngredientRepository(db)
# Check if stock exists and belongs to tenant
existing_stock = await stock_repo.get_by_id(stock_id)
if not existing_stock or existing_stock.tenant_id != tenant_id:
return None
# Prepare update data
update_data = stock_data.model_dump(exclude_unset=True)
# Recalculate available_quantity if current_quantity or reserved_quantity changed
if 'current_quantity' in update_data or 'reserved_quantity' in update_data:
current_qty = update_data.get('current_quantity', existing_stock.current_quantity)
reserved_qty = update_data.get('reserved_quantity', existing_stock.reserved_quantity)
update_data['available_quantity'] = max(0, current_qty - reserved_qty)
# Recalculate total cost if unit_cost or current_quantity changed
if 'unit_cost' in update_data or 'current_quantity' in update_data:
unit_cost = update_data.get('unit_cost', existing_stock.unit_cost)
current_qty = update_data.get('current_quantity', existing_stock.current_quantity)
if unit_cost is not None and current_qty is not None:
from decimal import Decimal
update_data['total_cost'] = Decimal(str(unit_cost)) * Decimal(str(current_qty))
# Update the stock entry
updated_stock = await stock_repo.update(stock_id, update_data)
if not updated_stock:
return None
# Get ingredient information
ingredient = await ingredient_repo.get_by_id(updated_stock.ingredient_id)
response = StockResponse(**updated_stock.to_dict())
if ingredient:
ingredient_dict = ingredient.to_dict()
# Map category field based on product type
if ingredient.product_type and ingredient.product_type.value == 'finished_product':
ingredient_dict['category'] = ingredient.product_category.value if ingredient.product_category else None
else:
ingredient_dict['category'] = ingredient.ingredient_category.value if ingredient.ingredient_category else None
response.ingredient = IngredientResponse(**ingredient_dict)
logger.info("Stock entry updated successfully", stock_id=stock_id, tenant_id=tenant_id)
return response
except Exception as e:
logger.error("Failed to update stock entry", error=str(e), stock_id=stock_id, tenant_id=tenant_id)
raise
async def delete_stock(
self,
stock_id: UUID,
tenant_id: UUID
) -> bool:
"""Delete a stock entry"""
try:
async with get_db_transaction() as db:
stock_repo = StockRepository(db)
# Check if stock exists and belongs to tenant
existing_stock = await stock_repo.get_by_id(stock_id)
if not existing_stock or existing_stock.tenant_id != tenant_id:
return False
# Delete the stock entry
success = await stock_repo.delete_by_id(stock_id)
if success:
logger.info("Stock entry deleted successfully", stock_id=stock_id, tenant_id=tenant_id)
return success
except Exception as e:
logger.error("Failed to delete stock entry", error=str(e), stock_id=stock_id, tenant_id=tenant_id)
raise
# ===== DELETION METHODS =====
async def hard_delete_ingredient(

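Consumption now threads a running stock level through the FIFO reservations so every movement records a consistent before/after pair. A compact sketch of that loop, with hard-coded reservation dicts standing in for the repository results:

reservations = [
    {"stock_id": "a", "reserved_quantity": 40.0},
    {"stock_id": "b", "reserved_quantity": 25.0},
]

running_stock_level = 120.0  # total available before the consumption
movements = []
for reservation in reservations:
    qty = reservation["reserved_quantity"]
    before = running_stock_level
    after = before - qty
    running_stock_level = after  # carry forward for the next batch
    movements.append((reservation["stock_id"], qty, before, after))

print(movements)
# [('a', 40.0, 120.0, 80.0), ('b', 25.0, 80.0, 55.0)]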
View File

@@ -312,19 +312,41 @@ class SustainabilityService:
baseline = await self._get_baseline_waste(db, tenant_id)
current_waste_percentage = waste_data['waste_percentage']
# Ensure baseline is at least the industry average if not available
baseline_percentage = baseline.get('waste_percentage', EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100)
# If baseline is too low (less than 1%), use industry average to prevent calculation errors
if baseline_percentage < 1.0:
baseline_percentage = EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100
# Calculate reduction from baseline
# If current waste is higher than baseline, show negative reduction (worse than baseline)
# If current waste is lower than baseline, show positive reduction (better than baseline)
if baseline_percentage > 0:
reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
else:
reduction_percentage = 0
# SDG 12.3 target is 50% reduction
sdg_target = baseline_percentage * (1 - EnvironmentalConstants.SDG_TARGET_REDUCTION)
progress_to_target = (reduction_percentage / (EnvironmentalConstants.SDG_TARGET_REDUCTION * 100)) * 100
# Calculate progress toward 50% reduction target
# The target is to achieve 50% reduction from baseline
# So if baseline is 25%, target is to reach 12.5% (25% * 0.5)
target_reduction_percentage = 50.0
target_waste_percentage = baseline_percentage * (1 - (target_reduction_percentage / 100))
# Calculate progress: how much of the 50% target has been achieved
# If we've reduced from 25% to 19.28%, we've achieved (25-19.28)/(25-12.5) = 5.72/12.5 = 45.8% of target
if baseline_percentage > target_waste_percentage:
max_possible_reduction = baseline_percentage - target_waste_percentage
actual_reduction = baseline_percentage - current_waste_percentage
progress_to_target = (actual_reduction / max_possible_reduction) * 100 if max_possible_reduction > 0 else 0
else:
# If current is already better than target
progress_to_target = 100.0 if current_waste_percentage <= target_waste_percentage else 0.0
# Status assessment
# Ensure progress doesn't exceed 100%
progress_to_target = min(progress_to_target, 100.0)
# Status assessment based on actual reduction achieved
if reduction_percentage >= 50:
status = 'sdg_compliant'
status_label = 'SDG 12.3 Compliant'
@@ -334,6 +356,12 @@ class SustainabilityService:
elif reduction_percentage >= 10:
status = 'progressing'
status_label = 'Making Progress'
elif reduction_percentage > 0:
status = 'improving'
status_label = 'Improving'
elif reduction_percentage < 0:
status = 'baseline'
status_label = 'Above Baseline'
else:
status = 'baseline'
status_label = 'Establishing Baseline'
@@ -343,11 +371,11 @@ class SustainabilityService:
'baseline_waste_percentage': round(baseline_percentage, 2),
'current_waste_percentage': round(current_waste_percentage, 2),
'reduction_achieved': round(reduction_percentage, 2),
'target_reduction': 50.0,
'progress_to_target': round(min(progress_to_target, 100), 1),
'target_reduction': target_reduction_percentage,
'progress_to_target': round(max(progress_to_target, 0), 1), # Ensure non-negative
'status': status,
'status_label': status_label,
'target_waste_percentage': round(sdg_target, 2)
'target_waste_percentage': round(target_waste_percentage, 2)
},
'baseline_period': baseline.get('period', 'industry_average'),
'certification_ready': reduction_percentage >= 50,

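The reworked metric measures how far the current waste percentage has moved from the baseline toward the 50 % reduction target, rather than dividing the raw reduction by 50. A small sketch reproducing the numbers from the inline comment (25 % baseline, 19.28 % current); the function name is illustrative:

def progress_to_sdg_target(baseline_pct: float, current_pct: float,
                           target_reduction_pct: float = 50.0) -> float:
    """Share of the reduction target achieved, clamped to 0..100."""
    target_pct = baseline_pct * (1 - target_reduction_pct / 100)
    max_possible = baseline_pct - target_pct
    if max_possible <= 0:
        return 100.0 if current_pct <= target_pct else 0.0
    progress = (baseline_pct - current_pct) / max_possible * 100
    return max(0.0, min(progress, 100.0))

print(round(progress_to_sdg_target(25.0, 19.28), 1))  # 45.8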
View File

@@ -81,16 +81,25 @@ class TransformationService:
source_reservations
)
# Consume source stock and create movements
# Get current stock level before source consumption
current_source_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(transformation_data.source_ingredient_id))
running_stock_level = current_source_stock['total_available']
# Consume source stock and create movements with progressive tracking
consumed_items = []
for reservation in source_reservations:
stock_id = UUID(reservation['stock_id'])
reserved_qty = reservation['reserved_quantity']
# Calculate before/after for this specific batch
batch_quantity_before = running_stock_level
batch_quantity_after = running_stock_level - reserved_qty
running_stock_level = batch_quantity_after # Update for next iteration
# Consume from reserved stock
await stock_repo.consume_stock(stock_id, reserved_qty, from_reserved=True)
# Create movement record
# Create movement record for source consumption with progressive tracking
movement_data = StockMovementCreate(
ingredient_id=transformation_data.source_ingredient_id,
stock_id=str(stock_id),
@@ -99,7 +108,7 @@ class TransformationService:
reference_number=transformation.transformation_reference,
notes=f"Transformation: {transformation_data.source_stage.value}{transformation_data.target_stage.value}"
)
await movement_repo.create_movement(movement_data, tenant_id, user_id)
await movement_repo.create_movement(movement_data, tenant_id, user_id, batch_quantity_before, batch_quantity_after)
consumed_items.append({
'stock_id': str(stock_id),
@@ -124,6 +133,11 @@ class TransformationService:
target_stock = await stock_repo.create_stock_entry(target_stock_data, tenant_id)
# Get current stock level before target addition
current_target_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(transformation_data.target_ingredient_id))
target_quantity_before = current_target_stock['total_available']
target_quantity_after = target_quantity_before + transformation_data.target_quantity
# Create target stock movement
target_movement_data = StockMovementCreate(
ingredient_id=transformation_data.target_ingredient_id,
@@ -133,7 +147,7 @@ class TransformationService:
reference_number=transformation.transformation_reference,
notes=f"Transformation result: {transformation_data.source_stage.value}{transformation_data.target_stage.value}"
)
await movement_repo.create_movement(target_movement_data, tenant_id, user_id)
await movement_repo.create_movement(target_movement_data, tenant_id, user_id, target_quantity_before, target_quantity_after)
# Convert to response schema
response = ProductTransformationResponse(**transformation.to_dict())
@@ -329,4 +343,4 @@ class TransformationService:
except Exception as e:
logger.error("Failed to get transformation summary", error=str(e), tenant_id=tenant_id)
raise
raise

View File

@@ -0,0 +1,148 @@
#!/usr/bin/env python3
"""
Test script to demonstrate and verify the weighted average cost calculation
Location: services/inventory/tests/test_weighted_average_cost.py
"""
import sys
from decimal import Decimal
def calculate_weighted_average(current_stock: float, current_avg_cost: float,
new_quantity: float, new_unit_cost: float) -> float:
"""
Calculate weighted average cost - mirrors the implementation in ingredient_repository.py
Args:
current_stock: Current stock quantity before purchase
current_avg_cost: Current average cost per unit
new_quantity: Quantity being purchased
new_unit_cost: Unit cost of new purchase
Returns:
New average cost per unit
"""
if current_stock <= 0:
return new_unit_cost
total_cost = (current_stock * current_avg_cost) + (new_quantity * new_unit_cost)
total_quantity = current_stock + new_quantity
return total_cost / total_quantity
def print_test_case(case_num: int, title: str, current_stock: float, current_avg_cost: float,
new_quantity: float, new_unit_cost: float):
"""Print a formatted test case with calculation details"""
print(f"\nTest Case {case_num}: {title}")
print("-" * 60)
print(f"Current Stock: {current_stock} kg @ €{current_avg_cost:.2f}/kg")
print(f"New Purchase: {new_quantity} kg @ €{new_unit_cost:.2f}/kg")
new_avg_cost = calculate_weighted_average(current_stock, current_avg_cost,
new_quantity, new_unit_cost)
if current_stock > 0:
total_cost = (current_stock * current_avg_cost) + (new_quantity * new_unit_cost)
total_quantity = current_stock + new_quantity
print(f"Calculation: ({current_stock} ×{current_avg_cost:.2f} + {new_quantity} ×{new_unit_cost:.2f}) / {total_quantity}")
print(f" = (€{current_stock * current_avg_cost:.2f} + €{new_quantity * new_unit_cost:.2f}) / {total_quantity}")
print(f" = €{total_cost:.2f} / {total_quantity}")
print(f"→ New Average Cost: €{new_avg_cost:.2f}/kg")
return new_avg_cost
def test_weighted_average_calculation():
"""Run comprehensive tests of the weighted average cost calculation"""
print("=" * 80)
print("WEIGHTED AVERAGE COST CALCULATION - COMPREHENSIVE TEST SUITE")
print("=" * 80)
# Test Case 1: First Purchase (Bootstrap case)
print_test_case(
1, "First Purchase (No Existing Stock)",
current_stock=0,
current_avg_cost=0,
new_quantity=100,
new_unit_cost=5.00
)
# Test Case 2: Same Price Purchase
print_test_case(
2, "Second Purchase at Same Price",
current_stock=100,
current_avg_cost=5.00,
new_quantity=50,
new_unit_cost=5.00
)
# Test Case 3: Price Increase
avg_cost = print_test_case(
3, "Purchase at Higher Price (Inflation)",
current_stock=150,
current_avg_cost=5.00,
new_quantity=50,
new_unit_cost=6.00
)
# Test Case 4: Large Volume Discount
avg_cost = print_test_case(
4, "Large Purchase with Volume Discount",
current_stock=200,
current_avg_cost=5.25,
new_quantity=200,
new_unit_cost=4.50
)
# Test Case 5: Small Purchase After Consumption
avg_cost = print_test_case(
5, "Purchase After Heavy Consumption",
current_stock=50,
current_avg_cost=4.88,
new_quantity=100,
new_unit_cost=5.50
)
# Test Case 6: Tiny Emergency Purchase
print_test_case(
6, "Small Emergency Purchase at Premium Price",
current_stock=150,
current_avg_cost=5.29,
new_quantity=10,
new_unit_cost=8.00
)
# Summary
print("\n" + "=" * 80)
print("KEY INSIGHTS")
print("=" * 80)
print("""
✓ The weighted average considers both QUANTITY and PRICE:
- Larger purchases have more impact on the average
- Smaller purchases have minimal impact
✓ Behavior with price changes:
- Price increases gradually raise the average (dampened by existing stock)
- Price decreases gradually lower the average (dampened by existing stock)
- Volume discounts can significantly lower costs when buying in bulk
✓ Business implications:
- Encourages bulk purchasing when prices are favorable
- Protects against price spike impacts (averaged over time)
- Provides accurate COGS for financial reporting
- Helps identify procurement opportunities (compare to standard_cost)
✓ Implementation notes:
- Calculation happens automatically on every stock addition
- No user intervention required
- Logged for audit purposes
- Works with FIFO stock consumption
""")
print("=" * 80)
print("✓ All tests completed successfully!")
print("=" * 80)
if __name__ == "__main__":
test_weighted_average_calculation()
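The script above only prints the worked examples; a minimal pytest-style sketch (assuming calculate_weighted_average is importable from this test module) could pin the same numbers down as assertions:
import pytest
# Hypothetical import path; adjust to wherever calculate_weighted_average lives
from test_weighted_average_cost import calculate_weighted_average
def test_first_purchase_uses_new_unit_cost():
    # With no existing stock the average is simply the purchase price
    assert calculate_weighted_average(0, 0, 100, 5.00) == 5.00
def test_price_increase_is_dampened_by_existing_stock():
    # 150 kg @ €5.00 plus 50 kg @ €6.00 -> (750 + 300) / 200 = €5.25
    assert calculate_weighted_average(150, 5.00, 50, 6.00) == pytest.approx(5.25)
def test_volume_discount_lowers_average():
    # 200 kg @ €5.25 plus 200 kg @ €4.50 -> (1050 + 900) / 400 = €4.875
    assert calculate_weighted_average(200, 5.25, 200, 4.50) == pytest.approx(4.875)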

View File

@@ -75,9 +75,6 @@ async def create_customer(
):
"""Create a new customer"""
try:
# Ensure tenant_id matches
customer_data.tenant_id = tenant_id
# Check if customer code already exists
existing_customer = await orders_service.customer_repo.get_by_customer_code(
db, customer_data.customer_code, tenant_id
@@ -88,12 +85,25 @@ async def create_customer(
detail="Customer code already exists"
)
# Extract user ID safely
user_id = current_user.get("user_id")
if not user_id:
logger.error("User ID not found in current_user context", current_user_keys=list(current_user.keys()))
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="User authentication error"
)
customer = await orders_service.customer_repo.create(
db,
obj_in=customer_data.dict(),
created_by=UUID(current_user["sub"])
obj_in=customer_data,
created_by=UUID(user_id),
tenant_id=tenant_id
)
# Commit the transaction to persist changes
await db.commit()
logger.info("Customer created successfully",
customer_id=str(customer.id),
customer_code=customer.customer_code)
@@ -202,13 +212,25 @@ async def update_customer(
)
# Update customer
# Extract user ID safely for update
user_id = current_user.get("user_id")
if not user_id:
logger.error("User ID not found in current_user context for update", current_user_keys=list(current_user.keys()))
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="User authentication error"
)
updated_customer = await orders_service.customer_repo.update(
db,
db_obj=customer,
obj_in=customer_data.dict(exclude_unset=True),
updated_by=UUID(current_user["sub"])
updated_by=UUID(user_id)
)
# Commit the transaction to persist changes
await db.commit()
logger.info("Customer updated successfully",
customer_id=str(customer_id))
@@ -262,6 +284,9 @@ async def delete_customer(
await orders_service.customer_repo.delete(db, customer_id, tenant_id)
# Commit the transaction to persist deletion
await db.commit()
# Log HIGH severity audit event for customer deletion (GDPR compliance)
try:
await audit_logger.log_deletion(

View File

@@ -76,15 +76,24 @@ async def create_order(
):
"""Create a new customer order"""
try:
# Ensure tenant_id matches
order_data.tenant_id = tenant_id
# Extract user ID safely
user_id = current_user.get("user_id")
if not user_id:
logger.error("User ID not found in current_user context", current_user_keys=list(current_user.keys()))
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="User authentication error"
)
order = await orders_service.create_order(
db,
order_data,
user_id=UUID(current_user["sub"])
user_id=UUID(user_id)
)
# Commit the transaction to persist changes
await db.commit()
logger.info("Order created successfully",
order_id=str(order.id),
order_number=order.order_number)
@@ -211,6 +220,9 @@ async def update_order(
updated_by=UUID(current_user["sub"])
)
# Commit the transaction to persist changes
await db.commit()
logger.info("Order updated successfully",
order_id=str(order_id))
@@ -260,6 +272,9 @@ async def delete_order(
await orders_service.order_repo.delete(db, order_id, tenant_id)
# Commit the transaction to persist deletion
await db.commit()
# Log audit event for order deletion
try:
await audit_logger.log_deletion(
@@ -290,4 +305,4 @@ async def delete_order(
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to delete order"
)
)

View File

@@ -195,7 +195,17 @@ class ProcurementRequirement(Base):
source_orders = Column(JSONB, nullable=True) # Orders that contributed to this requirement
source_production_batches = Column(JSONB, nullable=True) # Production batches needing this
demand_analysis = Column(JSONB, nullable=True) # Detailed demand breakdown
# Smart procurement calculation metadata
calculation_method = Column(String(100), nullable=True) # Method used: REORDER_POINT_TRIGGERED, FORECAST_DRIVEN_PROACTIVE, etc.
ai_suggested_quantity = Column(Numeric(12, 3), nullable=True) # Pure AI forecast quantity
adjusted_quantity = Column(Numeric(12, 3), nullable=True) # Final quantity after applying constraints
adjustment_reason = Column(Text, nullable=True) # Human-readable explanation of adjustments
price_tier_applied = Column(JSONB, nullable=True) # Price tier information if applicable
supplier_minimum_applied = Column(Boolean, nullable=False, default=False) # Whether supplier minimum was enforced
storage_limit_applied = Column(Boolean, nullable=False, default=False) # Whether storage limit was hit
reorder_rule_applied = Column(Boolean, nullable=False, default=False) # Whether reorder rules were used
# Approval and authorization
approved_quantity = Column(Numeric(12, 3), nullable=True)
approved_cost = Column(Numeric(12, 2), nullable=True)

View File

@@ -156,7 +156,8 @@ class BaseRepository(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
db: AsyncSession,
*,
obj_in: CreateSchemaType,
created_by: Optional[UUID] = None
created_by: Optional[UUID] = None,
tenant_id: Optional[UUID] = None
) -> ModelType:
"""Create a new record"""
try:
@@ -166,6 +167,10 @@ class BaseRepository(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
else:
obj_data = obj_in
# Add tenant_id if the model supports it and it's provided
if tenant_id and hasattr(self.model, 'tenant_id'):
obj_data['tenant_id'] = tenant_id
# Add created_by if the model supports it
if created_by and hasattr(self.model, 'created_by'):
obj_data['created_by'] = created_by
@@ -281,4 +286,4 @@ class BaseRepository(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
model=self.model.__name__,
id=str(id),
error=str(e))
raise
raise
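A minimal sketch of the new tenant injection behaviour in isolation; DummyModel and build_create_kwargs are illustrative stand-ins, not part of the repository:
from uuid import uuid4
class DummyModel:
    # Stand-in for any SQLAlchemy model that carries tenant/audit columns
    tenant_id = None
    created_by = None
def build_create_kwargs(obj_data, model, tenant_id=None, created_by=None):
    # Mirrors the conditional injection performed by BaseRepository.create
    if tenant_id and hasattr(model, 'tenant_id'):
        obj_data['tenant_id'] = tenant_id
    if created_by and hasattr(model, 'created_by'):
        obj_data['created_by'] = created_by
    return obj_data
data = build_create_kwargs({'name': 'Demo customer'}, DummyModel,
                           tenant_id=uuid4(), created_by=uuid4())
assert 'tenant_id' in data and 'created_by' in data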

View File

@@ -402,6 +402,87 @@ class OrderRepository(BaseRepository[CustomerOrder, OrderCreate, OrderUpdate]):
)
)
# Calculate repeat customers rate
# Count customers who have made more than one order
repeat_customers_query = await db.execute(
select(func.count()).select_from(
select(CustomerOrder.customer_id)
.where(CustomerOrder.tenant_id == tenant_id)
.group_by(CustomerOrder.customer_id)
.having(func.count(CustomerOrder.id) > 1)
.subquery()
)
)
total_customers_query = await db.execute(
select(func.count(func.distinct(CustomerOrder.customer_id))).where(
CustomerOrder.tenant_id == tenant_id
)
)
repeat_customers_count = repeat_customers_query.scalar() or 0
total_customers_count = total_customers_query.scalar() or 0
repeat_customers_rate = Decimal("0.0")
if total_customers_count > 0:
repeat_customers_rate = Decimal(str(repeat_customers_count)) / Decimal(str(total_customers_count))
repeat_customers_rate = repeat_customers_rate * Decimal("100.0") # Convert to percentage
# Calculate order fulfillment rate
total_orders_query = await db.execute(
select(func.count()).where(
and_(
CustomerOrder.tenant_id == tenant_id,
CustomerOrder.status != "cancelled"
)
)
)
fulfilled_orders_query = await db.execute(
select(func.count()).where(
and_(
CustomerOrder.tenant_id == tenant_id,
CustomerOrder.status.in_(["delivered", "completed"])
)
)
)
total_orders_count = total_orders_query.scalar() or 0
fulfilled_orders_count = fulfilled_orders_query.scalar() or 0
fulfillment_rate = Decimal("0.0")
if total_orders_count > 0:
fulfillment_rate = Decimal(str(fulfilled_orders_count)) / Decimal(str(total_orders_count))
fulfillment_rate = fulfillment_rate * Decimal("100.0") # Convert to percentage
# Calculate on-time delivery rate
on_time_delivered_query = await db.execute(
select(func.count()).where(
and_(
CustomerOrder.tenant_id == tenant_id,
CustomerOrder.status == "delivered",
CustomerOrder.actual_delivery_date <= CustomerOrder.requested_delivery_date
)
)
)
total_delivered_query = await db.execute(
select(func.count()).where(
and_(
CustomerOrder.tenant_id == tenant_id,
CustomerOrder.status == "delivered"
)
)
)
on_time_delivered_count = on_time_delivered_query.scalar() or 0
total_delivered_count = total_delivered_query.scalar() or 0
on_time_delivery_rate = Decimal("0.0")
if total_delivered_count > 0:
on_time_delivery_rate = Decimal(str(on_time_delivered_count)) / Decimal(str(total_delivered_count))
on_time_delivery_rate = on_time_delivery_rate * Decimal("100.0") # Convert to percentage
return {
"total_orders_today": orders_today.scalar(),
"total_orders_this_week": orders_week.scalar(),
@@ -410,7 +491,16 @@ class OrderRepository(BaseRepository[CustomerOrder, OrderCreate, OrderUpdate]):
"revenue_this_week": revenue_week.scalar(),
"revenue_this_month": revenue_month.scalar(),
"status_breakdown": status_breakdown,
"average_order_value": avg_order_value.scalar()
"average_order_value": avg_order_value.scalar(),
"repeat_customers_rate": repeat_customers_rate,
"fulfillment_rate": fulfillment_rate,
"on_time_delivery_rate": on_time_delivery_rate,
"repeat_customers_count": repeat_customers_count,
"total_customers_count": total_customers_count,
"total_orders_count": total_orders_count,
"fulfilled_orders_count": fulfilled_orders_count,
"on_time_delivered_count": on_time_delivered_count,
"total_delivered_count": total_delivered_count
}
except Exception as e:
logger.error("Error getting dashboard metrics", error=str(e))

View File

@@ -51,7 +51,6 @@ class CustomerBase(BaseModel):
class CustomerCreate(CustomerBase):
customer_code: str = Field(..., min_length=1, max_length=50)
tenant_id: UUID
class CustomerUpdate(BaseModel):
@@ -288,7 +287,6 @@ class ProcurementPlanBase(BaseModel):
class ProcurementPlanCreate(ProcurementPlanBase):
tenant_id: UUID
requirements: List[ProcurementRequirementCreate] = Field(..., min_items=1)
@@ -395,4 +393,4 @@ class ProcurementPlanningData(BaseModel):
# Recommendations
recommended_purchases: List[Dict[str, Any]]
critical_shortages: List[Dict[str, Any]]
critical_shortages: List[Dict[str, Any]]

View File

@@ -75,6 +75,16 @@ class ProcurementRequirementCreate(ProcurementRequirementBase):
quality_specifications: Optional[Dict[str, Any]] = None
procurement_notes: Optional[str] = None
# Smart procurement calculation metadata
calculation_method: Optional[str] = Field(None, max_length=100)
ai_suggested_quantity: Optional[Decimal] = Field(None, ge=0)
adjusted_quantity: Optional[Decimal] = Field(None, ge=0)
adjustment_reason: Optional[str] = None
price_tier_applied: Optional[Dict[str, Any]] = None
supplier_minimum_applied: bool = False
storage_limit_applied: bool = False
reorder_rule_applied: bool = False
class ProcurementRequirementUpdate(ProcurementBase):
"""Schema for updating procurement requirements"""
@@ -101,36 +111,46 @@ class ProcurementRequirementResponse(ProcurementRequirementBase):
id: uuid.UUID
plan_id: uuid.UUID
requirement_number: str
status: str
created_at: datetime
updated_at: datetime
purchase_order_id: Optional[uuid.UUID] = None
purchase_order_number: Optional[str] = None
ordered_quantity: Decimal
ordered_at: Optional[datetime] = None
expected_delivery_date: Optional[date] = None
actual_delivery_date: Optional[date] = None
received_quantity: Decimal
delivery_status: str
fulfillment_rate: Optional[Decimal] = None
on_time_delivery: Optional[bool] = None
quality_rating: Optional[Decimal] = None
approved_quantity: Optional[Decimal] = None
approved_cost: Optional[Decimal] = None
approved_at: Optional[datetime] = None
approved_by: Optional[uuid.UUID] = None
special_requirements: Optional[str] = None
storage_requirements: Optional[str] = None
shelf_life_days: Optional[int] = None
quality_specifications: Optional[Dict[str, Any]] = None
procurement_notes: Optional[str] = None
# Smart procurement calculation metadata
calculation_method: Optional[str] = None
ai_suggested_quantity: Optional[Decimal] = None
adjusted_quantity: Optional[Decimal] = None
adjustment_reason: Optional[str] = None
price_tier_applied: Optional[Dict[str, Any]] = None
supplier_minimum_applied: bool = False
storage_limit_applied: bool = False
reorder_rule_applied: bool = False
# ================================================================
# PROCUREMENT PLAN SCHEMAS

View File

@@ -60,11 +60,10 @@ class OrdersService:
self.production_client = production_client
self.sales_client = sales_client
@transactional
async def create_order(
self,
db,
order_data: OrderCreate,
self,
db,
order_data: OrderCreate,
user_id: Optional[UUID] = None
) -> OrderResponse:
"""Create a new customer order with comprehensive processing"""
@@ -170,7 +169,6 @@ class OrdersService:
error=str(e))
raise
@transactional
async def update_order_status(
self,
db,
@@ -358,10 +356,15 @@ class OrdersService:
# Detect business model
business_model = await self.detect_business_model(db, tenant_id)
# Calculate performance metrics
fulfillment_rate = Decimal("95.0") # Calculate from actual data
on_time_delivery_rate = Decimal("92.0") # Calculate from actual data
repeat_customers_rate = Decimal("65.0") # Calculate from actual data
# Calculate performance metrics from actual data
fulfillment_rate = metrics.get("fulfillment_rate", Decimal("0.0")) # Use actual calculated rate
on_time_delivery_rate = metrics.get("on_time_delivery_rate", Decimal("0.0")) # Use actual calculated rate
repeat_customers_rate = metrics.get("repeat_customers_rate", Decimal("0.0")) # Use actual calculated rate
# Use the actual calculated values from the repository
order_fulfillment_rate = metrics.get("fulfillment_rate", Decimal("0.0"))
on_time_delivery_rate_metric = metrics.get("on_time_delivery_rate", Decimal("0.0"))
repeat_customers_rate_metric = metrics.get("repeat_customers_rate", Decimal("0.0"))
return OrdersDashboardSummary(
total_orders_today=metrics["total_orders_today"],
@@ -377,10 +380,10 @@ class OrdersService:
delivered_orders=metrics["status_breakdown"].get("delivered", 0),
total_customers=total_customers,
new_customers_this_month=new_customers_this_month,
repeat_customers_rate=repeat_customers_rate,
repeat_customers_rate=repeat_customers_rate_metric,
average_order_value=metrics["average_order_value"],
order_fulfillment_rate=fulfillment_rate,
on_time_delivery_rate=on_time_delivery_rate,
order_fulfillment_rate=order_fulfillment_rate,
on_time_delivery_rate=on_time_delivery_rate_metric,
business_model=business_model,
business_model_confidence=Decimal("85.0") if business_model else None,
recent_orders=[OrderResponse.from_orm(order) for order in recent_orders],
@@ -480,4 +483,3 @@ class OrdersService:
logger.warning("Failed to send status notification",
order_id=str(order.id),
error=str(e))

View File

@@ -29,6 +29,8 @@ from shared.config.base import BaseServiceSettings
from shared.messaging.rabbitmq import RabbitMQClient
from shared.monitoring.decorators import monitor_performance
from app.services.cache_service import get_cache_service, CacheService
from app.services.smart_procurement_calculator import SmartProcurementCalculator
from shared.utils.tenant_settings_client import TenantSettingsClient
logger = structlog.get_logger()
@@ -56,6 +58,10 @@ class ProcurementService:
self.suppliers_client = suppliers_client or SuppliersServiceClient(config)
self.cache_service = cache_service or get_cache_service()
# Initialize tenant settings client
tenant_service_url = getattr(config, 'TENANT_SERVICE_URL', 'http://tenant-service:8000')
self.tenant_settings_client = TenantSettingsClient(tenant_service_url=tenant_service_url)
# Initialize RabbitMQ client
rabbitmq_url = getattr(config, 'RABBITMQ_URL', 'amqp://guest:guest@localhost:5672/')
self.rabbitmq_client = RabbitMQClient(rabbitmq_url, "orders-service")
@@ -951,10 +957,17 @@ class ProcurementService:
seasonality_factor: float = 1.0
) -> List[Dict[str, Any]]:
"""
Create procurement requirements data with supplier integration (Bug #1 FIX)
Create procurement requirements data with smart hybrid calculation
Combines AI forecasting with ingredient reorder rules and supplier constraints
"""
requirements = []
# Get tenant procurement settings
procurement_settings = await self.tenant_settings_client.get_procurement_settings(tenant_id)
# Initialize smart calculator
calculator = SmartProcurementCalculator(procurement_settings)
for item in inventory_items:
item_id = item.get('id')
if not item_id or item_id not in forecasts:
@@ -963,27 +976,41 @@ class ProcurementService:
forecast = forecasts[item_id]
current_stock = Decimal(str(item.get('current_stock', 0)))
# Get predicted demand and apply seasonality (Feature #4)
# Get predicted demand and apply seasonality
base_predicted_demand = Decimal(str(forecast.get('predicted_demand', 0)))
predicted_demand = base_predicted_demand * Decimal(str(seasonality_factor))
# Calculate safety stock
safety_stock = predicted_demand * (request.safety_stock_percentage / 100)
total_needed = predicted_demand + safety_stock
# Round up to avoid under-ordering
total_needed_rounded = Decimal(str(math.ceil(float(total_needed))))
# Round up AI forecast to avoid under-ordering
predicted_demand_rounded = Decimal(str(math.ceil(float(predicted_demand))))
safety_stock_rounded = total_needed_rounded - predicted_demand_rounded
net_requirement = max(Decimal('0'), total_needed_rounded - current_stock)
# Get best supplier and price list for this product
best_supplier = await self._get_best_supplier_for_product(
tenant_id, item_id, suppliers
)
if net_requirement > 0:
# Bug #1 FIX: Get best supplier for this product
best_supplier = await self._get_best_supplier_for_product(
tenant_id, item_id, suppliers
)
# Get price list entry if supplier exists
price_list_entry = None
if best_supplier and best_supplier.get('price_lists'):
for pl in best_supplier.get('price_lists', []):
if pl.get('inventory_product_id') == item_id:
price_list_entry = pl
break
# Use smart calculator to determine optimal order quantity
calc_result = calculator.calculate_procurement_quantity(
ingredient=item,
supplier=best_supplier,
price_list_entry=price_list_entry,
ai_forecast_quantity=predicted_demand_rounded,
current_stock=current_stock,
safety_stock_percentage=request.safety_stock_percentage
)
# Extract calculation results
order_quantity = calc_result['order_quantity']
# Only create requirement if there's a positive order quantity
if order_quantity > 0:
requirement_number = await self.requirement_repo.generate_requirement_number(plan_id)
required_by_date = request.plan_date or date.today()
@@ -994,22 +1021,22 @@ class ProcurementService:
suggested_order_date = required_by_date - timedelta(days=lead_time_days)
latest_order_date = required_by_date - timedelta(days=1)
# Calculate expected delivery date
expected_delivery_date = suggested_order_date + timedelta(days=lead_time_days)
# Calculate priority and risk
priority = self._calculate_priority(net_requirement, current_stock, item)
# Calculate safety stock quantities
safety_stock_qty = order_quantity * (request.safety_stock_percentage / Decimal('100'))
total_needed = predicted_demand_rounded + safety_stock_qty
# Calculate priority and risk (using the adjusted quantity now)
priority = self._calculate_priority(order_quantity, current_stock, item)
risk_level = self._calculate_risk_level(item, forecast)
# Get supplier pricing if available
estimated_unit_cost = Decimal(str(item.get('avg_cost', 0)))
if best_supplier and best_supplier.get('pricing'):
# Try to find pricing for this product
supplier_price = best_supplier.get('pricing', {}).get(item_id)
if supplier_price:
estimated_unit_cost = Decimal(str(supplier_price))
# Get supplier pricing
estimated_unit_cost = Decimal(str(item.get('average_cost') or item.get('avg_cost', 0)))
if price_list_entry:
estimated_unit_cost = Decimal(str(price_list_entry.get('unit_price', estimated_unit_cost)))
# Build requirement data with smart calculation metadata
requirement_data = {
'plan_id': plan_id,
'requirement_number': requirement_number,
@@ -1019,14 +1046,14 @@ class ProcurementService:
'product_category': item.get('category', ''),
'product_type': 'product',
'required_quantity': predicted_demand_rounded,
'unit_of_measure': item.get('unit', 'units'),
'safety_stock_quantity': safety_stock_rounded,
'total_quantity_needed': total_needed_rounded,
'unit_of_measure': item.get('unit_of_measure') or item.get('unit', 'units'),
'safety_stock_quantity': safety_stock_qty,
'total_quantity_needed': total_needed,
'current_stock_level': current_stock,
'available_stock': current_stock,
'net_requirement': net_requirement,
'net_requirement': order_quantity,
'forecast_demand': predicted_demand_rounded,
'buffer_demand': safety_stock_rounded,
'buffer_demand': safety_stock_qty,
'required_by_date': required_by_date,
'suggested_order_date': suggested_order_date,
'latest_order_date': latest_order_date,
@@ -1038,12 +1065,21 @@ class ProcurementService:
'ordered_quantity': Decimal('0'),
'received_quantity': Decimal('0'),
'estimated_unit_cost': estimated_unit_cost,
'estimated_total_cost': net_requirement * estimated_unit_cost,
# Bug #1 FIX: Add supplier information
'estimated_total_cost': order_quantity * estimated_unit_cost,
'preferred_supplier_id': uuid.UUID(best_supplier['id']) if best_supplier and best_supplier.get('id') else None,
'supplier_name': best_supplier.get('name') if best_supplier else None,
'supplier_lead_time_days': lead_time_days,
'minimum_order_quantity': Decimal(str(best_supplier.get('minimum_order_quantity', 0))) if best_supplier else None,
'minimum_order_quantity': Decimal(str(price_list_entry.get('minimum_order_quantity', 0))) if price_list_entry else None,
# Smart procurement calculation metadata
'calculation_method': calc_result.get('calculation_method'),
'ai_suggested_quantity': calc_result.get('ai_suggested_quantity'),
'adjusted_quantity': calc_result.get('adjusted_quantity'),
'adjustment_reason': calc_result.get('adjustment_reason'),
'price_tier_applied': calc_result.get('price_tier_applied'),
'supplier_minimum_applied': calc_result.get('supplier_minimum_applied', False),
'storage_limit_applied': calc_result.get('storage_limit_applied', False),
'reorder_rule_applied': calc_result.get('reorder_rule_applied', False),
}
requirements.append(requirement_data)

View File

@@ -0,0 +1,339 @@
# services/orders/app/services/smart_procurement_calculator.py
"""
Smart Procurement Calculator
Implements multi-constraint procurement quantity optimization combining:
- AI demand forecasting
- Ingredient reorder rules (reorder_point, reorder_quantity)
- Supplier constraints (minimum_order_quantity, minimum_order_amount)
- Storage limits (max_stock_level)
- Price tier optimization
"""
import math
from decimal import Decimal
from typing import Dict, Any, List, Tuple, Optional
import structlog
logger = structlog.get_logger()
class SmartProcurementCalculator:
"""
Smart procurement quantity calculator with multi-tier constraint optimization
"""
def __init__(self, procurement_settings: Dict[str, Any]):
"""
Initialize calculator with tenant procurement settings
Args:
procurement_settings: Tenant settings dict with flags:
- use_reorder_rules: bool
- economic_rounding: bool
- respect_storage_limits: bool
- use_supplier_minimums: bool
- optimize_price_tiers: bool
"""
self.use_reorder_rules = procurement_settings.get('use_reorder_rules', True)
self.economic_rounding = procurement_settings.get('economic_rounding', True)
self.respect_storage_limits = procurement_settings.get('respect_storage_limits', True)
self.use_supplier_minimums = procurement_settings.get('use_supplier_minimums', True)
self.optimize_price_tiers = procurement_settings.get('optimize_price_tiers', True)
def calculate_procurement_quantity(
self,
ingredient: Dict[str, Any],
supplier: Optional[Dict[str, Any]],
price_list_entry: Optional[Dict[str, Any]],
ai_forecast_quantity: Decimal,
current_stock: Decimal,
safety_stock_percentage: Decimal = Decimal('20.0')
) -> Dict[str, Any]:
"""
Calculate optimal procurement quantity using smart hybrid approach
Args:
ingredient: Ingredient data with reorder_point, reorder_quantity, max_stock_level
supplier: Supplier data with minimum_order_amount
price_list_entry: Price list with minimum_order_quantity, tier_pricing
ai_forecast_quantity: AI-predicted demand quantity
current_stock: Current stock level
safety_stock_percentage: Safety stock buffer percentage
Returns:
Dict with:
- order_quantity: Final calculated quantity to order
- calculation_method: Method used (e.g., 'REORDER_POINT_TRIGGERED')
- ai_suggested_quantity: Original AI forecast
- adjusted_quantity: Final quantity after constraints
- adjustment_reason: Human-readable explanation
- warnings: List of warnings/notes
- supplier_minimum_applied: bool
- storage_limit_applied: bool
- reorder_rule_applied: bool
- price_tier_applied: Dict or None
"""
warnings = []
result = {
'ai_suggested_quantity': ai_forecast_quantity,
'supplier_minimum_applied': False,
'storage_limit_applied': False,
'reorder_rule_applied': False,
'price_tier_applied': None
}
# Extract ingredient parameters
reorder_point = Decimal(str(ingredient.get('reorder_point', 0)))
reorder_quantity = Decimal(str(ingredient.get('reorder_quantity', 0)))
low_stock_threshold = Decimal(str(ingredient.get('low_stock_threshold', 0)))
max_stock_level = Decimal(str(ingredient.get('max_stock_level') or 'Infinity'))
# Extract supplier/price list parameters
supplier_min_qty = Decimal('0')
supplier_min_amount = Decimal('0')
tier_pricing = []
if price_list_entry:
supplier_min_qty = Decimal(str(price_list_entry.get('minimum_order_quantity', 0)))
tier_pricing = price_list_entry.get('tier_pricing') or []
if supplier:
supplier_min_amount = Decimal(str(supplier.get('minimum_order_amount', 0)))
# Calculate AI-based net requirement with safety stock
safety_stock = ai_forecast_quantity * (safety_stock_percentage / Decimal('100'))
total_needed = ai_forecast_quantity + safety_stock
ai_net_requirement = max(Decimal('0'), total_needed - current_stock)
# TIER 1: Critical Safety Check (Emergency Override)
if self.use_reorder_rules and current_stock <= low_stock_threshold:
base_order = max(reorder_quantity, ai_net_requirement)
result['calculation_method'] = 'CRITICAL_STOCK_EMERGENCY'
result['reorder_rule_applied'] = True
warnings.append(f"CRITICAL: Stock ({current_stock}) below threshold ({low_stock_threshold})")
order_qty = base_order
# TIER 2: Reorder Point Triggered
elif self.use_reorder_rules and current_stock <= reorder_point:
base_order = max(reorder_quantity, ai_net_requirement)
result['calculation_method'] = 'REORDER_POINT_TRIGGERED'
result['reorder_rule_applied'] = True
warnings.append(f"Reorder point triggered: stock ({current_stock}) ≤ reorder point ({reorder_point})")
order_qty = base_order
# TIER 3: Forecast-Driven (Above reorder point, no immediate need)
elif ai_net_requirement > 0:
order_qty = ai_net_requirement
result['calculation_method'] = 'FORECAST_DRIVEN_PROACTIVE'
warnings.append(f"AI forecast suggests ordering {ai_net_requirement} units")
# TIER 4: No Order Needed
else:
result['order_quantity'] = Decimal('0')
result['adjusted_quantity'] = Decimal('0')
result['calculation_method'] = 'SUFFICIENT_STOCK'
result['adjustment_reason'] = f"Current stock ({current_stock}) is sufficient. No order needed."
result['warnings'] = warnings
return result
# Apply Economic Rounding (reorder_quantity multiples)
if self.economic_rounding and reorder_quantity > 0:
multiples = math.ceil(float(order_qty / reorder_quantity))
rounded_qty = Decimal(multiples) * reorder_quantity
if rounded_qty > order_qty:
warnings.append(f"Rounded to {multiples}× reorder quantity ({reorder_quantity}) = {rounded_qty}")
order_qty = rounded_qty
# Apply Supplier Minimum Quantity Constraint
if self.use_supplier_minimums and supplier_min_qty > 0:
if order_qty < supplier_min_qty:
warnings.append(f"Increased from {order_qty} to supplier minimum ({supplier_min_qty})")
order_qty = supplier_min_qty
result['supplier_minimum_applied'] = True
else:
# Round to multiples of minimum_order_quantity (packaging constraint)
multiples = math.ceil(float(order_qty / supplier_min_qty))
rounded_qty = Decimal(multiples) * supplier_min_qty
if rounded_qty > order_qty:
warnings.append(f"Rounded to {multiples}× supplier packaging ({supplier_min_qty}) = {rounded_qty}")
result['supplier_minimum_applied'] = True
order_qty = rounded_qty
# Apply Price Tier Optimization
if self.optimize_price_tiers and tier_pricing and price_list_entry:
unit_price = Decimal(str(price_list_entry.get('unit_price', 0)))
tier_result = self._optimize_price_tier(
order_qty,
unit_price,
tier_pricing,
current_stock,
max_stock_level
)
if tier_result['tier_applied']:
order_qty = tier_result['optimized_quantity']
result['price_tier_applied'] = tier_result['tier_info']
warnings.append(tier_result['message'])
# Apply Storage Capacity Constraint
if self.respect_storage_limits and max_stock_level != Decimal('Infinity'):
if (current_stock + order_qty) > max_stock_level:
capped_qty = max(Decimal('0'), max_stock_level - current_stock)
warnings.append(f"Capped from {order_qty} to {capped_qty} due to storage limit ({max_stock_level})")
order_qty = capped_qty
result['storage_limit_applied'] = True
result['calculation_method'] += '_STORAGE_LIMITED'
# Check supplier minimum_order_amount (total order value constraint)
if self.use_supplier_minimums and supplier_min_amount > 0 and price_list_entry:
unit_price = Decimal(str(price_list_entry.get('unit_price', 0)))
order_value = order_qty * unit_price
if order_value < supplier_min_amount:
warnings.append(
f"⚠️ Order value €{order_value:.2f} < supplier minimum €{supplier_min_amount:.2f}. "
"This item needs to be combined with other products in the same PO."
)
result['calculation_method'] += '_NEEDS_CONSOLIDATION'
# Build final result
result['order_quantity'] = order_qty
result['adjusted_quantity'] = order_qty
result['adjustment_reason'] = self._build_adjustment_reason(
ai_forecast_quantity,
ai_net_requirement,
order_qty,
warnings,
result
)
result['warnings'] = warnings
return result
def _optimize_price_tier(
self,
current_qty: Decimal,
base_unit_price: Decimal,
tier_pricing: List[Dict[str, Any]],
current_stock: Decimal,
max_stock_level: Decimal
) -> Dict[str, Any]:
"""
Optimize order quantity to capture volume discount tiers if beneficial
Args:
current_qty: Current calculated order quantity
base_unit_price: Base unit price without tiers
tier_pricing: List of tier dicts with 'quantity' and 'price'
current_stock: Current stock level
max_stock_level: Maximum storage capacity
Returns:
Dict with tier_applied (bool), optimized_quantity, tier_info, message
"""
if not tier_pricing:
return {'tier_applied': False, 'optimized_quantity': current_qty}
# Sort tiers by quantity
sorted_tiers = sorted(tier_pricing, key=lambda x: x['quantity'])
best_tier = None
best_savings = Decimal('0')
for tier in sorted_tiers:
tier_qty = Decimal(str(tier['quantity']))
tier_price = Decimal(str(tier['price']))
# Skip if tier quantity is below current quantity (already captured)
if tier_qty <= current_qty:
continue
# Skip if tier would exceed storage capacity
if self.respect_storage_limits and (current_stock + tier_qty) > max_stock_level:
continue
# Skip if tier is more than 50% above current quantity (too much excess)
if tier_qty > current_qty * Decimal('1.5'):
continue
# Calculate savings
current_cost = current_qty * base_unit_price
tier_cost = tier_qty * tier_price
savings = current_cost - tier_cost
if savings > best_savings:
best_savings = savings
best_tier = {
'quantity': tier_qty,
'price': tier_price,
'savings': savings
}
if best_tier:
return {
'tier_applied': True,
'optimized_quantity': best_tier['quantity'],
'tier_info': best_tier,
'message': (
f"Upgraded to {best_tier['quantity']} units "
f"@ €{best_tier['price']}/unit "
f"(saves €{best_tier['savings']:.2f})"
)
}
return {'tier_applied': False, 'optimized_quantity': current_qty}
def _build_adjustment_reason(
self,
ai_forecast: Decimal,
ai_net_requirement: Decimal,
final_quantity: Decimal,
warnings: List[str],
result: Dict[str, Any]
) -> str:
"""
Build human-readable explanation of quantity adjustments
Args:
ai_forecast: Original AI forecast
ai_net_requirement: AI forecast + safety stock - current stock
final_quantity: Final order quantity after all adjustments
warnings: List of warning messages
result: Calculation result dict
Returns:
Human-readable adjustment explanation
"""
parts = []
# Start with calculation method
method = result.get('calculation_method', 'UNKNOWN')
parts.append(f"Method: {method.replace('_', ' ').title()}")
# AI forecast base
parts.append(f"AI Forecast: {ai_forecast} units, Net Requirement: {ai_net_requirement} units")
# Adjustments applied
adjustments = []
if result.get('reorder_rule_applied'):
adjustments.append("reorder rules")
if result.get('supplier_minimum_applied'):
adjustments.append("supplier minimums")
if result.get('storage_limit_applied'):
adjustments.append("storage limits")
if result.get('price_tier_applied'):
adjustments.append("price tier optimization")
if adjustments:
parts.append(f"Adjustments: {', '.join(adjustments)}")
# Final quantity
parts.append(f"Final Quantity: {final_quantity} units")
# Key warnings
if warnings:
key_warnings = [w for w in warnings if '⚠️' in w or 'CRITICAL' in w or 'saves €' in w]
if key_warnings:
parts.append(f"Notes: {'; '.join(key_warnings)}")
return " | ".join(parts)

View File

@@ -0,0 +1,44 @@
"""add smart procurement calculation fields
Revision ID: smart_procurement_v1
Revises: 7f882c2ca25c
Create Date: 2025-10-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
# revision identifiers, used by Alembic.
revision = 'smart_procurement_v1'
down_revision = '7f882c2ca25c'
branch_labels = None
depends_on = None
def upgrade():
"""Add smart procurement calculation tracking fields"""
# Add new columns to procurement_requirements table
op.add_column('procurement_requirements', sa.Column('calculation_method', sa.String(100), nullable=True))
op.add_column('procurement_requirements', sa.Column('ai_suggested_quantity', sa.Numeric(12, 3), nullable=True))
op.add_column('procurement_requirements', sa.Column('adjusted_quantity', sa.Numeric(12, 3), nullable=True))
op.add_column('procurement_requirements', sa.Column('adjustment_reason', sa.Text, nullable=True))
op.add_column('procurement_requirements', sa.Column('price_tier_applied', JSONB, nullable=True))
op.add_column('procurement_requirements', sa.Column('supplier_minimum_applied', sa.Boolean, nullable=False, server_default='false'))
op.add_column('procurement_requirements', sa.Column('storage_limit_applied', sa.Boolean, nullable=False, server_default='false'))
op.add_column('procurement_requirements', sa.Column('reorder_rule_applied', sa.Boolean, nullable=False, server_default='false'))
def downgrade():
"""Remove smart procurement calculation tracking fields"""
# Remove columns from procurement_requirements table
op.drop_column('procurement_requirements', 'reorder_rule_applied')
op.drop_column('procurement_requirements', 'storage_limit_applied')
op.drop_column('procurement_requirements', 'supplier_minimum_applied')
op.drop_column('procurement_requirements', 'price_tier_applied')
op.drop_column('procurement_requirements', 'adjustment_reason')
op.drop_column('procurement_requirements', 'adjusted_quantity')
op.drop_column('procurement_requirements', 'ai_suggested_quantity')
op.drop_column('procurement_requirements', 'calculation_method')

View File

@@ -132,7 +132,6 @@ async def clone_demo_data(
instructions=recipe.instructions,
preparation_notes=recipe.preparation_notes,
storage_instructions=recipe.storage_instructions,
quality_standards=recipe.quality_standards,
serves_count=recipe.serves_count,
nutritional_info=recipe.nutritional_info,
allergen_info=recipe.allergen_info,
@@ -142,9 +141,7 @@ async def clone_demo_data(
maximum_batch_size=recipe.maximum_batch_size,
optimal_production_temperature=recipe.optimal_production_temperature,
optimal_humidity=recipe.optimal_humidity,
quality_check_points=recipe.quality_check_points,
quality_check_configuration=recipe.quality_check_configuration,
common_issues=recipe.common_issues,
status=recipe.status,
is_seasonal=recipe.is_seasonal,
season_start_month=recipe.season_start_month,

View File

@@ -3,7 +3,7 @@
Recipes API - Atomic CRUD operations on Recipe model
"""
from fastapi import APIRouter, Depends, HTTPException, Header, Query
from fastapi import APIRouter, Depends, HTTPException, Header, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional
from uuid import UUID
@@ -18,6 +18,7 @@ from ..schemas.recipes import (
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role
from shared.auth.decorators import get_current_user_dep
from shared.security import create_audit_logger, AuditSeverity, AuditAction
route_builder = RouteBuilder('recipes')
@@ -43,6 +44,7 @@ async def create_recipe(
tenant_id: UUID,
recipe_data: RecipeCreate,
user_id: UUID = Depends(get_user_id),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create a new recipe"""
@@ -86,6 +88,7 @@ async def search_recipes(
difficulty_level: Optional[int] = Query(None, ge=1, le=5),
limit: int = Query(100, ge=1, le=1000),
offset: int = Query(0, ge=0),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Search recipes with filters"""
@@ -135,7 +138,7 @@ async def count_recipes(
return {"count": count}
except Exception as e:
logger.error(f"Error counting recipes for tenant {tenant_id}: {e}")
logger.error(f"Error counting recipes for tenant: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@@ -151,6 +154,7 @@ async def get_recipe(
"""Get recipe by ID with ingredients"""
try:
recipe_service = RecipeService(db)
recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
if not recipe:
@@ -178,6 +182,7 @@ async def update_recipe(
recipe_id: UUID,
recipe_data: RecipeUpdate,
user_id: UUID = Depends(get_user_id),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update an existing recipe"""
@@ -224,6 +229,7 @@ async def delete_recipe(
tenant_id: UUID,
recipe_id: UUID,
user_id: UUID = Depends(get_user_id),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete a recipe (Admin+ only)"""
@@ -237,6 +243,20 @@ async def delete_recipe(
if existing_recipe["tenant_id"] != str(tenant_id):
raise HTTPException(status_code=403, detail="Access denied")
# Check if deletion is safe
summary = await recipe_service.get_deletion_summary(recipe_id)
if not summary["success"]:
raise HTTPException(status_code=500, detail=summary["error"])
if not summary["data"]["can_delete"]:
raise HTTPException(
status_code=400,
detail={
"message": "Cannot delete recipe with active dependencies",
"warnings": summary["data"]["warnings"]
}
)
# Capture recipe data before deletion
recipe_data = {
"recipe_name": existing_recipe.get("name"),
@@ -281,3 +301,91 @@ async def delete_recipe(
except Exception as e:
logger.error(f"Error deleting recipe {recipe_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.patch(
route_builder.build_custom_route(RouteCategory.OPERATIONS, ["{recipe_id}", "archive"])
)
@require_user_role(['admin', 'owner'])
async def archive_recipe(
tenant_id: UUID,
recipe_id: UUID,
user_id: UUID = Depends(get_user_id),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Archive (soft delete) a recipe by setting status to ARCHIVED"""
try:
recipe_service = RecipeService(db)
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
if not existing_recipe:
raise HTTPException(status_code=404, detail="Recipe not found")
if existing_recipe["tenant_id"] != str(tenant_id):
raise HTTPException(status_code=403, detail="Not authorized")
# Check status transitions (business rule)
current_status = existing_recipe.get("status")
if current_status == "DISCONTINUED":
raise HTTPException(
status_code=400,
detail="Cannot archive a discontinued recipe. Use hard delete instead."
)
# Update status to ARCHIVED
from ..schemas.recipes import RecipeUpdate, RecipeStatus
update_data = RecipeUpdate(status=RecipeStatus.ARCHIVED)
updated_recipe = await recipe_service.update_recipe(
recipe_id,
update_data.dict(exclude_unset=True),
user_id
)
if not updated_recipe["success"]:
raise HTTPException(status_code=400, detail=updated_recipe["error"])
logger.info(f"Archived recipe {recipe_id} by user {user_id}")
return RecipeResponse(**updated_recipe["data"])
except HTTPException:
raise
except Exception as e:
logger.error(f"Error archiving recipe: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get(
route_builder.build_custom_route(RouteCategory.OPERATIONS, ["{recipe_id}", "deletion-summary"])
)
@require_user_role(['admin', 'owner'])
async def get_recipe_deletion_summary(
tenant_id: UUID,
recipe_id: UUID,
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get summary of what will be affected by deleting this recipe"""
try:
recipe_service = RecipeService(db)
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
if not existing_recipe:
raise HTTPException(status_code=404, detail="Recipe not found")
if existing_recipe["tenant_id"] != str(tenant_id):
raise HTTPException(status_code=403, detail="Not authorized")
summary = await recipe_service.get_deletion_summary(recipe_id)
if not summary["success"]:
raise HTTPException(status_code=500, detail=summary["error"])
return summary["data"]
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting deletion summary: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

View File

@@ -95,7 +95,6 @@ class Recipe(Base):
instructions = Column(JSONB, nullable=True) # Structured step-by-step instructions
preparation_notes = Column(Text, nullable=True)
storage_instructions = Column(Text, nullable=True)
quality_standards = Column(Text, nullable=True)
# Recipe metadata
serves_count = Column(Integer, nullable=True) # How many people/portions
@@ -111,9 +110,7 @@ class Recipe(Base):
optimal_humidity = Column(Float, nullable=True) # Percentage
# Quality control
quality_check_points = Column(JSONB, nullable=True) # Key checkpoints during production
quality_check_configuration = Column(JSONB, nullable=True) # Stage-based quality check config
common_issues = Column(JSONB, nullable=True) # Known issues and solutions
# Status and lifecycle
status = Column(SQLEnum(RecipeStatus), nullable=False, default=RecipeStatus.DRAFT, index=True)
@@ -170,7 +167,6 @@ class Recipe(Base):
'instructions': self.instructions,
'preparation_notes': self.preparation_notes,
'storage_instructions': self.storage_instructions,
'quality_standards': self.quality_standards,
'serves_count': self.serves_count,
'nutritional_info': self.nutritional_info,
'allergen_info': self.allergen_info,
@@ -180,9 +176,7 @@ class Recipe(Base):
'maximum_batch_size': self.maximum_batch_size,
'optimal_production_temperature': self.optimal_production_temperature,
'optimal_humidity': self.optimal_humidity,
'quality_check_points': self.quality_check_points,
'quality_check_configuration': self.quality_check_configuration,
'common_issues': self.common_issues,
'status': self.status.value if self.status else None,
'is_seasonal': self.is_seasonal,
'season_start_month': self.season_start_month,

View File

@@ -47,8 +47,8 @@ class RecipeRepository(BaseRepository[Recipe, RecipeCreate, RecipeUpdate]):
"category": recipe.category,
"cuisine_type": recipe.cuisine_type,
"difficulty_level": recipe.difficulty_level,
"yield_quantity": recipe.yield_quantity,
"yield_unit": recipe.yield_unit.value if recipe.yield_unit else None,
"yield_quantity": float(recipe.yield_quantity),
"yield_unit": recipe.yield_unit.value if hasattr(recipe.yield_unit, 'value') else recipe.yield_unit,
"prep_time_minutes": recipe.prep_time_minutes,
"cook_time_minutes": recipe.cook_time_minutes,
"total_time_minutes": recipe.total_time_minutes,
@@ -61,29 +61,46 @@ class RecipeRepository(BaseRepository[Recipe, RecipeCreate, RecipeUpdate]):
"instructions": recipe.instructions,
"preparation_notes": recipe.preparation_notes,
"storage_instructions": recipe.storage_instructions,
"quality_standards": recipe.quality_standards,
"quality_check_configuration": recipe.quality_check_configuration,
"serves_count": recipe.serves_count,
"nutritional_info": recipe.nutritional_info,
"allergen_info": recipe.allergen_info,
"dietary_tags": recipe.dietary_tags,
"batch_size_multiplier": recipe.batch_size_multiplier,
"minimum_batch_size": recipe.minimum_batch_size,
"maximum_batch_size": recipe.maximum_batch_size,
"status": recipe.status,
"batch_size_multiplier": float(recipe.batch_size_multiplier),
"minimum_batch_size": float(recipe.minimum_batch_size) if recipe.minimum_batch_size else None,
"maximum_batch_size": float(recipe.maximum_batch_size) if recipe.maximum_batch_size else None,
"optimal_production_temperature": float(recipe.optimal_production_temperature) if recipe.optimal_production_temperature else None,
"optimal_humidity": float(recipe.optimal_humidity) if recipe.optimal_humidity else None,
"status": recipe.status.value if hasattr(recipe.status, 'value') else recipe.status,
"is_seasonal": recipe.is_seasonal,
"season_start_month": recipe.season_start_month,
"season_end_month": recipe.season_end_month,
"is_signature_item": recipe.is_signature_item,
"created_at": recipe.created_at.isoformat() if recipe.created_at else None,
"updated_at": recipe.updated_at.isoformat() if recipe.updated_at else None,
"created_by": str(recipe.created_by) if recipe.created_by else None,
"updated_by": str(recipe.updated_by) if hasattr(recipe, 'updated_by') and recipe.updated_by else None,
"ingredients": [
{
"id": str(ingredient.id),
"tenant_id": str(ingredient.tenant_id),
"recipe_id": str(ingredient.recipe_id),
"ingredient_id": str(ingredient.ingredient_id),
"quantity": float(ingredient.quantity),
"unit": ingredient.unit,
"unit": ingredient.unit.value if hasattr(ingredient.unit, 'value') else ingredient.unit,
"quantity_in_base_unit": float(ingredient.quantity_in_base_unit) if ingredient.quantity_in_base_unit else None,
"alternative_quantity": float(ingredient.alternative_quantity) if ingredient.alternative_quantity else None,
"alternative_unit": ingredient.alternative_unit.value if hasattr(ingredient.alternative_unit, 'value') and ingredient.alternative_unit else None,
"preparation_method": ingredient.preparation_method,
"notes": ingredient.notes
"ingredient_notes": ingredient.ingredient_notes,
"is_optional": ingredient.is_optional,
"ingredient_order": ingredient.ingredient_order,
"ingredient_group": ingredient.ingredient_group,
"substitution_options": ingredient.substitution_options,
"substitution_ratio": float(ingredient.substitution_ratio) if ingredient.substitution_ratio else None,
"unit_cost": float(ingredient.unit_cost) if hasattr(ingredient, 'unit_cost') and ingredient.unit_cost else None,
"total_cost": float(ingredient.total_cost) if hasattr(ingredient, 'total_cost') and ingredient.total_cost else None,
"cost_updated_at": ingredient.cost_updated_at.isoformat() if hasattr(ingredient, 'cost_updated_at') and ingredient.cost_updated_at else None
}
for ingredient in recipe.ingredients
] if hasattr(recipe, 'ingredients') else []
@@ -151,8 +168,8 @@ class RecipeRepository(BaseRepository[Recipe, RecipeCreate, RecipeUpdate]):
"category": recipe.category,
"cuisine_type": recipe.cuisine_type,
"difficulty_level": recipe.difficulty_level,
"yield_quantity": recipe.yield_quantity,
"yield_unit": recipe.yield_unit.value if recipe.yield_unit else None,
"yield_quantity": float(recipe.yield_quantity),
"yield_unit": recipe.yield_unit.value if hasattr(recipe.yield_unit, 'value') else recipe.yield_unit,
"prep_time_minutes": recipe.prep_time_minutes,
"cook_time_minutes": recipe.cook_time_minutes,
"total_time_minutes": recipe.total_time_minutes,
@@ -165,21 +182,26 @@ class RecipeRepository(BaseRepository[Recipe, RecipeCreate, RecipeUpdate]):
"instructions": recipe.instructions,
"preparation_notes": recipe.preparation_notes,
"storage_instructions": recipe.storage_instructions,
"quality_standards": recipe.quality_standards,
"quality_check_configuration": recipe.quality_check_configuration,
"serves_count": recipe.serves_count,
"nutritional_info": recipe.nutritional_info,
"allergen_info": recipe.allergen_info,
"dietary_tags": recipe.dietary_tags,
"batch_size_multiplier": recipe.batch_size_multiplier,
"minimum_batch_size": recipe.minimum_batch_size,
"maximum_batch_size": recipe.maximum_batch_size,
"status": recipe.status,
"batch_size_multiplier": float(recipe.batch_size_multiplier),
"minimum_batch_size": float(recipe.minimum_batch_size) if recipe.minimum_batch_size else None,
"maximum_batch_size": float(recipe.maximum_batch_size) if recipe.maximum_batch_size else None,
"optimal_production_temperature": float(recipe.optimal_production_temperature) if recipe.optimal_production_temperature else None,
"optimal_humidity": float(recipe.optimal_humidity) if recipe.optimal_humidity else None,
"status": recipe.status.value if hasattr(recipe.status, 'value') else recipe.status,
"is_seasonal": recipe.is_seasonal,
"season_start_month": recipe.season_start_month,
"season_end_month": recipe.season_end_month,
"is_signature_item": recipe.is_signature_item,
"created_at": recipe.created_at.isoformat() if recipe.created_at else None,
"updated_at": recipe.updated_at.isoformat() if recipe.updated_at else None
"updated_at": recipe.updated_at.isoformat() if recipe.updated_at else None,
"created_by": str(recipe.created_by) if recipe.created_by else None,
"updated_by": str(recipe.updated_by) if hasattr(recipe, 'updated_by') and recipe.updated_by else None,
"ingredients": [] # For list view, don't load ingredients to improve performance
}
for recipe in recipes
]

View File

@@ -117,7 +117,6 @@ class RecipeCreate(BaseModel):
instructions: Optional[Dict[str, Any]] = None
preparation_notes: Optional[str] = None
storage_instructions: Optional[str] = None
quality_standards: Optional[str] = None
quality_check_configuration: Optional[RecipeQualityConfiguration] = None
serves_count: Optional[int] = Field(None, ge=1)
nutritional_info: Optional[Dict[str, Any]] = None
@@ -128,8 +127,6 @@ class RecipeCreate(BaseModel):
maximum_batch_size: Optional[float] = Field(None, gt=0)
optimal_production_temperature: Optional[float] = None
optimal_humidity: Optional[float] = Field(None, ge=0, le=100)
quality_check_points: Optional[Dict[str, Any]] = None
common_issues: Optional[Dict[str, Any]] = None
is_seasonal: bool = False
season_start_month: Optional[int] = Field(None, ge=1, le=12)
season_end_month: Optional[int] = Field(None, ge=1, le=12)
@@ -156,7 +153,6 @@ class RecipeUpdate(BaseModel):
instructions: Optional[Dict[str, Any]] = None
preparation_notes: Optional[str] = None
storage_instructions: Optional[str] = None
quality_standards: Optional[str] = None
quality_check_configuration: Optional[RecipeQualityConfigurationUpdate] = None
serves_count: Optional[int] = Field(None, ge=1)
nutritional_info: Optional[Dict[str, Any]] = None
@@ -167,8 +163,6 @@ class RecipeUpdate(BaseModel):
maximum_batch_size: Optional[float] = Field(None, gt=0)
optimal_production_temperature: Optional[float] = None
optimal_humidity: Optional[float] = Field(None, ge=0, le=100)
quality_check_points: Optional[Dict[str, Any]] = None
common_issues: Optional[Dict[str, Any]] = None
status: Optional[RecipeStatus] = None
is_seasonal: Optional[bool] = None
season_start_month: Optional[int] = Field(None, ge=1, le=12)
@@ -204,7 +198,6 @@ class RecipeResponse(BaseModel):
instructions: Optional[Dict[str, Any]] = None
preparation_notes: Optional[str] = None
storage_instructions: Optional[str] = None
quality_standards: Optional[str] = None
quality_check_configuration: Optional[RecipeQualityConfiguration] = None
serves_count: Optional[int] = None
nutritional_info: Optional[Dict[str, Any]] = None
@@ -215,8 +208,6 @@ class RecipeResponse(BaseModel):
maximum_batch_size: Optional[float] = None
optimal_production_temperature: Optional[float] = None
optimal_humidity: Optional[float] = None
quality_check_points: Optional[Dict[str, Any]] = None
common_issues: Optional[Dict[str, Any]] = None
status: str
is_seasonal: bool
season_start_month: Optional[int] = None
@@ -232,6 +223,20 @@ class RecipeResponse(BaseModel):
from_attributes = True
class RecipeDeletionSummary(BaseModel):
"""Summary of what will be deleted when hard-deleting a recipe"""
recipe_id: UUID
recipe_name: str
recipe_code: str
production_batches_count: int
recipe_ingredients_count: int
dependent_recipes_count: int # Recipes that use this as ingredient/sub-recipe
affected_orders_count: int # Orders that include this recipe
last_used_date: Optional[datetime] = None
can_delete: bool
warnings: List[str] = []
class RecipeSearchRequest(BaseModel):
"""Schema for recipe search requests"""
search_term: Optional[str] = None
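For reference, an illustrative instance of the RecipeDeletionSummary payload defined above (assuming the schema is imported; all values made up):
from uuid import uuid4
summary = RecipeDeletionSummary(
    recipe_id=uuid4(),
    recipe_name="Sourdough Baguette",
    recipe_code="RCP-001",
    production_batches_count=0,
    recipe_ingredients_count=6,
    dependent_recipes_count=0,
    affected_orders_count=0,
    can_delete=True,
    warnings=["This recipe is active. Consider archiving it first."],
)
assert summary.can_delete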

View File

@@ -67,6 +67,83 @@ class RecipeService:
logger.error(f"Error getting recipe statistics: {e}")
return {"total_recipes": 0, "active_recipes": 0, "signature_recipes": 0, "seasonal_recipes": 0}
async def get_deletion_summary(self, recipe_id: UUID) -> Dict[str, Any]:
"""Get summary of what will be affected by deleting this recipe"""
try:
from sqlalchemy import select, func
from ..models.recipes import RecipeIngredient
# Get recipe info
recipe = await self.recipe_repo.get_by_id(recipe_id)
if not recipe:
return {"success": False, "error": "Recipe not found"}
# Count recipe ingredients
ingredients_result = await self.session.execute(
select(func.count(RecipeIngredient.id))
.where(RecipeIngredient.recipe_id == recipe_id)
)
ingredients_count = ingredients_result.scalar() or 0
# Count production batches using this recipe
# Production tables are not integrated here yet, so default to 0 until they are
production_batches_count = 0
# Count dependent recipes (recipes using this as ingredient) - future feature
dependent_recipes_count = 0
# Count affected orders - would need orders service integration
affected_orders_count = 0
# Determine if deletion is safe
warnings = []
can_delete = True
if production_batches_count > 0:
warnings.append(f"Esta receta tiene {production_batches_count} lotes de producción asociados")
can_delete = False
if affected_orders_count > 0:
warnings.append(f"Esta receta está en {affected_orders_count} pedidos")
can_delete = False
if dependent_recipes_count > 0:
warnings.append(f"{dependent_recipes_count} recetas dependen de esta")
if recipe.status == RecipeStatus.ACTIVE:
warnings.append("Esta receta está activa. Considera archivarla primero.")
return {
"success": True,
"data": {
"recipe_id": str(recipe.id),
"recipe_name": recipe.name,
"recipe_code": recipe.recipe_code or "",
"production_batches_count": production_batches_count,
"recipe_ingredients_count": ingredients_count,
"dependent_recipes_count": dependent_recipes_count,
"affected_orders_count": affected_orders_count,
"last_used_date": None,
"can_delete": can_delete,
"warnings": warnings
}
}
except Exception as e:
logger.error(f"Error getting deletion summary: {e}")
return {"success": False, "error": str(e)}
async def create_recipe(
self,
recipe_data: Dict[str, Any],
@@ -74,17 +151,35 @@ class RecipeService:
created_by: UUID
) -> Dict[str, Any]:
"""Create a new recipe with ingredients"""
from ..models.recipes import Recipe, RecipeIngredient, RecipeStatus
try:
# Add metadata
recipe_data["created_by"] = created_by
recipe_data["created_at"] = datetime.utcnow()
recipe_data["updated_at"] = datetime.utcnow()
recipe_data["status"] = recipe_data.get("status", RecipeStatus.DRAFT)
# Use the shared repository's create method
recipe_create = RecipeCreate(**recipe_data)
recipe = await self.recipe_repo.create(recipe_create)
# Create Recipe model directly (without ingredients)
recipe = Recipe(**recipe_data)
self.session.add(recipe)
await self.session.flush() # Get the recipe ID
# Get the created recipe with ingredients (if the repository supports it)
# Now create ingredients with the recipe_id and tenant_id
for ing_data in ingredients_data:
ingredient = RecipeIngredient(
recipe_id=recipe.id,
tenant_id=recipe.tenant_id, # Add tenant_id from recipe
**ing_data
)
self.session.add(ingredient)
await self.session.flush()
# Commit the transaction to persist changes
await self.session.commit()
# Get the created recipe with ingredients
result = await self.recipe_repo.get_recipe_with_ingredients(recipe.id)
return {
@@ -117,6 +212,45 @@ class RecipeService:
"error": "Recipe not found"
}
# Status transition business rules
if "status" in recipe_data:
from ..models.recipes import RecipeStatus
new_status = recipe_data["status"]
current_status = existing_recipe.status
# Cannot reactivate discontinued recipes
if current_status == RecipeStatus.DISCONTINUED:
if new_status != RecipeStatus.DISCONTINUED:
return {
"success": False,
"error": "Cannot reactivate a discontinued recipe. Create a new version instead."
}
# Can only archive active or testing recipes
if new_status == RecipeStatus.ARCHIVED:
if current_status not in [RecipeStatus.ACTIVE, RecipeStatus.TESTING]:
return {
"success": False,
"error": "Can only archive active or testing recipes."
}
# Cannot activate drafts without ingredients
if new_status == RecipeStatus.ACTIVE and current_status == RecipeStatus.DRAFT:
# Check if recipe has ingredients
from sqlalchemy import select, func
from ..models.recipes import RecipeIngredient
result = await self.session.execute(
select(func.count(RecipeIngredient.id)).where(RecipeIngredient.recipe_id == recipe_id)
)
ingredient_count = result.scalar()
if ingredient_count == 0:
return {
"success": False,
"error": "Cannot activate a recipe without ingredients."
}
# Add metadata
if updated_by:
recipe_data["updated_by"] = updated_by

View File

@@ -0,0 +1,34 @@
"""remove legacy quality fields
Revision ID: 20251027_remove_quality
Revises: 3c4d0f57a312
Create Date: 2025-10-27
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '20251027_remove_quality'
down_revision = '3c4d0f57a312'
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Remove deprecated quality fields from recipes table"""
# Drop columns that are no longer used
# Using batch operations for safer column drops
with op.batch_alter_table('recipes', schema=None) as batch_op:
batch_op.drop_column('quality_standards')
batch_op.drop_column('quality_check_points')
batch_op.drop_column('common_issues')
def downgrade() -> None:
"""Restore deprecated quality fields (for rollback purposes only)"""
# Add back the columns in case of rollback
op.add_column('recipes', sa.Column('quality_standards', sa.Text(), nullable=True))
op.add_column('recipes', sa.Column('quality_check_points', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
op.add_column('recipes', sa.Column('common_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True))

View File

@@ -50,7 +50,7 @@
},
"preparation_notes": "Es crucial usar vapor al inicio del horneado para lograr una corteza crujiente. La temperatura del agua debe estar entre 18-20°C.",
"storage_instructions": "Consumir el mismo día de producción. Se puede congelar después del horneado.",
"quality_standards": "Color dorado uniforme, corteza muy crujiente, miga alveolada con alveolos irregulares, aroma característico a trigo.",
"is_seasonal": false,
"is_signature_item": true,
"ingredientes": [
@@ -147,7 +147,7 @@
},
"preparation_notes": "La mantequilla para laminar debe estar a 15-16°C, flexible pero no blanda. Trabajar en ambiente fresco.",
"storage_instructions": "Consumir el día de producción. Se puede congelar la masa formada antes de la fermentación final.",
"quality_standards": "Laminado perfecto con capas visibles, color marrón brillante, estructura hojaldrada bien definida, aroma intenso a mantequilla.",
"is_seasonal": false,
"is_signature_item": true,
"ingredientes": [
@@ -280,7 +280,7 @@
},
"preparation_notes": "La masa madre debe estar activa y en su punto óptimo. La temperatura final de la masa debe ser 24-25°C.",
"storage_instructions": "Se conserva hasta 5-7 días en bolsa de papel. Mejora al segundo día.",
"quality_standards": "Corteza gruesa y oscura, miga densa pero húmeda, alveolos irregulares, sabor complejo ligeramente ácido.",
"is_seasonal": false,
"is_signature_item": true,
"ingredientes": [
@@ -378,7 +378,7 @@
},
"preparation_notes": "El chocolate debe ser de buena calidad para un mejor resultado. No sobrecargar de chocolate.",
"storage_instructions": "Consumir preferiblemente el día de producción.",
"quality_standards": "Hojaldre bien desarrollado, chocolate fundido en el interior, color dorado brillante.",
"is_seasonal": false,
"is_signature_item": false,
"ingredientes": [

View File

@@ -149,7 +149,7 @@ async def seed_recipes_for_tenant(
instructions=recipe_data.get("instructions"),
preparation_notes=recipe_data.get("preparation_notes"),
storage_instructions=recipe_data.get("storage_instructions"),
quality_standards=recipe_data.get("quality_standards"),
quality_check_configuration=recipe_data.get("quality_check_configuration"),
status=RecipeStatus.ACTIVE,
is_seasonal=recipe_data.get("is_seasonal", False),
is_signature_item=recipe_data.get("is_signature_item", False),

View File

@@ -67,7 +67,7 @@ async def get_delivery_performance_stats(
try:
service = DeliveryService(db)
stats = await service.get_delivery_performance_stats(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
days_back=days_back,
supplier_id=supplier_id
)
@@ -89,7 +89,7 @@ async def get_delivery_summary_stats(
"""Get delivery summary statistics for dashboard"""
try:
service = DeliveryService(db)
stats = await service.get_upcoming_deliveries_summary(current_user.tenant_id)
stats = await service.get_upcoming_deliveries_summary(current_user["tenant_id"])
return DeliverySummaryStats(**stats)
except Exception as e:
logger.error("Error getting delivery summary stats", error=str(e))

View File

@@ -41,9 +41,9 @@ async def create_delivery(
try:
service = DeliveryService(db)
delivery = await service.create_delivery(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
delivery_data=delivery_data,
created_by=current_user.user_id
created_by=current_user["user_id"]
)
return DeliveryResponse.from_orm(delivery)
except ValueError as e:
@@ -106,7 +106,7 @@ async def list_deliveries(
)
deliveries = await service.search_deliveries(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
search_params=search_params
)
@@ -135,7 +135,7 @@ async def get_delivery(
raise HTTPException(status_code=404, detail="Delivery not found")
# Check tenant access
if delivery.tenant_id != current_user.tenant_id:
if delivery.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
return DeliveryResponse.from_orm(delivery)
@@ -164,13 +164,13 @@ async def update_delivery(
existing_delivery = await service.get_delivery(delivery_id)
if not existing_delivery:
raise HTTPException(status_code=404, detail="Delivery not found")
if existing_delivery.tenant_id != current_user.tenant_id:
if existing_delivery.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
delivery = await service.update_delivery(
delivery_id=delivery_id,
delivery_data=delivery_data,
updated_by=current_user.user_id
updated_by=current_user["user_id"]
)
if not delivery:

View File

@@ -102,7 +102,7 @@ async def get_suppliers_needing_review(
raise HTTPException(status_code=500, detail="Failed to retrieve suppliers needing review")
@router.post(route_builder.build_nested_resource_route("suppliers", "supplier_id", "approve"), response_model=SupplierResponse)
@router.post(route_builder.build_resource_action_route("", "supplier_id", "approve"), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def approve_supplier(
approval_data: SupplierApproval,
@@ -123,7 +123,7 @@ async def approve_supplier(
if approval_data.action == "approve":
supplier = await service.approve_supplier(
supplier_id=supplier_id,
approved_by=current_user.user_id,
approved_by=current_user["user_id"],
notes=approval_data.notes
)
elif approval_data.action == "reject":
@@ -132,7 +132,7 @@ async def approve_supplier(
supplier = await service.reject_supplier(
supplier_id=supplier_id,
rejection_reason=approval_data.notes,
rejected_by=current_user.user_id
rejected_by=current_user["user_id"]
)
else:
raise HTTPException(status_code=400, detail="Invalid action")
@@ -148,7 +148,7 @@ async def approve_supplier(
raise HTTPException(status_code=500, detail="Failed to process supplier approval")
@router.get(route_builder.build_resource_detail_route("suppliers/types", "supplier_type"), response_model=List[SupplierSummary])
@router.get(route_builder.build_resource_detail_route("types", "supplier_type"), response_model=List[SupplierSummary])
async def get_suppliers_by_type(
supplier_type: str = Path(..., description="Supplier type"),
tenant_id: str = Path(..., description="Tenant ID"),
@@ -183,7 +183,7 @@ async def get_todays_deliveries(
"""Get deliveries scheduled for today"""
try:
service = DeliveryService(db)
deliveries = await service.get_todays_deliveries(current_user.tenant_id)
deliveries = await service.get_todays_deliveries(current_user["tenant_id"])
return [DeliverySummary.from_orm(delivery) for delivery in deliveries]
except Exception as e:
logger.error("Error getting today's deliveries", error=str(e))
@@ -199,7 +199,7 @@ async def get_overdue_deliveries(
"""Get overdue deliveries"""
try:
service = DeliveryService(db)
deliveries = await service.get_overdue_deliveries(current_user.tenant_id)
deliveries = await service.get_overdue_deliveries(current_user["tenant_id"])
return [DeliverySummary.from_orm(delivery) for delivery in deliveries]
except Exception as e:
logger.error("Error getting overdue deliveries", error=str(e))
@@ -233,7 +233,7 @@ async def get_scheduled_deliveries(
service = DeliveryService(db)
deliveries = await service.get_scheduled_deliveries(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
date_from=date_from_parsed,
date_to=date_to_parsed
)
@@ -262,13 +262,13 @@ async def update_delivery_status(
existing_delivery = await service.get_delivery(delivery_id)
if not existing_delivery:
raise HTTPException(status_code=404, detail="Delivery not found")
if existing_delivery.tenant_id != current_user.tenant_id:
if existing_delivery.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
delivery = await service.update_delivery_status(
delivery_id=delivery_id,
status=status_data.status,
updated_by=current_user.user_id,
updated_by=current_user["user_id"],
notes=status_data.notes,
update_timestamps=status_data.update_timestamps
)
@@ -303,12 +303,12 @@ async def receive_delivery(
existing_delivery = await service.get_delivery(delivery_id)
if not existing_delivery:
raise HTTPException(status_code=404, detail="Delivery not found")
if existing_delivery.tenant_id != current_user.tenant_id:
if existing_delivery.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
delivery = await service.mark_as_received(
delivery_id=delivery_id,
received_by=current_user.user_id,
received_by=current_user["user_id"],
inspection_passed=receipt_data.inspection_passed,
inspection_notes=receipt_data.inspection_notes,
quality_issues=receipt_data.quality_issues,
@@ -341,7 +341,7 @@ async def get_deliveries_by_purchase_order(
deliveries = await service.get_deliveries_by_purchase_order(po_id)
# Check tenant access for first delivery (all should belong to same tenant)
if deliveries and deliveries[0].tenant_id != current_user.tenant_id:
if deliveries and deliveries[0].tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
return [DeliverySummary.from_orm(delivery) for delivery in deliveries]
@@ -363,7 +363,7 @@ async def get_purchase_order_statistics(
"""Get purchase order statistics for dashboard"""
try:
service = PurchaseOrderService(db)
stats = await service.get_purchase_order_statistics(current_user.tenant_id)
stats = await service.get_purchase_order_statistics(current_user["tenant_id"])
return stats
except Exception as e:
logger.error("Error getting purchase order statistics", error=str(e))
@@ -379,7 +379,7 @@ async def get_orders_requiring_approval(
"""Get purchase orders requiring approval"""
try:
service = PurchaseOrderService(db)
orders = await service.get_orders_requiring_approval(current_user.tenant_id)
orders = await service.get_orders_requiring_approval(current_user["tenant_id"])
return [PurchaseOrderSummary.from_orm(order) for order in orders]
except Exception as e:
logger.error("Error getting orders requiring approval", error=str(e))
@@ -395,7 +395,7 @@ async def get_overdue_orders(
"""Get overdue purchase orders"""
try:
service = PurchaseOrderService(db)
orders = await service.get_overdue_orders(current_user.tenant_id)
orders = await service.get_overdue_orders(current_user["tenant_id"])
return [PurchaseOrderSummary.from_orm(order) for order in orders]
except Exception as e:
logger.error("Error getting overdue orders", error=str(e))
@@ -419,13 +419,13 @@ async def update_purchase_order_status(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
purchase_order = await service.update_order_status(
po_id=po_id,
status=status_data.status,
updated_by=current_user.user_id,
updated_by=current_user["user_id"],
notes=status_data.notes
)
@@ -459,7 +459,7 @@ async def approve_purchase_order(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
# Capture PO details for audit
@@ -473,7 +473,7 @@ async def approve_purchase_order(
if approval_data.action == "approve":
purchase_order = await service.approve_purchase_order(
po_id=po_id,
approved_by=current_user.user_id,
approved_by=current_user["user_id"],
approval_notes=approval_data.notes
)
action = "approve"
@@ -484,7 +484,7 @@ async def approve_purchase_order(
purchase_order = await service.reject_purchase_order(
po_id=po_id,
rejection_reason=approval_data.notes,
rejected_by=current_user.user_id
rejected_by=current_user["user_id"]
)
action = "reject"
description = f"Admin {current_user.get('email', 'unknown')} rejected purchase order {po_details['po_number']}"
@@ -550,12 +550,12 @@ async def send_to_supplier(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
purchase_order = await service.send_to_supplier(
po_id=po_id,
sent_by=current_user.user_id,
sent_by=current_user["user_id"],
send_email=send_email
)
@@ -589,13 +589,13 @@ async def confirm_supplier_receipt(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
purchase_order = await service.confirm_supplier_receipt(
po_id=po_id,
supplier_reference=supplier_reference,
confirmed_by=current_user.user_id
confirmed_by=current_user["user_id"]
)
if not purchase_order:
@@ -628,13 +628,13 @@ async def cancel_purchase_order(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
purchase_order = await service.cancel_purchase_order(
po_id=po_id,
cancellation_reason=cancellation_reason,
cancelled_by=current_user.user_id
cancelled_by=current_user["user_id"]
)
if not purchase_order:
@@ -662,7 +662,7 @@ async def get_orders_by_supplier(
try:
service = PurchaseOrderService(db)
orders = await service.get_orders_by_supplier(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
supplier_id=supplier_id,
limit=limit
)
@@ -684,7 +684,7 @@ async def get_inventory_product_purchase_history(
try:
service = PurchaseOrderService(db)
history = await service.get_inventory_product_purchase_history(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
inventory_product_id=inventory_product_id,
days_back=days_back
)
@@ -706,7 +706,7 @@ async def get_top_purchased_inventory_products(
try:
service = PurchaseOrderService(db)
products = await service.get_top_purchased_inventory_products(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
days_back=days_back,
limit=limit
)
@@ -732,7 +732,7 @@ async def get_supplier_count(
try:
service = SupplierService(db)
suppliers = await service.get_suppliers(tenant_id=current_user.tenant_id)
suppliers = await service.get_suppliers(tenant_id=current_user["tenant_id"])
count = len(suppliers)
return {"count": count}

View File

@@ -15,7 +15,7 @@ from app.services.supplier_service import SupplierService
from app.models.suppliers import SupplierPriceList
from app.schemas.suppliers import (
SupplierCreate, SupplierUpdate, SupplierResponse, SupplierSummary,
SupplierSearchParams
SupplierSearchParams, SupplierDeletionSummary
)
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
@@ -30,7 +30,7 @@ router = APIRouter(tags=["suppliers"])
logger = structlog.get_logger()
audit_logger = create_audit_logger("suppliers-service")
@router.post(route_builder.build_base_route("suppliers"), response_model=SupplierResponse)
@router.post(route_builder.build_base_route(""), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def create_supplier(
supplier_data: SupplierCreate,
@@ -41,10 +41,15 @@ async def create_supplier(
"""Create a new supplier"""
try:
service = SupplierService(db)
# Get user role from current_user dict
user_role = current_user.get("role", "member").lower()
supplier = await service.create_supplier(
tenant_id=UUID(tenant_id),
supplier_data=supplier_data,
created_by=current_user.user_id
created_by=current_user["user_id"],
created_by_role=user_role
)
return SupplierResponse.from_orm(supplier)
except ValueError as e:
@@ -54,7 +59,7 @@ async def create_supplier(
raise HTTPException(status_code=500, detail="Failed to create supplier")
@router.get(route_builder.build_base_route("suppliers"), response_model=List[SupplierSummary])
@router.get(route_builder.build_base_route(""), response_model=List[SupplierSummary])
async def list_suppliers(
tenant_id: str = Path(..., description="Tenant ID"),
search_term: Optional[str] = Query(None, description="Search term"),
@@ -84,7 +89,7 @@ async def list_suppliers(
raise HTTPException(status_code=500, detail="Failed to retrieve suppliers")
@router.get(route_builder.build_resource_detail_route("suppliers", "supplier_id"), response_model=SupplierResponse)
@router.get(route_builder.build_resource_detail_route("", "supplier_id"), response_model=SupplierResponse)
async def get_supplier(
supplier_id: UUID = Path(..., description="Supplier ID"),
tenant_id: str = Path(..., description="Tenant ID"),
@@ -106,7 +111,7 @@ async def get_supplier(
raise HTTPException(status_code=500, detail="Failed to retrieve supplier")
@router.put(route_builder.build_resource_detail_route("suppliers", "supplier_id"), response_model=SupplierResponse)
@router.put(route_builder.build_resource_detail_route("", "supplier_id"), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def update_supplier(
supplier_data: SupplierUpdate,
@@ -126,7 +131,7 @@ async def update_supplier(
supplier = await service.update_supplier(
supplier_id=supplier_id,
supplier_data=supplier_data,
updated_by=current_user.user_id
updated_by=current_user["user_id"]
)
if not supplier:
@@ -142,7 +147,7 @@ async def update_supplier(
raise HTTPException(status_code=500, detail="Failed to update supplier")
@router.delete(route_builder.build_resource_detail_route("suppliers", "supplier_id"))
@router.delete(route_builder.build_resource_detail_route("", "supplier_id"))
@require_user_role(['admin', 'owner'])
async def delete_supplier(
supplier_id: UUID = Path(..., description="Supplier ID"),
@@ -207,6 +212,77 @@ async def delete_supplier(
raise HTTPException(status_code=500, detail="Failed to delete supplier")
@router.delete(
route_builder.build_resource_action_route("", "supplier_id", "hard"),
response_model=SupplierDeletionSummary
)
@require_user_role(['admin', 'owner'])
async def hard_delete_supplier(
supplier_id: UUID = Path(..., description="Supplier ID"),
tenant_id: str = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Hard delete supplier and all associated data (Admin/Owner only, permanent)"""
try:
service = SupplierService(db)
# Check supplier exists
existing_supplier = await service.get_supplier(supplier_id)
if not existing_supplier:
raise HTTPException(status_code=404, detail="Supplier not found")
# Capture supplier data before deletion
supplier_data = {
"id": str(existing_supplier.id),
"name": existing_supplier.name,
"status": existing_supplier.status.value,
"supplier_code": existing_supplier.supplier_code
}
# Perform hard deletion
deletion_summary = await service.hard_delete_supplier(supplier_id, UUID(tenant_id))
# Log audit event for hard deletion
try:
# Get sync db session for audit logging
from app.core.database import SessionLocal
sync_db = SessionLocal()
try:
await audit_logger.log_deletion(
db_session=sync_db,
tenant_id=tenant_id,
user_id=current_user["user_id"],
resource_type="supplier",
resource_id=str(supplier_id),
resource_data=supplier_data,
description=f"Hard deleted supplier '{supplier_data['name']}' and all associated data",
endpoint=f"/suppliers/{supplier_id}/hard",
method="DELETE",
metadata=deletion_summary
)
sync_db.commit()
finally:
sync_db.close()
except Exception as audit_error:
logger.warning("Failed to log audit event", error=str(audit_error))
logger.info("Hard deleted supplier",
supplier_id=str(supplier_id),
tenant_id=tenant_id,
user_id=current_user["user_id"],
deletion_summary=deletion_summary)
return deletion_summary
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except HTTPException:
raise
except Exception as e:
logger.error("Error hard deleting supplier", supplier_id=str(supplier_id), error=str(e))
raise HTTPException(status_code=500, detail="Failed to hard delete supplier")
@router.get(
route_builder.build_base_route("count"),
response_model=dict
@@ -237,7 +313,7 @@ async def count_suppliers(
@router.get(
route_builder.build_resource_action_route("suppliers", "supplier_id", "products"),
route_builder.build_resource_action_route("", "supplier_id", "products"),
response_model=List[Dict[str, Any]]
)
async def get_supplier_products(

View File

@@ -102,11 +102,13 @@ app = service.create_app()
service.setup_standard_endpoints()
# Include API routers
service.add_router(suppliers.router)
service.add_router(deliveries.router)
service.add_router(purchase_orders.router)
service.add_router(supplier_operations.router)
service.add_router(analytics.router)
# IMPORTANT: Order matters! More specific routes must come first
# to avoid path parameter matching issues
service.add_router(purchase_orders.router) # /suppliers/purchase-orders/...
service.add_router(deliveries.router) # /suppliers/deliveries/...
service.add_router(supplier_operations.router) # /suppliers/operations/...
service.add_router(analytics.router) # /suppliers/analytics/...
service.add_router(suppliers.router) # /suppliers/{supplier_id} - catch-all, must be last
service.add_router(internal_demo.router)
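
A minimal sketch of the matching pitfall the comment above describes, using illustrative paths rather than the real RouteBuilder output: Starlette matches routes in registration order, so a single-segment catch-all like /suppliers/{supplier_id} registered first will swallow literal single-segment paths such as /suppliers/purchase-orders.

# Illustration only: shows why the catch-all router must be registered last.
from fastapi import APIRouter, FastAPI

app = FastAPI()
specific = APIRouter(prefix="/suppliers")
catch_all = APIRouter(prefix="/suppliers")

@specific.get("/purchase-orders")
async def list_purchase_orders():
    return {"scope": "purchase-orders"}

@catch_all.get("/{supplier_id}")
async def get_supplier(supplier_id: str):
    return {"supplier_id": supplier_id}

# Specific first: GET /suppliers/purchase-orders reaches list_purchase_orders.
# If the include order were reversed, the same request would match
# /{supplier_id} and "purchase-orders" would be treated as a supplier id.
app.include_router(specific)
app.include_router(catch_all)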

View File

@@ -217,44 +217,134 @@ class SupplierRepository(BaseRepository[Supplier]):
"total_spend": float(total_spend)
}
def approve_supplier(
async def approve_supplier(
self,
supplier_id: UUID,
approved_by: UUID,
approval_date: Optional[datetime] = None
) -> Optional[Supplier]:
"""Approve a pending supplier"""
supplier = self.get_by_id(supplier_id)
if not supplier or supplier.status != SupplierStatus.PENDING_APPROVAL:
supplier = await self.get_by_id(supplier_id)
if not supplier or supplier.status != SupplierStatus.pending_approval:
return None
supplier.status = SupplierStatus.ACTIVE
supplier.status = SupplierStatus.active
supplier.approved_by = approved_by
supplier.approved_at = approval_date or datetime.utcnow()
supplier.rejection_reason = None
supplier.updated_at = datetime.utcnow()
self.db.commit()
self.db.refresh(supplier)
await self.db.commit()
await self.db.refresh(supplier)
return supplier
def reject_supplier(
async def reject_supplier(
self,
supplier_id: UUID,
rejection_reason: str,
approved_by: UUID
) -> Optional[Supplier]:
"""Reject a pending supplier"""
supplier = self.get_by_id(supplier_id)
if not supplier or supplier.status != SupplierStatus.PENDING_APPROVAL:
supplier = await self.get_by_id(supplier_id)
if not supplier or supplier.status != SupplierStatus.pending_approval:
return None
supplier.status = SupplierStatus.INACTIVE
supplier.status = SupplierStatus.inactive
supplier.rejection_reason = rejection_reason
supplier.approved_by = approved_by
supplier.approved_at = datetime.utcnow()
supplier.updated_at = datetime.utcnow()
self.db.commit()
self.db.refresh(supplier)
return supplier
await self.db.commit()
await self.db.refresh(supplier)
return supplier
async def hard_delete_supplier(self, supplier_id: UUID) -> Dict[str, Any]:
"""
Hard delete supplier and all associated data
Returns counts of deleted records
"""
from app.models.suppliers import (
SupplierPriceList, SupplierQualityReview,
SupplierAlert, SupplierScorecard, PurchaseOrderStatus, PurchaseOrder
)
from app.models.performance import SupplierPerformanceMetric
from sqlalchemy import delete
# Get supplier first
supplier = await self.get_by_id(supplier_id)
if not supplier:
return None
# Check for active purchase orders (block deletion if any exist)
active_statuses = [
PurchaseOrderStatus.draft,
PurchaseOrderStatus.pending_approval,
PurchaseOrderStatus.approved,
PurchaseOrderStatus.sent_to_supplier,
PurchaseOrderStatus.confirmed
]
stmt = select(PurchaseOrder).where(
PurchaseOrder.supplier_id == supplier_id,
PurchaseOrder.status.in_(active_statuses)
)
result = await self.db.execute(stmt)
active_pos = result.scalars().all()
if active_pos:
raise ValueError(
f"Cannot delete supplier with {len(active_pos)} active purchase orders. "
"Complete or cancel all purchase orders first."
)
# Count related records before deletion
stmt = select(SupplierPriceList).where(SupplierPriceList.supplier_id == supplier_id)
result = await self.db.execute(stmt)
price_lists_count = len(result.scalars().all())
stmt = select(SupplierQualityReview).where(SupplierQualityReview.supplier_id == supplier_id)
result = await self.db.execute(stmt)
quality_reviews_count = len(result.scalars().all())
stmt = select(SupplierPerformanceMetric).where(SupplierPerformanceMetric.supplier_id == supplier_id)
result = await self.db.execute(stmt)
metrics_count = len(result.scalars().all())
stmt = select(SupplierAlert).where(SupplierAlert.supplier_id == supplier_id)
result = await self.db.execute(stmt)
alerts_count = len(result.scalars().all())
stmt = select(SupplierScorecard).where(SupplierScorecard.supplier_id == supplier_id)
result = await self.db.execute(stmt)
scorecards_count = len(result.scalars().all())
# Delete related records (in reverse dependency order)
stmt = delete(SupplierScorecard).where(SupplierScorecard.supplier_id == supplier_id)
await self.db.execute(stmt)
stmt = delete(SupplierAlert).where(SupplierAlert.supplier_id == supplier_id)
await self.db.execute(stmt)
stmt = delete(SupplierPerformanceMetric).where(SupplierPerformanceMetric.supplier_id == supplier_id)
await self.db.execute(stmt)
stmt = delete(SupplierQualityReview).where(SupplierQualityReview.supplier_id == supplier_id)
await self.db.execute(stmt)
stmt = delete(SupplierPriceList).where(SupplierPriceList.supplier_id == supplier_id)
await self.db.execute(stmt)
# Delete the supplier itself
await self.delete(supplier_id)
await self.db.commit()
return {
"supplier_name": supplier.name,
"deleted_price_lists": price_lists_count,
"deleted_quality_reviews": quality_reviews_count,
"deleted_performance_metrics": metrics_count,
"deleted_alerts": alerts_count,
"deleted_scorecards": scorecards_count,
"deletion_timestamp": datetime.utcnow()
}

View File

@@ -170,6 +170,13 @@ class SupplierSummary(BaseModel):
phone: Optional[str] = None
city: Optional[str] = None
country: Optional[str] = None
# Business terms - Added for list view
payment_terms: PaymentTerms
standard_lead_time: int
minimum_order_amount: Optional[Decimal] = None
# Performance metrics
quality_rating: Optional[float] = None
delivery_rating: Optional[float] = None
total_orders: int
@@ -180,6 +187,20 @@ class SupplierSummary(BaseModel):
from_attributes = True
class SupplierDeletionSummary(BaseModel):
"""Schema for supplier deletion summary"""
supplier_name: str
deleted_price_lists: int = 0
deleted_quality_reviews: int = 0
deleted_performance_metrics: int = 0
deleted_alerts: int = 0
deleted_scorecards: int = 0
deletion_timestamp: datetime
class Config:
from_attributes = True
# ============================================================================
# PURCHASE ORDER SCHEMAS
# ============================================================================

View File

@@ -31,11 +31,12 @@ class SupplierService:
self,
tenant_id: UUID,
supplier_data: SupplierCreate,
created_by: UUID
created_by: UUID,
created_by_role: str = "member"
) -> Supplier:
"""Create a new supplier"""
logger.info("Creating supplier", tenant_id=str(tenant_id), name=supplier_data.name)
"""Create a new supplier with role-based auto-approval"""
logger.info("Creating supplier", tenant_id=str(tenant_id), name=supplier_data.name, role=created_by_role)
# Check for duplicate name
existing = await self.repository.get_by_name(tenant_id, supplier_data.name)
if existing:
@@ -50,18 +51,48 @@ class SupplierService:
raise ValueError(
f"Supplier with code '{supplier_data.supplier_code}' already exists"
)
# Generate supplier code if not provided
supplier_code = supplier_data.supplier_code
if not supplier_code:
supplier_code = self._generate_supplier_code(supplier_data.name)
# Fetch tenant supplier settings to determine approval workflow
try:
from shared.clients.tenant_client import create_tenant_client
tenant_client = create_tenant_client(settings)
supplier_settings = await tenant_client.get_supplier_settings(str(tenant_id)) or {}
except Exception as e:
logger.warning("Failed to fetch tenant settings, using defaults", error=str(e))
supplier_settings = {}
# Determine initial status based on settings and role
require_approval = supplier_settings.get('require_supplier_approval', True)
auto_approve_admin = supplier_settings.get('auto_approve_for_admin_owner', True)
# Auto-approval logic
if not require_approval:
# Workflow disabled globally - always auto-approve
initial_status = SupplierStatus.active
auto_approved = True
logger.info("Supplier approval workflow disabled - auto-approving")
elif auto_approve_admin and created_by_role.lower() in ['admin', 'owner']:
# Auto-approve for admin/owner roles
initial_status = SupplierStatus.active
auto_approved = True
logger.info("Auto-approving supplier created by admin/owner", role=created_by_role)
else:
# Require approval for other roles
initial_status = SupplierStatus.pending_approval
auto_approved = False
logger.info("Supplier requires approval", role=created_by_role)
# Create supplier data
create_data = supplier_data.model_dump(exclude_unset=True)
create_data.update({
'tenant_id': tenant_id,
'supplier_code': supplier_code,
'status': SupplierStatus.pending_approval,
'status': initial_status,
'created_by': created_by,
'updated_by': created_by,
'quality_rating': 0.0,
@@ -69,16 +100,23 @@ class SupplierService:
'total_orders': 0,
'total_amount': 0.0
})
# Set approval fields if auto-approved
if auto_approved:
create_data['approved_by'] = created_by
create_data['approved_at'] = datetime.utcnow()
supplier = await self.repository.create(create_data)
logger.info(
"Supplier created successfully",
tenant_id=str(tenant_id),
supplier_id=str(supplier.id),
name=supplier.name
name=supplier.name,
status=initial_status.value,
auto_approved=auto_approved
)
return supplier
async def get_supplier(self, supplier_id: UUID) -> Optional[Supplier]:
@@ -144,7 +182,28 @@ class SupplierService:
logger.info("Supplier deleted successfully", supplier_id=str(supplier_id))
return True
async def hard_delete_supplier(self, supplier_id: UUID, tenant_id: UUID) -> Dict[str, Any]:
"""
Hard delete supplier and all associated data (permanent deletion)
Returns deletion summary for audit purposes
"""
logger.info("Hard deleting supplier", supplier_id=str(supplier_id), tenant_id=str(tenant_id))
# Delegate to repository layer - all DB access is done there
deletion_summary = await self.repository.hard_delete_supplier(supplier_id)
if not deletion_summary:
raise ValueError("Supplier not found")
logger.info(
"Supplier hard deleted successfully",
supplier_id=str(supplier_id),
**deletion_summary
)
return deletion_summary
async def search_suppliers(
self,
tenant_id: UUID,
@@ -184,18 +243,18 @@ class SupplierService:
) -> Optional[Supplier]:
"""Approve a pending supplier"""
logger.info("Approving supplier", supplier_id=str(supplier_id))
supplier = self.repository.approve_supplier(supplier_id, approved_by)
supplier = await self.repository.approve_supplier(supplier_id, approved_by)
if not supplier:
logger.warning("Failed to approve supplier - not found or not pending")
return None
if notes:
self.repository.update(supplier_id, {
await self.repository.update(supplier_id, {
'notes': (supplier.notes or "") + f"\nApproval notes: {notes}",
'updated_at': datetime.utcnow()
})
logger.info("Supplier approved successfully", supplier_id=str(supplier_id))
return supplier
@@ -207,14 +266,14 @@ class SupplierService:
) -> Optional[Supplier]:
"""Reject a pending supplier"""
logger.info("Rejecting supplier", supplier_id=str(supplier_id))
supplier = self.repository.reject_supplier(
supplier = await self.repository.reject_supplier(
supplier_id, rejection_reason, rejected_by
)
if not supplier:
logger.warning("Failed to reject supplier - not found or not pending")
return None
logger.info("Supplier rejected successfully", supplier_id=str(supplier_id))
return supplier
@@ -318,4 +377,4 @@ class SupplierService:
if min_order is not None and min_order < 0:
errors['minimum_order_amount'] = "Minimum order amount cannot be negative"
return errors
return errors
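
A compact restatement of the auto-approval decision added to create_supplier above, pulled out as a pure function for illustration; the setting keys and role strings mirror the tenant supplier_settings used there, and resolve_initial_status is a hypothetical helper, not part of the service.

def resolve_initial_status(supplier_settings: dict, created_by_role: str) -> tuple[str, bool]:
    """Return (initial_status, auto_approved) with the same precedence as create_supplier."""
    require_approval = supplier_settings.get("require_supplier_approval", True)
    auto_approve_admin = supplier_settings.get("auto_approve_for_admin_owner", True)

    if not require_approval:
        # Workflow disabled globally - always auto-approve
        return "active", True
    if auto_approve_admin and created_by_role.lower() in ("admin", "owner"):
        # Privileged roles skip the approval queue
        return "active", True
    # Everyone else starts in pending_approval
    return "pending_approval", False

assert resolve_initial_status({}, "owner") == ("active", True)
assert resolve_initial_status({"auto_approve_for_admin_owner": False}, "admin") == ("pending_approval", False)
assert resolve_initial_status({"require_supplier_approval": False}, "viewer") == ("active", True)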

View File

@@ -2,24 +2,37 @@
Subscription management API for GDPR-compliant cancellation and reactivation
"""
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, status, Query
from pydantic import BaseModel, Field
from datetime import datetime, timezone, timedelta
from uuid import UUID
import structlog
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from sqlalchemy import select
from shared.auth.decorators import get_current_user_dep, require_admin_role_dep
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.models.tenants import Subscription, Tenant
from app.models.tenants import Subscription
from app.services.subscription_limit_service import SubscriptionLimitService
logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder('tenant')
class QuotaCheckResponse(BaseModel):
"""Response for quota limit checks"""
allowed: bool
message: str
limit: int
current_count: int
max_allowed: int
reason: str
requested_amount: int
available_amount: int
class SubscriptionCancellationRequest(BaseModel):
"""Request model for subscription cancellation"""
tenant_id: str = Field(..., description="Tenant ID to cancel subscription for")

View File

@@ -34,7 +34,12 @@ class TenantSettings(Base):
"demand_forecast_days": 14,
"safety_stock_percentage": 20.0,
"po_approval_reminder_hours": 24,
"po_critical_escalation_hours": 12
"po_critical_escalation_hours": 12,
"use_reorder_rules": True,
"economic_rounding": True,
"respect_storage_limits": True,
"use_supplier_minimums": True,
"optimize_price_tiers": True
})
# Inventory Management Settings (Inventory Service)
@@ -86,7 +91,11 @@ class TenantSettings(Base):
"good_quality_rate": 95.0,
"critical_delivery_delay_hours": 24,
"critical_quality_rejection_rate": 10.0,
"high_cost_variance_percentage": 15.0
"high_cost_variance_percentage": 15.0,
# Supplier Approval Workflow Settings
"require_supplier_approval": True,
"auto_approve_for_admin_owner": True,
"approval_required_roles": ["member", "viewer"]
})
# POS Integration Settings (POS Service)
@@ -132,7 +141,12 @@ class TenantSettings(Base):
"demand_forecast_days": 14,
"safety_stock_percentage": 20.0,
"po_approval_reminder_hours": 24,
"po_critical_escalation_hours": 12
"po_critical_escalation_hours": 12,
"use_reorder_rules": True,
"economic_rounding": True,
"respect_storage_limits": True,
"use_supplier_minimums": True,
"optimize_price_tiers": True
},
"inventory_settings": {
"low_stock_threshold": 10,
@@ -178,7 +192,10 @@ class TenantSettings(Base):
"good_quality_rate": 95.0,
"critical_delivery_delay_hours": 24,
"critical_quality_rejection_rate": 10.0,
"high_cost_variance_percentage": 15.0
"high_cost_variance_percentage": 15.0,
"require_supplier_approval": True,
"auto_approve_for_admin_owner": True,
"approval_required_roles": ["member", "viewer"]
},
"pos_settings": {
"sync_interval_minutes": 5,

View File

@@ -26,6 +26,11 @@ class ProcurementSettings(BaseModel):
safety_stock_percentage: float = Field(20.0, ge=0.0, le=100.0)
po_approval_reminder_hours: int = Field(24, ge=1, le=168)
po_critical_escalation_hours: int = Field(12, ge=1, le=72)
use_reorder_rules: bool = Field(True, description="Use ingredient reorder point and reorder quantity in procurement calculations")
economic_rounding: bool = Field(True, description="Round order quantities to economic multiples (reorder_quantity or supplier minimum_order_quantity)")
respect_storage_limits: bool = Field(True, description="Enforce max_stock_level constraints on orders")
use_supplier_minimums: bool = Field(True, description="Respect supplier minimum_order_quantity and minimum_order_amount")
optimize_price_tiers: bool = Field(True, description="Optimize order quantities to capture volume discount price tiers")
class InventorySettings(BaseModel):

View File

@@ -0,0 +1,43 @@
"""add smart procurement settings to tenant settings
Revision ID: 20251025_procurement
Revises: 20251022_0000
Create Date: 2025-10-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json
# revision identifiers, used by Alembic.
revision = '20251025_procurement'
down_revision = '20251022_0000'
branch_labels = None
depends_on = None
def upgrade():
"""Add smart procurement flags to existing procurement_settings"""
# Use a single SQL statement to update all rows at once
# This avoids cursor lock issues and is more efficient
# Note: Cast to jsonb for merge operator, then back to json
op.execute("""
UPDATE tenant_settings
SET
procurement_settings = (procurement_settings::jsonb ||
'{"use_reorder_rules": true, "economic_rounding": true, "respect_storage_limits": true, "use_supplier_minimums": true, "optimize_price_tiers": true}'::jsonb)::json,
updated_at = now()
""")
def downgrade():
"""Remove smart procurement flags from procurement_settings"""
# Use a single SQL statement to remove the keys from all rows
# Note: Cast to jsonb for operator, then back to json
op.execute("""
UPDATE tenant_settings
SET
procurement_settings = (procurement_settings::jsonb - 'use_reorder_rules' - 'economic_rounding' - 'respect_storage_limits' - 'use_supplier_minimums' - 'optimize_price_tiers')::json,
updated_at = now()
""")

View File

@@ -0,0 +1,43 @@
"""add supplier approval workflow settings to tenant settings
Revision ID: 20251025_supplier_approval
Revises: 20251025_procurement
Create Date: 2025-10-25
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json
# revision identifiers, used by Alembic.
revision = '20251025_supplier_approval'
down_revision = '20251025_procurement'
branch_labels = None
depends_on = None
def upgrade():
"""Add supplier approval workflow settings to existing supplier_settings"""
# Use a single SQL statement to update all rows at once
# This avoids cursor lock issues and is more efficient
# Note: Cast to jsonb for merge operator, then back to json
op.execute("""
UPDATE tenant_settings
SET
supplier_settings = (supplier_settings::jsonb ||
'{"require_supplier_approval": true, "auto_approve_for_admin_owner": true, "approval_required_roles": ["member", "viewer"]}'::jsonb)::json,
updated_at = now()
""")
def downgrade():
"""Remove supplier approval workflow settings from supplier_settings"""
# Use a single SQL statement to remove the keys from all rows
# Note: Cast to jsonb for operator, then back to json
op.execute("""
UPDATE tenant_settings
SET
supplier_settings = (supplier_settings::jsonb - 'require_supplier_approval' - 'auto_approve_for_admin_owner' - 'approval_required_roles')::json,
updated_at = now()
""")

View File

@@ -249,13 +249,35 @@ class ModelRepository(TrainingBaseRepository):
)
recent_models = recent_result.scalar() or 0
# Calculate average accuracy from model metrics
accuracy_query = text("""
SELECT AVG(mape) as average_mape, COUNT(*) as total_models_with_metrics
FROM trained_models
WHERE tenant_id = :tenant_id
AND mape IS NOT NULL
AND is_active = true
""")
accuracy_result = await self.session.execute(accuracy_query, {"tenant_id": tenant_id})
accuracy_row = accuracy_result.fetchone()
average_mape = accuracy_row.average_mape if accuracy_row and accuracy_row.average_mape else 0
total_models_with_metrics = accuracy_row.total_models_with_metrics if accuracy_row else 0
# Convert MAPE to accuracy percentage (lower MAPE = higher accuracy)
# Use 100 - MAPE as a simple conversion, but cap it at reasonable bounds
average_accuracy = max(0, min(100, 100 - float(average_mape))) if average_mape > 0 else 0
return {
"total_models": total_models,
"active_models": active_models,
"inactive_models": total_models - active_models,
"production_models": production_models,
"models_by_product": product_stats,
"recent_models_30d": recent_models
"recent_models_30d": recent_models,
"average_accuracy": average_accuracy,
"total_models_with_metrics": total_models_with_metrics,
"average_mape": float(average_mape) if average_mape > 0 else 0
}
except Exception as e:
@@ -268,7 +290,10 @@ class ModelRepository(TrainingBaseRepository):
"inactive_models": 0,
"production_models": 0,
"models_by_product": {},
"recent_models_30d": 0
"recent_models_30d": 0,
"average_accuracy": 0,
"total_models_with_metrics": 0,
"average_mape": 0
}
async def _deactivate_other_production_models(
@@ -343,4 +368,4 @@ class ModelRepository(TrainingBaseRepository):
logger.error("Failed to get model performance summary",
model_id=model_id,
error=str(e))
return {}
return {}
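
A worked example of the MAPE-to-accuracy conversion used in the statistics query above, extracted as a standalone function for illustration (mape_to_accuracy is a hypothetical helper, not part of the repository).

def mape_to_accuracy(average_mape: float) -> float:
    # accuracy = clamp(100 - MAPE, 0, 100); 0 is also returned when there is
    # no MAPE data, so "no metrics" and "100% error" look identical here.
    return max(0, min(100, 100 - float(average_mape))) if average_mape > 0 else 0

assert mape_to_accuracy(12.5) == 87.5   # 12.5% mean error -> 87.5% accuracy
assert mape_to_accuracy(140.0) == 0     # very poor model clamps to 0
assert mape_to_accuracy(0) == 0         # no metrics (or a perfect MAPE of 0)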

View File

@@ -11,7 +11,7 @@ alembic==1.17.0
psycopg2-binary==2.9.10
# ML libraries
prophet==1.1.6
prophet==1.2.1
scikit-learn==1.6.1
pandas==2.2.3
numpy==2.2.2