Initial commit - production deployment
1
services/recipes/app/__init__.py
Normal file
@@ -0,0 +1 @@
# services/recipes/app/__init__.py
1
services/recipes/app/api/__init__.py
Normal file
@@ -0,0 +1 @@
# services/recipes/app/api/__init__.py
237
services/recipes/app/api/audit.py
Normal file
@@ -0,0 +1,237 @@
# services/recipes/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for recipes service
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
    AuditLogResponse,
    AuditLogListResponse,
    AuditLogStatsResponse
)
from app.core.database import db_manager

route_builder = RouteBuilder('recipes')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()


async def get_db():
    """Database session dependency"""
    async with db_manager.get_session() as session:
        yield session


@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for recipes service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )

        # Build query filters
        filters = [AuditLog.tenant_id == tenant_id]

        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            filters.append(AuditLog.description.ilike(f"%{search}%"))

        # Count total matching records
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0

        # Fetch paginated results
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )

        result = await db.execute(query)
        audit_logs = result.scalars().all()

        # Convert to response models
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]

        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )

        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for recipes service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Build base filters
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)

        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0

        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}

        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}

        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}

        # Date range
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()

        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )

        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )
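For reference, below is a minimal client-side sketch of how the paginated audit-log listing above might be called. It is an illustration only: the gateway base URL, the bearer token, and the exact path produced by RouteBuilder('recipes') are assumptions, since the shared routing helpers are not part of this commit.

# Hypothetical client sketch for the audit-log listing endpoint.
# BASE_URL, the token, and the route shape are assumptions, not part of this commit.
import asyncio
import httpx

BASE_URL = "http://localhost:8000"  # assumed gateway address
TENANT_ID = "00000000-0000-0000-0000-000000000001"  # placeholder tenant

async def fetch_audit_logs() -> None:
    params = {"severity": "error", "limit": 50, "offset": 0}
    headers = {"Authorization": "Bearer <token>"}  # admin/owner token required
    async with httpx.AsyncClient(base_url=BASE_URL, timeout=10.0) as client:
        resp = await client.get(
            f"/api/v1/tenants/{TENANT_ID}/recipes/audit-logs",  # assumed route shape
            params=params,
            headers=headers,
        )
        resp.raise_for_status()
        page = resp.json()
        # AuditLogListResponse fields shown in the endpoint above
        print(page["total"], len(page["items"]), page["has_more"])

if __name__ == "__main__":
    asyncio.run(fetch_audit_logs())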
47
services/recipes/app/api/internal.py
Normal file
@@ -0,0 +1,47 @@
"""
Internal API for Recipes Service
Handles internal service-to-service operations
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from uuid import UUID
import structlog

from app.core.database import get_db
from app.core.config import settings
from app.models.recipes import Recipe, RecipeStatus

logger = structlog.get_logger()
router = APIRouter(prefix="/internal", tags=["internal"])


@router.get("/count")
async def get_recipe_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Get count of recipes for onboarding status check.
    Counts DRAFT, TESTING and ACTIVE recipes (excludes ARCHIVED/DISCONTINUED).
    Internal endpoint for tenant service.
    """
    try:
        count = await db.scalar(
            select(func.count()).select_from(Recipe)
            .where(
                Recipe.tenant_id == UUID(tenant_id),
                Recipe.status.in_([RecipeStatus.DRAFT, RecipeStatus.ACTIVE, RecipeStatus.TESTING])
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get recipe count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get recipe count: {str(e)}")
426
services/recipes/app/api/internal_demo.py
Normal file
@@ -0,0 +1,426 @@
"""
Internal Demo Cloning API for Recipes Service
Service-to-service endpoint for cloning recipe and production data
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import sys
import json
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker

from app.core.database import get_db
from app.models.recipes import (
    Recipe, RecipeIngredient, ProductionBatch, ProductionIngredientConsumption,
    RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
)

from app.core.config import settings

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def parse_date_field(
    field_value: any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value (can be a BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)

    Returns:
        Timezone-aware UTC datetime or None
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime)
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone recipes service data for a virtual demo tenant.

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    try:
        # Validate UUIDs
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting recipes data cloning",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_created_at=session_created_at
        )

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "04-recipes.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "04-recipes.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "04-recipes.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Track cloning statistics
        stats = {
            "recipes": 0,
            "recipe_ingredients": 0
        }

        # First, build recipe ID map by processing all recipes
        recipe_id_map = {}

        # Create Recipes
        from shared.utils.demo_id_transformer import transform_id
        for recipe_data in seed_data.get('recipes', []):
            # Transform recipe ID using XOR
            try:
                recipe_uuid = uuid.UUID(recipe_data['id'])
                transformed_id = transform_id(recipe_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe UUID",
                             recipe_id=recipe_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe data: {str(e)}"
                )

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_created_at = parse_date_field(
                recipe_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                recipe_data.get('updated_at'),
                session_time,
                "updated_at"
            )

            # Map field names from seed data to model fields
            # Handle yield_quantity/yield_unit (may be named finished_product_quantity/unit in seed data)
            yield_quantity = recipe_data.get('yield_quantity') or recipe_data.get('finished_product_quantity', 1.0)
            yield_unit_str = recipe_data.get('yield_unit') or recipe_data.get('finished_product_unit', 'UNITS')

            # Convert yield_unit string to enum if needed
            if isinstance(yield_unit_str, str):
                try:
                    yield_unit = MeasurementUnit[yield_unit_str.upper()]
                except KeyError:
                    yield_unit = MeasurementUnit.UNITS
            else:
                yield_unit = yield_unit_str

            # Convert status string to enum if needed
            status = recipe_data.get('status', 'ACTIVE')
            if isinstance(status, str):
                try:
                    status = RecipeStatus[status.upper()]
                except KeyError:
                    status = RecipeStatus.ACTIVE

            new_recipe = Recipe(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=recipe_data['name'],
                description=recipe_data.get('description'),
                recipe_code=recipe_data.get('recipe_code'),
                version=recipe_data.get('version', '1.0'),
                status=status,
                finished_product_id=recipe_data['finished_product_id'],
                yield_quantity=yield_quantity,
                yield_unit=yield_unit,
                category=recipe_data.get('category'),
                difficulty_level=recipe_data.get('difficulty_level', 1),
                prep_time_minutes=recipe_data.get('prep_time_minutes') or recipe_data.get('preparation_time_minutes'),
                cook_time_minutes=recipe_data.get('cook_time_minutes') or recipe_data.get('baking_time_minutes'),
                total_time_minutes=recipe_data.get('total_time_minutes'),
                rest_time_minutes=recipe_data.get('rest_time_minutes') or recipe_data.get('cooling_time_minutes'),
                instructions=recipe_data.get('instructions'),
                preparation_notes=recipe_data.get('notes') or recipe_data.get('preparation_notes'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_recipe)
            stats["recipes"] += 1

            # Add recipe ID to map for ingredients
            recipe_id_map[recipe_data['id']] = str(transformed_id)

        # Create Recipe Ingredients
        for recipe_ingredient_data in seed_data.get('recipe_ingredients', []):
            # Transform ingredient ID using XOR
            try:
                ingredient_uuid = uuid.UUID(recipe_ingredient_data['id'])
                transformed_id = transform_id(ingredient_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe ingredient UUID",
                             ingredient_id=recipe_ingredient_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe ingredient data: {str(e)}"
                )

            # Get the transformed recipe ID
            recipe_id = recipe_id_map.get(recipe_ingredient_data['recipe_id'])
            if not recipe_id:
                logger.error("Recipe not found for ingredient",
                             recipe_id=recipe_ingredient_data['recipe_id'])
                continue

            # Convert unit string to enum if needed
            unit_str = recipe_ingredient_data.get('unit', 'KILOGRAMS')
            if isinstance(unit_str, str):
                try:
                    unit = MeasurementUnit[unit_str.upper()]
                except KeyError:
                    # Try without 'S' for singular forms
                    try:
                        unit = MeasurementUnit[unit_str.upper().rstrip('S')]
                    except KeyError:
                        unit = MeasurementUnit.KILOGRAMS
            else:
                unit = unit_str

            new_recipe_ingredient = RecipeIngredient(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                recipe_id=recipe_id,
                ingredient_id=recipe_ingredient_data['ingredient_id'],
                quantity=recipe_ingredient_data['quantity'],
                unit=unit,
                unit_cost=recipe_ingredient_data.get('cost_per_unit') or recipe_ingredient_data.get('unit_cost', 0.0),
                total_cost=recipe_ingredient_data.get('total_cost'),
                ingredient_order=recipe_ingredient_data.get('sequence') or recipe_ingredient_data.get('ingredient_order', 1),
                is_optional=recipe_ingredient_data.get('is_optional', False),
                ingredient_notes=recipe_ingredient_data.get('notes') or recipe_ingredient_data.get('ingredient_notes')
            )
            db.add(new_recipe_ingredient)
            stats["recipe_ingredients"] += 1

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipes data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            records_cloned=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "recipes",
            "status": "completed",
            "records_cloned": sum(stats.values()),
            "duration_ms": duration_ms,
            "details": {
                "recipes": stats["recipes"],
                "recipe_ingredients": stats["recipe_ingredients"],
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone recipes data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "recipes",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }


@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint.
    Used by orchestrator to verify service availability.
    """
    return {
        "service": "recipes",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
    start_time = datetime.now(timezone.utc)

    records_deleted = {
        "recipes": 0,
        "recipe_ingredients": 0,
        "total": 0
    }

    try:
        # Delete in reverse dependency order

        # 1. Delete recipe ingredients (depends on recipes)
        result = await db.execute(
            delete(RecipeIngredient)
            .where(RecipeIngredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["recipe_ingredients"] = result.rowcount

        # 2. Delete recipes
        result = await db.execute(
            delete(Recipe)
            .where(Recipe.tenant_id == virtual_tenant_id)
        )
        records_deleted["recipes"] = result.rowcount

        records_deleted["total"] = sum(records_deleted.values())

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "recipes",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
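The clone endpoint above delegates ID mapping to shared.utils.demo_id_transformer.transform_id, which is not part of this commit. Below is a minimal sketch of one way an XOR-based, reversible UUID transformation could work, purely as an illustration of the idea described in the docstring; it is not the shared implementation, and the real helper may additionally normalize UUID version/variant bits.

# Illustrative sketch of an XOR-based UUID transformation (assumption:
# not the shared demo_id_transformer implementation, which is outside this commit).
import uuid

def xor_transform_id(source_id: uuid.UUID, virtual_tenant_id: uuid.UUID) -> uuid.UUID:
    """XOR the 128-bit integer values of a seed ID and the virtual tenant ID.

    Applying the same call to the result maps it back to the original, so the
    mapping is deterministic and reversible per virtual tenant.
    """
    return uuid.UUID(int=source_id.int ^ virtual_tenant_id.int)

seed = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # DEMO_TENANT_PROFESSIONAL seed
virtual = uuid.uuid4()
cloned = xor_transform_id(seed, virtual)
assert xor_transform_id(cloned, virtual) == seed  # round-trips back to the seed ID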
306
services/recipes/app/api/recipe_operations.py
Normal file
@@ -0,0 +1,306 @@
# services/recipes/app/api/recipe_operations.py
"""
Recipe Operations API - Business operations and complex workflows
"""

from fastapi import APIRouter, Depends, HTTPException, Header, Query, Path
from sqlalchemy.ext.asyncio import AsyncSession
from uuid import UUID
import logging

from ..core.database import get_db
from ..services.recipe_service import RecipeService
from ..schemas.recipes import (
    RecipeResponse,
    RecipeDuplicateRequest,
    RecipeFeasibilityResponse,
    RecipeStatisticsResponse,
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.auth.decorators import get_current_user_dep

route_builder = RouteBuilder('recipes')
logger = logging.getLogger(__name__)
router = APIRouter(tags=["recipe-operations"])


def get_user_id(x_user_id: str = Header(...)) -> UUID:
    """Extract user ID from header"""
    try:
        return UUID(x_user_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid user ID format")


@router.post(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "duplicate"]),
    response_model=RecipeResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def duplicate_recipe(
    tenant_id: UUID,
    recipe_id: UUID,
    duplicate_data: RecipeDuplicateRequest,
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Create a duplicate of an existing recipe"""
    try:
        recipe_service = RecipeService(db)

        existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
        if not existing_recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        if existing_recipe["tenant_id"] != str(tenant_id):
            raise HTTPException(status_code=403, detail="Access denied")

        result = await recipe_service.duplicate_recipe(
            recipe_id,
            duplicate_data.new_name,
            user_id
        )

        if not result["success"]:
            raise HTTPException(status_code=400, detail=result["error"])

        return RecipeResponse(**result["data"])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error duplicating recipe {recipe_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "activate"]),
    response_model=RecipeResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def activate_recipe(
    tenant_id: UUID,
    recipe_id: UUID,
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Activate a recipe for production"""
    try:
        recipe_service = RecipeService(db)

        existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
        if not existing_recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        if existing_recipe["tenant_id"] != str(tenant_id):
            raise HTTPException(status_code=403, detail="Access denied")

        result = await recipe_service.activate_recipe(recipe_id, user_id)

        if not result["success"]:
            raise HTTPException(status_code=400, detail=result["error"])

        return RecipeResponse(**result["data"])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error activating recipe {recipe_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "feasibility"]),
    response_model=RecipeFeasibilityResponse
)
@analytics_tier_required
async def check_recipe_feasibility(
    tenant_id: UUID,
    recipe_id: UUID,
    batch_multiplier: float = Query(1.0, gt=0),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Check if recipe can be produced with current inventory (Professional+ tier).
    Supports batch scaling for production planning.
    """
    try:
        recipe_service = RecipeService(db)

        existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
        if not existing_recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        if existing_recipe["tenant_id"] != str(tenant_id):
            raise HTTPException(status_code=403, detail="Access denied")

        result = await recipe_service.check_recipe_feasibility(recipe_id, batch_multiplier)

        if not result["success"]:
            raise HTTPException(status_code=400, detail=result["error"])

        return RecipeFeasibilityResponse(**result["data"])

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error checking recipe feasibility {recipe_id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
    route_builder.build_dashboard_route("statistics"),
    response_model=RecipeStatisticsResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_recipe_statistics(
    tenant_id: UUID,
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get recipe statistics for dashboard"""
    try:
        recipe_service = RecipeService(db)
        stats = await recipe_service.get_recipe_statistics(tenant_id)

        return RecipeStatisticsResponse(**stats)

    except Exception as e:
        logger.error(f"Error getting recipe statistics: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
    route_builder.build_custom_route(RouteCategory.BASE, ["categories", "list"])
)
async def get_recipe_categories(
    tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Get list of recipe categories used by tenant"""
    try:
        recipe_service = RecipeService(db)

        recipes = await recipe_service.search_recipes(tenant_id, limit=1000)
        categories = list(set(recipe["category"] for recipe in recipes if recipe["category"]))
        categories.sort()

        return {"categories": categories}

    except Exception as e:
        logger.error(f"Error getting recipe categories: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
    route_builder.build_custom_route(RouteCategory.BASE, ["count"])
)
async def get_recipe_count(
    tenant_id: UUID,
    x_internal_request: str = Header(None),
    db: AsyncSession = Depends(get_db)
):
    """
    Get total count of recipes for a tenant.
    Internal endpoint for subscription usage tracking.
    """
    if x_internal_request != "true":
        raise HTTPException(status_code=403, detail="Internal endpoint only")

    try:
        recipe_service = RecipeService(db)
        recipes = await recipe_service.search_recipes(tenant_id, limit=10000)
        count = len(recipes)

        return {"count": count}

    except Exception as e:
        logger.error(f"Error getting recipe count: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================

from shared.auth.access_control import service_only_access
from shared.services.tenant_deletion import TenantDataDeletionResult
from app.services.tenant_deletion_service import RecipesTenantDeletionService


@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all recipes data for a tenant (Internal service only)
    """
    try:
        logger.info(f"recipes.tenant_deletion.api_called - tenant_id: {tenant_id}")

        deletion_service = RecipesTenantDeletionService(db)
        result = await deletion_service.safe_delete_tenant_data(tenant_id)

        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": result.to_dict()
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"recipes.tenant_deletion.api_error - tenant_id: {tenant_id}, error: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to delete tenant data: {str(e)}")


@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run)
    """
    try:
        logger.info(f"recipes.tenant_deletion.preview_called - tenant_id: {tenant_id}")

        deletion_service = RecipesTenantDeletionService(db)
        preview_data = await deletion_service.get_tenant_data_preview(tenant_id)
        result = TenantDataDeletionResult(tenant_id=tenant_id, service_name=deletion_service.service_name)
        result.deleted_counts = preview_data
        result.success = True

        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant deletion preview failed: {', '.join(result.errors)}"
            )

        return {
            "tenant_id": tenant_id,
            "service": "recipes-service",
            "data_counts": result.deleted_counts,
            "total_items": sum(result.deleted_counts.values())
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"recipes.tenant_deletion.preview_error - tenant_id: {tenant_id}, error: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to preview tenant data deletion: {str(e)}")
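Several routers in this commit share the same x-user-id header dependency (get_user_id). Below is a small standalone sketch of its behaviour - a well-formed UUID is accepted, a malformed value is rejected with 400. The /whoami route and the app object are illustrative only, not part of the recipes service.

# Standalone illustration of the x-user-id header dependency used above.
# The /whoami route is a toy endpoint, not one of the service's real routes.
from uuid import UUID
from fastapi import Depends, FastAPI, Header, HTTPException
from fastapi.testclient import TestClient

app = FastAPI()

def get_user_id(x_user_id: str = Header(...)) -> UUID:
    """Same pattern as the routers above: parse the x-user-id header or fail with 400."""
    try:
        return UUID(x_user_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid user ID format")

@app.get("/whoami")
def whoami(user_id: UUID = Depends(get_user_id)):
    return {"user_id": str(user_id)}

client = TestClient(app)
assert client.get("/whoami", headers={"x-user-id": "not-a-uuid"}).status_code == 400
assert client.get(
    "/whoami", headers={"x-user-id": "12345678-1234-5678-1234-567812345678"}
).status_code == 200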
166
services/recipes/app/api/recipe_quality_configs.py
Normal file
@@ -0,0 +1,166 @@
# services/recipes/app/api/recipe_quality_configs.py
"""
Recipe Quality Configuration API - Atomic CRUD operations on RecipeQualityConfiguration
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List
from uuid import UUID
import logging

from ..core.database import get_db
from ..services.recipe_service import RecipeService
from ..schemas.recipes import (
    RecipeQualityConfiguration,
    RecipeQualityConfigurationUpdate
)
from shared.routing import RouteBuilder, RouteCategory
from shared.auth.access_control import require_user_role

route_builder = RouteBuilder('recipes')
logger = logging.getLogger(__name__)
router = APIRouter(tags=["recipe-quality-configs"])


def get_user_id(x_user_id: str = Header(...)) -> UUID:
    """Extract user ID from header"""
    try:
        return UUID(x_user_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid user ID format")


@router.get(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "quality-configuration"]),
    response_model=RecipeQualityConfiguration
)
async def get_recipe_quality_configuration(
    tenant_id: UUID,
    recipe_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Get quality configuration for a specific recipe"""
    try:
        recipe_service = RecipeService(db)

        recipe = await recipe_service.get_recipe(tenant_id, recipe_id)
        if not recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        quality_config = recipe.get("quality_check_configuration")
        if not quality_config:
            quality_config = {
                "stages": {},
                "overall_quality_threshold": 7.0,
                "critical_stage_blocking": True,
                "auto_create_quality_checks": True,
                "quality_manager_approval_required": False
            }

        return quality_config

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting recipe quality configuration: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.put(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "quality-configuration"]),
    response_model=RecipeQualityConfiguration
)
@require_user_role(['admin', 'owner', 'member'])
async def update_recipe_quality_configuration(
    tenant_id: UUID,
    recipe_id: UUID,
    quality_config: RecipeQualityConfigurationUpdate,
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Update quality configuration for a specific recipe"""
    try:
        recipe_service = RecipeService(db)

        recipe = await recipe_service.get_recipe(tenant_id, recipe_id)
        if not recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        updated_recipe = await recipe_service.update_recipe_quality_configuration(
            tenant_id, recipe_id, quality_config.dict(exclude_unset=True), user_id
        )

        return updated_recipe["quality_check_configuration"]

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating recipe quality configuration: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "quality-configuration", "stages", "{stage}", "templates"])
)
@require_user_role(['admin', 'owner', 'member'])
async def add_quality_templates_to_stage(
    tenant_id: UUID,
    recipe_id: UUID,
    stage: str,
    template_ids: List[UUID],
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Add quality templates to a specific recipe stage"""
    try:
        recipe_service = RecipeService(db)

        recipe = await recipe_service.get_recipe(tenant_id, recipe_id)
        if not recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        await recipe_service.add_quality_templates_to_stage(
            tenant_id, recipe_id, stage, template_ids, user_id
        )

        return {"message": f"Added {len(template_ids)} templates to {stage} stage"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error adding quality templates to recipe stage: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.delete(
    route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}", "quality-configuration", "stages", "{stage}", "templates", "{template_id}"])
)
@require_user_role(['admin', 'owner'])
async def remove_quality_template_from_stage(
    tenant_id: UUID,
    recipe_id: UUID,
    stage: str,
    template_id: UUID,
    user_id: UUID = Depends(get_user_id),
    db: AsyncSession = Depends(get_db)
):
    """Remove a quality template from a specific recipe stage"""
    try:
        recipe_service = RecipeService(db)

        recipe = await recipe_service.get_recipe(tenant_id, recipe_id)
        if not recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")

        await recipe_service.remove_quality_template_from_stage(
            tenant_id, recipe_id, stage, template_id, user_id
        )

        return {"message": f"Removed template from {stage} stage"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error removing quality template from recipe stage: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
504
services/recipes/app/api/recipes.py
Normal file
504
services/recipes/app/api/recipes.py
Normal file
@@ -0,0 +1,504 @@
|
||||
# services/recipes/app/api/recipes.py
|
||||
"""
|
||||
Recipes API - Atomic CRUD operations on Recipe model
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, Query, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
import logging
|
||||
import httpx
|
||||
|
||||
from ..core.database import get_db
|
||||
from ..services.recipe_service import RecipeService
|
||||
from ..schemas.recipes import (
|
||||
RecipeCreate,
|
||||
RecipeUpdate,
|
||||
RecipeResponse,
|
||||
)
|
||||
from ..models import AuditLog
|
||||
from shared.routing import RouteBuilder, RouteCategory
|
||||
from shared.auth.access_control import require_user_role, service_only_access
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from shared.services.tenant_deletion import TenantDataDeletionResult
|
||||
|
||||
route_builder = RouteBuilder('recipes')
|
||||
logger = logging.getLogger(__name__)
|
||||
audit_logger = create_audit_logger("recipes-service", AuditLog)
|
||||
router = APIRouter(tags=["recipes"])
|
||||
|
||||
|
||||
def get_user_id(x_user_id: str = Header(...)) -> UUID:
|
||||
"""Extract user ID from header"""
|
||||
try:
|
||||
return UUID(x_user_id)
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=400, detail="Invalid user ID format")
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, []),
|
||||
response_model=RecipeResponse
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def create_recipe(
|
||||
tenant_id: UUID,
|
||||
recipe_data: RecipeCreate,
|
||||
user_id: UUID = Depends(get_user_id),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Create a new recipe"""
|
||||
try:
|
||||
# CRITICAL: Check subscription limit before creating
|
||||
from ..core.config import settings
|
||||
|
||||
async with httpx.AsyncClient(timeout=5.0) as client:
|
||||
try:
|
||||
# Check recipe limit (not product limit)
|
||||
limit_check_response = await client.get(
|
||||
f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/recipes/can-add",
|
||||
headers={
|
||||
"x-user-id": str(current_user.get('user_id')),
|
||||
"x-tenant-id": str(tenant_id)
|
||||
}
|
||||
)
|
||||
|
||||
if limit_check_response.status_code == 200:
|
||||
limit_check = limit_check_response.json()
|
||||
|
||||
if not limit_check.get('can_add', False):
|
||||
logger.warning(
|
||||
f"Recipe limit exceeded for tenant {tenant_id}: {limit_check.get('reason')}"
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=402,
|
||||
detail={
|
||||
"error": "recipe_limit_exceeded",
|
||||
"message": limit_check.get('reason', 'Recipe limit exceeded'),
|
||||
"current_count": limit_check.get('current_count'),
|
||||
"max_allowed": limit_check.get('max_allowed'),
|
||||
"upgrade_required": True
|
||||
}
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f"Failed to check recipe limit for tenant {tenant_id}, allowing creation"
|
||||
)
|
||||
except httpx.TimeoutException:
|
||||
logger.warning(f"Timeout checking recipe limit for tenant {tenant_id}, allowing creation")
|
||||
except httpx.RequestError as e:
|
||||
logger.warning(f"Error checking recipe limit for tenant {tenant_id}: {e}, allowing creation")
|
||||
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
recipe_dict = recipe_data.dict(exclude={"ingredients"})
|
||||
recipe_dict["tenant_id"] = tenant_id
|
||||
|
||||
ingredients_list = [ing.dict() for ing in recipe_data.ingredients]
|
||||
|
||||
result = await recipe_service.create_recipe(
|
||||
recipe_dict,
|
||||
ingredients_list,
|
||||
user_id
|
||||
)
|
||||
|
||||
if not result["success"]:
|
||||
raise HTTPException(status_code=400, detail=result["error"])
|
||||
|
||||
logger.info(f"Recipe created successfully for tenant {tenant_id}: {result['data'].get('name')}")
|
||||
|
||||
return RecipeResponse(**result["data"])
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating recipe: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, []),
|
||||
response_model=List[RecipeResponse]
|
||||
)
|
||||
async def search_recipes(
|
||||
tenant_id: UUID,
|
||||
search_term: Optional[str] = Query(None),
|
||||
status: Optional[str] = Query(None),
|
||||
category: Optional[str] = Query(None),
|
||||
is_seasonal: Optional[bool] = Query(None),
|
||||
is_signature: Optional[bool] = Query(None),
|
||||
difficulty_level: Optional[int] = Query(None, ge=1, le=5),
|
||||
limit: int = Query(100, ge=1, le=1000),
|
||||
offset: int = Query(0, ge=0),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Search recipes with filters"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
recipes = await recipe_service.search_recipes(
|
||||
tenant_id=tenant_id,
|
||||
search_term=search_term,
|
||||
status=status,
|
||||
category=category,
|
||||
is_seasonal=is_seasonal,
|
||||
is_signature=is_signature,
|
||||
difficulty_level=difficulty_level,
|
||||
limit=limit,
|
||||
offset=offset
|
||||
)
|
||||
|
||||
return [RecipeResponse(**recipe) for recipe in recipes]
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error searching recipes: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, ["count"]),
|
||||
response_model=dict
|
||||
)
|
||||
async def count_recipes(
|
||||
tenant_id: UUID,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get count of recipes for a tenant"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
# Use the search method with limit 0 to just get the count
|
||||
recipes = await recipe_service.search_recipes(
|
||||
tenant_id=tenant_id,
|
||||
limit=10000 # High limit to get all
|
||||
)
|
||||
|
||||
count = len(recipes)
|
||||
logger.info(f"Retrieved recipe count for tenant {tenant_id}: {count}")
|
||||
|
||||
return {"count": count}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error counting recipes for tenant: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}"]),
|
||||
response_model=RecipeResponse
|
||||
)
|
||||
async def get_recipe(
|
||||
tenant_id: UUID,
|
||||
recipe_id: UUID,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get recipe by ID with ingredients"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
|
||||
|
||||
if not recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
if recipe["tenant_id"] != str(tenant_id):
|
||||
raise HTTPException(status_code=403, detail="Access denied")
|
||||
|
||||
return RecipeResponse(**recipe)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting recipe {recipe_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.put(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}"]),
|
||||
response_model=RecipeResponse
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def update_recipe(
|
||||
tenant_id: UUID,
|
||||
recipe_id: UUID,
|
||||
recipe_data: RecipeUpdate,
|
||||
user_id: UUID = Depends(get_user_id),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Update an existing recipe"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
|
||||
if not existing_recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
if existing_recipe["tenant_id"] != str(tenant_id):
|
||||
raise HTTPException(status_code=403, detail="Access denied")
|
||||
|
||||
recipe_dict = recipe_data.dict(exclude={"ingredients"}, exclude_unset=True)
|
||||
|
||||
ingredients_list = None
|
||||
if recipe_data.ingredients is not None:
|
||||
ingredients_list = [ing.dict() for ing in recipe_data.ingredients]
|
||||
|
||||
result = await recipe_service.update_recipe(
|
||||
recipe_id,
|
||||
recipe_dict,
|
||||
ingredients_list,
|
||||
user_id
|
||||
)
|
||||
|
||||
if not result["success"]:
|
||||
raise HTTPException(status_code=400, detail=result["error"])
|
||||
|
||||
return RecipeResponse(**result["data"])
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating recipe {recipe_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.delete(
|
||||
route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}"])
|
||||
)
|
||||
@require_user_role(['admin', 'owner'])
|
||||
async def delete_recipe(
|
||||
tenant_id: UUID,
|
||||
recipe_id: UUID,
|
||||
user_id: UUID = Depends(get_user_id),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Delete a recipe (Admin+ only)"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
|
||||
if not existing_recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
if existing_recipe["tenant_id"] != str(tenant_id):
|
||||
raise HTTPException(status_code=403, detail="Access denied")
|
||||
|
||||
# Check if deletion is safe
|
||||
summary = await recipe_service.get_deletion_summary(recipe_id)
|
||||
if not summary["success"]:
|
||||
raise HTTPException(status_code=500, detail=summary["error"])
|
||||
|
||||
if not summary["data"]["can_delete"]:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={
|
||||
"message": "Cannot delete recipe with active dependencies",
|
||||
"warnings": summary["data"]["warnings"]
|
||||
}
|
||||
)
|
||||
|
||||
# Capture recipe data before deletion
|
||||
recipe_data = {
|
||||
"recipe_name": existing_recipe.get("name"),
|
||||
"category": existing_recipe.get("category"),
|
||||
"difficulty_level": existing_recipe.get("difficulty_level"),
|
||||
"ingredient_count": len(existing_recipe.get("ingredients", []))
|
||||
}
|
||||
|
||||
success = await recipe_service.delete_recipe(recipe_id)
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
# Log audit event for recipe deletion
|
||||
try:
|
||||
# Get sync db for audit logging
|
||||
from ..core.database import SessionLocal
|
||||
sync_db = SessionLocal()
|
||||
try:
|
||||
await audit_logger.log_deletion(
|
||||
db_session=sync_db,
|
||||
tenant_id=str(tenant_id),
|
||||
user_id=str(user_id),
|
||||
resource_type="recipe",
|
||||
resource_id=str(recipe_id),
|
||||
resource_data=recipe_data,
|
||||
description=f"Admin deleted recipe {recipe_data['recipe_name']}",
|
||||
endpoint=f"/recipes/{recipe_id}",
|
||||
method="DELETE"
|
||||
)
|
||||
sync_db.commit()
|
||||
finally:
|
||||
sync_db.close()
|
||||
except Exception as audit_error:
|
||||
logger.warning(f"Failed to log audit event: {audit_error}")
|
||||
|
||||
logger.info(f"Deleted recipe {recipe_id} by user {user_id}")
|
||||
|
||||
return {"message": "Recipe deleted successfully"}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting recipe {recipe_id}: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.patch(
|
||||
route_builder.build_custom_route(RouteCategory.OPERATIONS, ["{recipe_id}", "archive"])
|
||||
)
|
||||
@require_user_role(['admin', 'owner'])
|
||||
async def archive_recipe(
|
||||
tenant_id: UUID,
|
||||
recipe_id: UUID,
|
||||
user_id: UUID = Depends(get_user_id),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Archive (soft delete) a recipe by setting status to ARCHIVED"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
|
||||
if not existing_recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
if existing_recipe["tenant_id"] != str(tenant_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
# Check status transitions (business rule)
|
||||
current_status = existing_recipe.get("status")
|
||||
if current_status == "DISCONTINUED":
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Cannot archive a discontinued recipe. Use hard delete instead."
|
||||
)
|
||||
|
||||
# Update status to ARCHIVED
|
||||
from ..schemas.recipes import RecipeUpdate, RecipeStatus
|
||||
update_data = RecipeUpdate(status=RecipeStatus.ARCHIVED)
|
||||
|
||||
updated_recipe = await recipe_service.update_recipe(
|
||||
recipe_id,
|
||||
update_data.dict(exclude_unset=True),
|
||||
user_id
|
||||
)
|
||||
|
||||
if not updated_recipe["success"]:
|
||||
raise HTTPException(status_code=400, detail=updated_recipe["error"])
|
||||
|
||||
logger.info(f"Archived recipe {recipe_id} by user {user_id}")
|
||||
return RecipeResponse(**updated_recipe["data"])
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error archiving recipe: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_custom_route(RouteCategory.OPERATIONS, ["{recipe_id}", "deletion-summary"])
|
||||
)
|
||||
@require_user_role(['admin', 'owner'])
|
||||
async def get_recipe_deletion_summary(
|
||||
tenant_id: UUID,
|
||||
recipe_id: UUID,
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get summary of what will be affected by deleting this recipe"""
|
||||
try:
|
||||
recipe_service = RecipeService(db)
|
||||
|
||||
existing_recipe = await recipe_service.get_recipe_with_ingredients(recipe_id)
|
||||
if not existing_recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
|
||||
if existing_recipe["tenant_id"] != str(tenant_id):
|
||||
raise HTTPException(status_code=403, detail="Not authorized")
|
||||
|
||||
summary = await recipe_service.get_deletion_summary(recipe_id)
|
||||
|
||||
if not summary["success"]:
|
||||
raise HTTPException(status_code=500, detail=summary["error"])
|
||||
|
||||
return summary["data"]
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting deletion summary: {e}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
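
# Typical admin flow, as a sketch only: call the deletion-summary endpoint first and
# delete the recipe only when no blocking dependencies are reported. The response keys
# ("can_delete", "warnings") follow the checks used by the delete endpoint above; the
# HTTP client, base URL and auth handling here are assumptions, not part of this service.
#
#   import httpx
#
#   async def delete_recipe_safely(client: httpx.AsyncClient, base: str, recipe_id: str) -> bool:
#       summary = (await client.get(f"{base}/{recipe_id}/deletion-summary")).json()
#       if not summary.get("can_delete"):
#           return False  # surface summary.get("warnings") to the operator instead
#       await client.delete(f"{base}/{recipe_id}")
#       return True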
|
||||
|
||||
|
||||
# ===== Tenant Data Deletion Endpoints =====
|
||||
|
||||
@router.delete("/tenant/{tenant_id}")
|
||||
@service_only_access
|
||||
async def delete_tenant_data(
|
||||
tenant_id: str,
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Delete all recipe-related data for a tenant
|
||||
Only accessible by internal services (called during tenant deletion)
|
||||
"""
|
||||
|
||||
logger.info(f"Tenant data deletion request received for tenant: {tenant_id}")
|
||||
|
||||
try:
|
||||
from app.services.tenant_deletion_service import RecipesTenantDeletionService
|
||||
|
||||
deletion_service = RecipesTenantDeletionService(db)
|
||||
result = await deletion_service.safe_delete_tenant_data(tenant_id)
|
||||
|
||||
return {
|
||||
"message": "Tenant data deletion completed in recipes-service",
|
||||
"summary": result.to_dict()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Tenant data deletion failed for {tenant_id}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to delete tenant data: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/tenant/{tenant_id}/deletion-preview")
|
||||
@service_only_access
|
||||
async def preview_tenant_data_deletion(
|
||||
tenant_id: str,
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
    Preview what data would be deleted for a tenant (dry-run).
    Only accessible by internal services (guarded by service_only_access, like the deletion endpoint above).
|
||||
"""
|
||||
|
||||
try:
|
||||
from app.services.tenant_deletion_service import RecipesTenantDeletionService
|
||||
|
||||
deletion_service = RecipesTenantDeletionService(db)
|
||||
preview = await deletion_service.get_tenant_data_preview(tenant_id)
|
||||
|
||||
return {
|
||||
"tenant_id": tenant_id,
|
||||
"service": "recipes-service",
|
||||
"data_counts": preview,
|
||||
"total_items": sum(preview.values())
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Deletion preview failed for {tenant_id}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to get deletion preview: {str(e)}"
|
||||
)
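
# Call sketch for internal services (illustrative only): the path is relative to how this
# router is mounted, and the service base URL and service-to-service auth headers are
# assumptions that depend on the deployment and the shared auth setup.
#
#   import httpx
#
#   async def preview_recipes_tenant_deletion(tenant_id: str, headers: dict) -> dict:
#       async with httpx.AsyncClient(base_url="http://recipes-service:8000") as client:
#           resp = await client.get(f"/tenant/{tenant_id}/deletion-preview", headers=headers)
#           resp.raise_for_status()
#           return resp.json()  # {"tenant_id": ..., "data_counts": {...}, "total_items": N}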
|
||||
1
services/recipes/app/core/__init__.py
Normal file
1
services/recipes/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/recipes/app/core/__init__.py
|
||||
77
services/recipes/app/core/config.py
Normal file
77
services/recipes/app/core/config.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# services/recipes/app/core/config.py
|
||||
"""
|
||||
Configuration management for Recipe Service
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import Optional
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
|
||||
class Settings(BaseServiceSettings):
|
||||
"""Recipe service configuration extending base configuration"""
|
||||
|
||||
# Override service-specific settings
|
||||
SERVICE_NAME: str = "recipes-service"
|
||||
VERSION: str = "1.0.0"
|
||||
APP_NAME: str = "Recipe Service"
|
||||
DESCRIPTION: str = "Recipe management and planning service"
|
||||
|
||||
# API Configuration
|
||||
API_V1_STR: str = "/api/v1"
|
||||
|
||||
# Database configuration (secure approach - build from components)
|
||||
@property
|
||||
def DATABASE_URL(self) -> str:
|
||||
"""Build database URL from secure components"""
|
||||
# Try complete URL first (for backward compatibility)
|
||||
complete_url = os.getenv("RECIPES_DATABASE_URL")
|
||||
if complete_url:
|
||||
return complete_url
|
||||
|
||||
# Build from components (secure approach)
|
||||
user = os.getenv("RECIPES_DB_USER", "recipes_user")
|
||||
password = os.getenv("RECIPES_DB_PASSWORD", "recipes_pass123")  # fallback for local development only; override via environment/secret in deployments
|
||||
host = os.getenv("RECIPES_DB_HOST", "localhost")
|
||||
port = os.getenv("RECIPES_DB_PORT", "5432")
|
||||
name = os.getenv("RECIPES_DB_NAME", "recipes_db")
|
||||
|
||||
return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
|
||||
|
||||
# Redis configuration - use a specific database number
|
||||
REDIS_DB: int = 2
|
||||
|
||||
# Recipe-specific settings
|
||||
MAX_RECIPE_INGREDIENTS: int = int(os.getenv("MAX_RECIPE_INGREDIENTS", "50"))
|
||||
MAX_BATCH_SIZE_MULTIPLIER: float = float(os.getenv("MAX_BATCH_SIZE_MULTIPLIER", "10.0"))
|
||||
DEFAULT_RECIPE_VERSION: str = "1.0"
|
||||
|
||||
# Production settings (integration with production service)
|
||||
MAX_PRODUCTION_BATCHES_PER_DAY: int = int(os.getenv("MAX_PRODUCTION_BATCHES_PER_DAY", "100"))
|
||||
PRODUCTION_SCHEDULE_DAYS_AHEAD: int = int(os.getenv("PRODUCTION_SCHEDULE_DAYS_AHEAD", "7"))
|
||||
|
||||
# Cost calculation settings
|
||||
OVERHEAD_PERCENTAGE: float = float(os.getenv("OVERHEAD_PERCENTAGE", "15.0")) # Default 15% overhead
|
||||
LABOR_COST_PER_HOUR: float = float(os.getenv("LABOR_COST_PER_HOUR", "25.0")) # Default €25/hour
|
||||
|
||||
# Quality control
|
||||
MIN_QUALITY_SCORE: float = float(os.getenv("MIN_QUALITY_SCORE", "6.0")) # Minimum acceptable quality score
|
||||
MAX_DEFECT_RATE: float = float(os.getenv("MAX_DEFECT_RATE", "5.0")) # Maximum 5% defect rate
|
||||
|
||||
# External service URLs (specific to recipes service)
|
||||
PRODUCTION_SERVICE_URL: str = os.getenv(
|
||||
"PRODUCTION_SERVICE_URL",
|
||||
"http://production-service:8000"
|
||||
)
|
||||
INVENTORY_SERVICE_URL: str = os.getenv(
|
||||
"INVENTORY_SERVICE_URL",
|
||||
"http://inventory-service:8000"
|
||||
)
|
||||
SALES_SERVICE_URL: str = os.getenv(
|
||||
"SALES_SERVICE_URL",
|
||||
"http://sales-service:8000"
|
||||
)
|
||||
|
||||
|
||||
# Global settings instance
|
||||
settings = Settings()
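
# Example resolution (sketch): with only the component variables set, e.g.
#   RECIPES_DB_USER=recipes_user RECIPES_DB_PASSWORD=... RECIPES_DB_HOST=db RECIPES_DB_NAME=recipes_db
# the DATABASE_URL property above yields
#   postgresql+asyncpg://recipes_user:...@db:5432/recipes_db
# whereas RECIPES_DATABASE_URL, when present, is returned unchanged.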
|
||||
25
services/recipes/app/core/database.py
Normal file
25
services/recipes/app/core/database.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# services/recipes/app/core/database.py
|
||||
"""
|
||||
Database configuration and session management for Recipe Service
|
||||
"""
|
||||
|
||||
from shared.database.base import DatabaseManager, create_database_manager
|
||||
from .config import settings
|
||||
|
||||
# Create database manager using shared async infrastructure
|
||||
db_manager = create_database_manager(
|
||||
database_url=settings.DATABASE_URL,
|
||||
service_name="recipes-service",
|
||||
echo=settings.DEBUG
|
||||
)
|
||||
|
||||
# Dependency for FastAPI routes
|
||||
async def get_db():
|
||||
"""FastAPI dependency to get database session"""
|
||||
async for session in db_manager.get_db():
|
||||
yield session
|
||||
|
||||
# Initialize database
|
||||
async def init_database():
|
||||
"""Initialize database tables"""
|
||||
await db_manager.create_tables()
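
# Usage sketch (illustrative; the route path and handler are placeholders, not part of
# this service):
#
#   from fastapi import APIRouter, Depends
#   from sqlalchemy.ext.asyncio import AsyncSession
#
#   router = APIRouter()
#
#   @router.get("/example")
#   async def example_endpoint(db: AsyncSession = Depends(get_db)):
#       ...  # the session is opened per request and released when the generator finishes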
|
||||
136
services/recipes/app/main.py
Normal file
136
services/recipes/app/main.py
Normal file
@@ -0,0 +1,136 @@
|
||||
# services/recipes/app/main.py
|
||||
"""
|
||||
Recipe Service - FastAPI application
|
||||
Handles recipe management, production planning, and inventory consumption tracking
|
||||
"""
|
||||
|
||||
import time
|
||||
from fastapi import FastAPI, Request
|
||||
from sqlalchemy import text
|
||||
from fastapi.middleware.gzip import GZipMiddleware
|
||||
|
||||
from .core.config import settings
|
||||
from .core.database import db_manager
|
||||
from shared.service_base import StandardFastAPIService
|
||||
|
||||
# Import API routers
|
||||
from .api import recipes, recipe_quality_configs, recipe_operations, audit, internal_demo, internal
|
||||
|
||||
# Import models to register them with SQLAlchemy metadata
|
||||
from .models import recipes as recipe_models
|
||||
|
||||
|
||||
class RecipesService(StandardFastAPIService):
|
||||
"""Recipes Service with standardized setup"""
|
||||
|
||||
expected_migration_version = "00001"
|
||||
|
||||
async def verify_migrations(self):
|
||||
"""Verify database schema matches the latest migrations."""
|
||||
try:
|
||||
async with self.database_manager.get_session() as session:
|
||||
result = await session.execute(text("SELECT version_num FROM alembic_version"))
|
||||
version = result.scalar()
|
||||
if version != self.expected_migration_version:
|
||||
self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
|
||||
raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
|
||||
self.logger.info(f"Migration verification successful: {version}")
|
||||
except Exception as e:
|
||||
self.logger.error(f"Migration verification failed: {e}")
|
||||
raise
|
||||
|
||||
def __init__(self):
|
||||
# Define expected database tables for health checks
|
||||
recipes_expected_tables = [
|
||||
'recipes', 'recipe_ingredients', 'production_batches',
|
||||
'production_ingredient_consumption', 'production_schedules'
|
||||
]
|
||||
|
||||
super().__init__(
|
||||
service_name="recipes-service",
|
||||
app_name="Recipe Management Service",
|
||||
description="Comprehensive recipe management, production planning, and inventory consumption tracking for bakery operations",
|
||||
version=settings.VERSION,
|
||||
log_level=settings.LOG_LEVEL,
|
||||
cors_origins=settings.CORS_ORIGINS,
|
||||
api_prefix="", # Empty because RouteBuilder already includes /api/v1
|
||||
database_manager=db_manager,
|
||||
expected_tables=recipes_expected_tables
|
||||
)
|
||||
|
||||
async def on_startup(self, app: FastAPI):
    """Custom startup logic for recipes service, including migration verification"""
    await self.verify_migrations()
    await super().on_startup(app)
|
||||
|
||||
async def on_shutdown(self, app: FastAPI):
|
||||
"""Custom shutdown logic for recipes service"""
|
||||
# Database cleanup is handled by the base class
|
||||
pass
|
||||
|
||||
def get_service_features(self):
|
||||
"""Return recipes-specific features"""
|
||||
return [
|
||||
"recipe_management",
|
||||
"production_planning",
|
||||
"inventory_consumption_tracking",
|
||||
"batch_production",
|
||||
"tenant_scoped_operations"
|
||||
]
|
||||
|
||||
def setup_custom_middleware(self):
|
||||
"""Setup custom middleware for recipes service"""
|
||||
# Add GZip middleware
|
||||
self.app.add_middleware(GZipMiddleware, minimum_size=1000)
|
||||
|
||||
# Request timing middleware
|
||||
@self.app.middleware("http")
|
||||
async def add_process_time_header(request: Request, call_next):
|
||||
"""Add processing time header to responses"""
|
||||
start_time = time.time()
|
||||
response = await call_next(request)
|
||||
process_time = time.time() - start_time
|
||||
response.headers["X-Process-Time"] = str(process_time)
|
||||
return response
|
||||
|
||||
|
||||
# Create service instance
|
||||
service = RecipesService()
|
||||
|
||||
# Create FastAPI app with standardized setup
|
||||
app = service.create_app(
|
||||
docs_url="/docs" if settings.DEBUG else None,
|
||||
redoc_url="/redoc" if settings.DEBUG else None
|
||||
)
|
||||
|
||||
# Setup standard endpoints
|
||||
service.setup_standard_endpoints()
|
||||
|
||||
# Setup custom middleware
|
||||
service.setup_custom_middleware()
|
||||
|
||||
# Include routers
|
||||
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
|
||||
# where {recipe_id} would match literal paths like "audit-logs"
|
||||
service.add_router(audit.router)
|
||||
service.add_router(recipes.router)
|
||||
service.add_router(recipe_quality_configs.router)
|
||||
service.add_router(recipe_operations.router)
|
||||
service.add_router(internal_demo.router, tags=["internal-demo"])
|
||||
service.add_router(internal.router)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run(
|
||||
"main:app",
|
||||
host="0.0.0.0",
|
||||
port=8000,
|
||||
reload=settings.DEBUG,
|
||||
log_level=settings.LOG_LEVEL.lower()
|
||||
)
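
# Quick check of the timing middleware (sketch; the path is an assumption based on the
# standard endpoints registered by StandardFastAPIService):
#
#   import httpx
#
#   resp = httpx.get("http://localhost:8000/health")
#   print(resp.headers.get("x-process-time"))  # e.g. "0.0012"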
|
||||
33
services/recipes/app/models/__init__.py
Normal file
33
services/recipes/app/models/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
||||
|
||||
# services/recipes/app/models/__init__.py

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
|
||||
|
||||
from .recipes import (
|
||||
Recipe,
|
||||
RecipeIngredient,
|
||||
ProductionBatch,
|
||||
ProductionIngredientConsumption,
|
||||
ProductionSchedule,
|
||||
RecipeStatus,
|
||||
ProductionStatus,
|
||||
MeasurementUnit,
|
||||
ProductionPriority
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"Recipe",
|
||||
"RecipeIngredient",
|
||||
"ProductionBatch",
|
||||
"ProductionIngredientConsumption",
|
||||
"ProductionSchedule",
|
||||
"RecipeStatus",
|
||||
"ProductionStatus",
|
||||
"MeasurementUnit",
|
||||
"ProductionPriority",
|
||||
"AuditLog"
|
||||
]
|
||||
531
services/recipes/app/models/recipes.py
Normal file
531
services/recipes/app/models/recipes.py
Normal file
@@ -0,0 +1,531 @@
|
||||
# services/recipes/app/models/recipes.py
|
||||
"""
|
||||
Recipe and Production Management models for Recipe Service
|
||||
Comprehensive recipe management, production tracking, and inventory consumption
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class RecipeStatus(enum.Enum):
|
||||
"""Recipe lifecycle status"""
|
||||
DRAFT = "DRAFT"
|
||||
ACTIVE = "ACTIVE"
|
||||
TESTING = "TESTING"
|
||||
ARCHIVED = "ARCHIVED"
|
||||
DISCONTINUED = "DISCONTINUED"
|
||||
|
||||
|
||||
class ProductionStatus(enum.Enum):
|
||||
"""Production batch status"""
|
||||
PLANNED = "PLANNED"
|
||||
IN_PROGRESS = "IN_PROGRESS"
|
||||
COMPLETED = "COMPLETED"
|
||||
FAILED = "FAILED"
|
||||
CANCELLED = "CANCELLED"
|
||||
|
||||
|
||||
class MeasurementUnit(enum.Enum):
|
||||
"""Units for recipe measurements"""
|
||||
GRAMS = "g"
|
||||
KILOGRAMS = "kg"
|
||||
MILLILITERS = "ml"
|
||||
LITERS = "l"
|
||||
CUPS = "cups"
|
||||
TABLESPOONS = "tbsp"
|
||||
TEASPOONS = "tsp"
|
||||
UNITS = "units"
|
||||
PIECES = "pieces"
|
||||
PERCENTAGE = "%"
|
||||
|
||||
|
||||
class ProductionPriority(enum.Enum):
|
||||
"""Production batch priority levels"""
|
||||
LOW = "low"
|
||||
MEDIUM = "medium"
|
||||
HIGH = "high"
|
||||
URGENT = "urgent"
|
||||
|
||||
|
||||
class Recipe(Base):
|
||||
"""Master recipe definitions"""
|
||||
__tablename__ = "recipes"
|
||||
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
|
||||
# Recipe identification
|
||||
name = Column(String(255), nullable=False, index=True)
|
||||
recipe_code = Column(String(100), nullable=True, index=True)
|
||||
version = Column(String(20), nullable=False, default="1.0")
|
||||
|
||||
# Product association
|
||||
finished_product_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Links to inventory ingredient with product_type=finished_product
|
||||
|
||||
# Recipe details
|
||||
description = Column(Text, nullable=True)
|
||||
category = Column(String(100), nullable=True, index=True) # bread, pastries, cakes, etc.
|
||||
cuisine_type = Column(String(100), nullable=True)
|
||||
difficulty_level = Column(Integer, nullable=False, default=1) # 1-5 scale
|
||||
|
||||
# Production metrics
|
||||
yield_quantity = Column(Float, nullable=False) # How many units this recipe produces
|
||||
yield_unit = Column(SQLEnum(MeasurementUnit), nullable=False)
|
||||
prep_time_minutes = Column(Integer, nullable=True)
|
||||
cook_time_minutes = Column(Integer, nullable=True)
|
||||
total_time_minutes = Column(Integer, nullable=True)
|
||||
rest_time_minutes = Column(Integer, nullable=True) # Rising time, cooling time, etc.
|
||||
|
||||
# Cost and pricing
|
||||
estimated_cost_per_unit = Column(Numeric(10, 2), nullable=True)
|
||||
last_calculated_cost = Column(Numeric(10, 2), nullable=True)
|
||||
cost_calculation_date = Column(DateTime(timezone=True), nullable=True)
|
||||
target_margin_percentage = Column(Float, nullable=True)
|
||||
suggested_selling_price = Column(Numeric(10, 2), nullable=True)
|
||||
|
||||
# Instructions and notes
|
||||
instructions = Column(JSONB, nullable=True) # Structured step-by-step instructions
|
||||
preparation_notes = Column(Text, nullable=True)
|
||||
storage_instructions = Column(Text, nullable=True)
|
||||
|
||||
# Recipe metadata
|
||||
serves_count = Column(Integer, nullable=True) # How many people/portions
|
||||
nutritional_info = Column(JSONB, nullable=True) # Calories, protein, etc.
|
||||
allergen_info = Column(JSONB, nullable=True) # List of allergens
|
||||
dietary_tags = Column(JSONB, nullable=True) # vegan, gluten-free, etc.
|
||||
|
||||
# Production settings
|
||||
batch_size_multiplier = Column(Float, nullable=False, default=1.0) # Standard batch multiplier
|
||||
minimum_batch_size = Column(Float, nullable=True)
|
||||
maximum_batch_size = Column(Float, nullable=True)
|
||||
optimal_production_temperature = Column(Float, nullable=True) # Celsius
|
||||
optimal_humidity = Column(Float, nullable=True) # Percentage
|
||||
|
||||
# Quality control
|
||||
quality_check_configuration = Column(JSONB, nullable=True) # Stage-based quality check config
|
||||
|
||||
# Status and lifecycle
|
||||
status = Column(SQLEnum(RecipeStatus), nullable=False, default=RecipeStatus.DRAFT, index=True)
|
||||
is_seasonal = Column(Boolean, default=False)
|
||||
season_start_month = Column(Integer, nullable=True) # 1-12
|
||||
season_end_month = Column(Integer, nullable=True) # 1-12
|
||||
is_signature_item = Column(Boolean, default=False)
|
||||
|
||||
# Audit fields
|
||||
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime(timezone=True),
|
||||
default=lambda: datetime.now(timezone.utc),
|
||||
onupdate=lambda: datetime.now(timezone.utc))
|
||||
created_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
updated_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
|
||||
# Relationships
|
||||
ingredients = relationship("RecipeIngredient", back_populates="recipe", cascade="all, delete-orphan")
|
||||
production_batches = relationship("ProductionBatch", back_populates="recipe", cascade="all, delete-orphan")
|
||||
|
||||
__table_args__ = (
|
||||
Index('idx_recipes_tenant_name', 'tenant_id', 'name'),
|
||||
Index('idx_recipes_tenant_product', 'tenant_id', 'finished_product_id'),
|
||||
Index('idx_recipes_status', 'tenant_id', 'status'),
|
||||
Index('idx_recipes_category', 'tenant_id', 'category', 'status'),
|
||||
Index('idx_recipes_seasonal', 'tenant_id', 'is_seasonal', 'season_start_month', 'season_end_month'),
|
||||
Index('idx_recipes_signature', 'tenant_id', 'is_signature_item', 'status'),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert model to dictionary for API responses"""
|
||||
return {
|
||||
'id': str(self.id),
|
||||
'tenant_id': str(self.tenant_id),
|
||||
'name': self.name,
|
||||
'recipe_code': self.recipe_code,
|
||||
'version': self.version,
|
||||
'finished_product_id': str(self.finished_product_id),
|
||||
'description': self.description,
|
||||
'category': self.category,
|
||||
'cuisine_type': self.cuisine_type,
|
||||
'difficulty_level': self.difficulty_level,
|
||||
'yield_quantity': self.yield_quantity,
|
||||
'yield_unit': self.yield_unit.value if self.yield_unit else None,
|
||||
'prep_time_minutes': self.prep_time_minutes,
|
||||
'cook_time_minutes': self.cook_time_minutes,
|
||||
'total_time_minutes': self.total_time_minutes,
|
||||
'rest_time_minutes': self.rest_time_minutes,
|
||||
'estimated_cost_per_unit': float(self.estimated_cost_per_unit) if self.estimated_cost_per_unit else None,
|
||||
'last_calculated_cost': float(self.last_calculated_cost) if self.last_calculated_cost else None,
|
||||
'cost_calculation_date': self.cost_calculation_date.isoformat() if self.cost_calculation_date else None,
|
||||
'target_margin_percentage': self.target_margin_percentage,
|
||||
'suggested_selling_price': float(self.suggested_selling_price) if self.suggested_selling_price else None,
|
||||
'instructions': self.instructions,
|
||||
'preparation_notes': self.preparation_notes,
|
||||
'storage_instructions': self.storage_instructions,
|
||||
'serves_count': self.serves_count,
|
||||
'nutritional_info': self.nutritional_info,
|
||||
'allergen_info': self.allergen_info,
|
||||
'dietary_tags': self.dietary_tags,
|
||||
'batch_size_multiplier': self.batch_size_multiplier,
|
||||
'minimum_batch_size': self.minimum_batch_size,
|
||||
'maximum_batch_size': self.maximum_batch_size,
|
||||
'optimal_production_temperature': self.optimal_production_temperature,
|
||||
'optimal_humidity': self.optimal_humidity,
|
||||
'quality_check_configuration': self.quality_check_configuration,
|
||||
'status': self.status.value if self.status else None,
|
||||
'is_seasonal': self.is_seasonal,
|
||||
'season_start_month': self.season_start_month,
|
||||
'season_end_month': self.season_end_month,
|
||||
'is_signature_item': self.is_signature_item,
|
||||
'created_at': self.created_at.isoformat() if self.created_at else None,
|
||||
'updated_at': self.updated_at.isoformat() if self.updated_at else None,
|
||||
'created_by': str(self.created_by) if self.created_by else None,
|
||||
'updated_by': str(self.updated_by) if self.updated_by else None,
|
||||
}
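
# Season handling sketch (illustrative helper, not defined on this model): months are
# 1-12 and a season may wrap around the year end (e.g. November through February).
#
#   def is_in_season(recipe: "Recipe", month: int) -> bool:
#       if not recipe.is_seasonal or not (recipe.season_start_month and recipe.season_end_month):
#           return True
#       if recipe.season_start_month <= recipe.season_end_month:
#           return recipe.season_start_month <= month <= recipe.season_end_month
#       return month >= recipe.season_start_month or month <= recipe.season_end_month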
|
||||
|
||||
|
||||
class RecipeIngredient(Base):
|
||||
"""Ingredients required for each recipe"""
|
||||
__tablename__ = "recipe_ingredients"
|
||||
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
recipe_id = Column(UUID(as_uuid=True), ForeignKey('recipes.id'), nullable=False, index=True)
|
||||
ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Links to inventory ingredients
|
||||
|
||||
# Quantity specifications
|
||||
quantity = Column(Float, nullable=False)
|
||||
unit = Column(SQLEnum(MeasurementUnit), nullable=False)
|
||||
quantity_in_base_unit = Column(Float, nullable=True) # Converted to ingredient's base unit
|
||||
|
||||
# Alternative measurements
|
||||
alternative_quantity = Column(Float, nullable=True) # e.g., "2 cups" vs "240ml"
|
||||
alternative_unit = Column(SQLEnum(MeasurementUnit), nullable=True)
|
||||
|
||||
# Ingredient specifications
|
||||
preparation_method = Column(String(255), nullable=True) # "sifted", "room temperature", "chopped"
|
||||
ingredient_notes = Column(Text, nullable=True) # Special instructions for this ingredient
|
||||
is_optional = Column(Boolean, default=False)
|
||||
|
||||
# Recipe organization
|
||||
ingredient_order = Column(Integer, nullable=False, default=1) # Order in recipe
|
||||
ingredient_group = Column(String(100), nullable=True) # "wet ingredients", "dry ingredients", etc.
|
||||
|
||||
# Substitutions
|
||||
substitution_options = Column(JSONB, nullable=True) # Alternative ingredients
|
||||
substitution_ratio = Column(Float, nullable=True) # 1:1, 1:2, etc.
|
||||
|
||||
# Cost tracking
|
||||
unit_cost = Column(Numeric(10, 2), nullable=True)
|
||||
total_cost = Column(Numeric(10, 2), nullable=True)
|
||||
cost_updated_at = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
# Relationships
|
||||
recipe = relationship("Recipe", back_populates="ingredients")
|
||||
|
||||
__table_args__ = (
|
||||
Index('idx_recipe_ingredients_recipe', 'recipe_id', 'ingredient_order'),
|
||||
Index('idx_recipe_ingredients_ingredient', 'ingredient_id'),
|
||||
Index('idx_recipe_ingredients_tenant', 'tenant_id', 'recipe_id'),
|
||||
Index('idx_recipe_ingredients_group', 'recipe_id', 'ingredient_group', 'ingredient_order'),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert model to dictionary for API responses"""
|
||||
return {
|
||||
'id': str(self.id),
|
||||
'tenant_id': str(self.tenant_id),
|
||||
'recipe_id': str(self.recipe_id),
|
||||
'ingredient_id': str(self.ingredient_id),
|
||||
'quantity': self.quantity,
|
||||
'unit': self.unit.value if self.unit else None,
|
||||
'quantity_in_base_unit': self.quantity_in_base_unit,
|
||||
'alternative_quantity': self.alternative_quantity,
|
||||
'alternative_unit': self.alternative_unit.value if self.alternative_unit else None,
|
||||
'preparation_method': self.preparation_method,
|
||||
'ingredient_notes': self.ingredient_notes,
|
||||
'is_optional': self.is_optional,
|
||||
'ingredient_order': self.ingredient_order,
|
||||
'ingredient_group': self.ingredient_group,
|
||||
'substitution_options': self.substitution_options,
|
||||
'substitution_ratio': self.substitution_ratio,
|
||||
'unit_cost': float(self.unit_cost) if self.unit_cost else None,
|
||||
'total_cost': float(self.total_cost) if self.total_cost else None,
|
||||
'cost_updated_at': self.cost_updated_at.isoformat() if self.cost_updated_at else None,
|
||||
}
|
||||
|
||||
|
||||
class ProductionBatch(Base):
|
||||
"""Track production batches and inventory consumption"""
|
||||
__tablename__ = "production_batches"
|
||||
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
recipe_id = Column(UUID(as_uuid=True), ForeignKey('recipes.id'), nullable=False, index=True)
|
||||
|
||||
# Batch identification
|
||||
batch_number = Column(String(100), nullable=False, index=True)
|
||||
production_date = Column(DateTime(timezone=True), nullable=False, index=True)
|
||||
planned_start_time = Column(DateTime(timezone=True), nullable=True)
|
||||
actual_start_time = Column(DateTime(timezone=True), nullable=True)
|
||||
planned_end_time = Column(DateTime(timezone=True), nullable=True)
|
||||
actual_end_time = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
# Production planning
|
||||
planned_quantity = Column(Float, nullable=False)
|
||||
actual_quantity = Column(Float, nullable=True)
|
||||
yield_percentage = Column(Float, nullable=True) # actual/planned * 100
|
||||
batch_size_multiplier = Column(Float, nullable=False, default=1.0)
|
||||
|
||||
# Production details
|
||||
status = Column(SQLEnum(ProductionStatus), nullable=False, default=ProductionStatus.PLANNED, index=True)
|
||||
priority = Column(SQLEnum(ProductionPriority), nullable=False, default=ProductionPriority.MEDIUM)
|
||||
assigned_staff = Column(JSONB, nullable=True) # List of staff assigned to this batch
|
||||
production_notes = Column(Text, nullable=True)
|
||||
|
||||
# Quality metrics
|
||||
quality_score = Column(Float, nullable=True) # 1-10 scale
|
||||
quality_notes = Column(Text, nullable=True)
|
||||
defect_rate = Column(Float, nullable=True) # Percentage of defective products
|
||||
rework_required = Column(Boolean, default=False)
|
||||
|
||||
# Cost tracking
|
||||
planned_material_cost = Column(Numeric(10, 2), nullable=True)
|
||||
actual_material_cost = Column(Numeric(10, 2), nullable=True)
|
||||
labor_cost = Column(Numeric(10, 2), nullable=True)
|
||||
overhead_cost = Column(Numeric(10, 2), nullable=True)
|
||||
total_production_cost = Column(Numeric(10, 2), nullable=True)
|
||||
cost_per_unit = Column(Numeric(10, 2), nullable=True)
|
||||
|
||||
# Environmental conditions
|
||||
production_temperature = Column(Float, nullable=True)
|
||||
production_humidity = Column(Float, nullable=True)
|
||||
oven_temperature = Column(Float, nullable=True)
|
||||
baking_time_minutes = Column(Integer, nullable=True)
|
||||
|
||||
# Waste and efficiency
|
||||
waste_quantity = Column(Float, nullable=False, default=0.0)
|
||||
waste_reason = Column(String(255), nullable=True)
|
||||
efficiency_percentage = Column(Float, nullable=True) # Based on time vs planned
|
||||
|
||||
# Sales integration
|
||||
customer_order_reference = Column(String(100), nullable=True) # If made to order
|
||||
pre_order_quantity = Column(Float, nullable=True) # Pre-sold quantity
|
||||
shelf_quantity = Column(Float, nullable=True) # For shelf/display
|
||||
|
||||
# Audit fields
|
||||
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime(timezone=True),
|
||||
default=lambda: datetime.now(timezone.utc),
|
||||
onupdate=lambda: datetime.now(timezone.utc))
|
||||
created_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
completed_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
|
||||
# Relationships
|
||||
recipe = relationship("Recipe", back_populates="production_batches")
|
||||
ingredient_consumptions = relationship("ProductionIngredientConsumption", back_populates="production_batch", cascade="all, delete-orphan")
|
||||
|
||||
__table_args__ = (
|
||||
Index('idx_production_batches_tenant_date', 'tenant_id', 'production_date'),
|
||||
Index('idx_production_batches_recipe', 'recipe_id', 'production_date'),
|
||||
Index('idx_production_batches_status', 'tenant_id', 'status', 'production_date'),
|
||||
Index('idx_production_batches_batch_number', 'tenant_id', 'batch_number'),
|
||||
Index('idx_production_batches_priority', 'tenant_id', 'priority', 'planned_start_time'),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert model to dictionary for API responses"""
|
||||
return {
|
||||
'id': str(self.id),
|
||||
'tenant_id': str(self.tenant_id),
|
||||
'recipe_id': str(self.recipe_id),
|
||||
'batch_number': self.batch_number,
|
||||
'production_date': self.production_date.isoformat() if self.production_date else None,
|
||||
'planned_start_time': self.planned_start_time.isoformat() if self.planned_start_time else None,
|
||||
'actual_start_time': self.actual_start_time.isoformat() if self.actual_start_time else None,
|
||||
'planned_end_time': self.planned_end_time.isoformat() if self.planned_end_time else None,
|
||||
'actual_end_time': self.actual_end_time.isoformat() if self.actual_end_time else None,
|
||||
'planned_quantity': self.planned_quantity,
|
||||
'actual_quantity': self.actual_quantity,
|
||||
'yield_percentage': self.yield_percentage,
|
||||
'batch_size_multiplier': self.batch_size_multiplier,
|
||||
'status': self.status.value if self.status else None,
|
||||
'priority': self.priority.value if self.priority else None,
|
||||
'assigned_staff': self.assigned_staff,
|
||||
'production_notes': self.production_notes,
|
||||
'quality_score': self.quality_score,
|
||||
'quality_notes': self.quality_notes,
|
||||
'defect_rate': self.defect_rate,
|
||||
'rework_required': self.rework_required,
|
||||
'planned_material_cost': float(self.planned_material_cost) if self.planned_material_cost else None,
|
||||
'actual_material_cost': float(self.actual_material_cost) if self.actual_material_cost else None,
|
||||
'labor_cost': float(self.labor_cost) if self.labor_cost else None,
|
||||
'overhead_cost': float(self.overhead_cost) if self.overhead_cost else None,
|
||||
'total_production_cost': float(self.total_production_cost) if self.total_production_cost else None,
|
||||
'cost_per_unit': float(self.cost_per_unit) if self.cost_per_unit else None,
|
||||
'production_temperature': self.production_temperature,
|
||||
'production_humidity': self.production_humidity,
|
||||
'oven_temperature': self.oven_temperature,
|
||||
'baking_time_minutes': self.baking_time_minutes,
|
||||
'waste_quantity': self.waste_quantity,
|
||||
'waste_reason': self.waste_reason,
|
||||
'efficiency_percentage': self.efficiency_percentage,
|
||||
'customer_order_reference': self.customer_order_reference,
|
||||
'pre_order_quantity': self.pre_order_quantity,
|
||||
'shelf_quantity': self.shelf_quantity,
|
||||
'created_at': self.created_at.isoformat() if self.created_at else None,
|
||||
'updated_at': self.updated_at.isoformat() if self.updated_at else None,
|
||||
'created_by': str(self.created_by) if self.created_by else None,
|
||||
'completed_by': str(self.completed_by) if self.completed_by else None,
|
||||
}
|
||||
|
||||
|
||||
class ProductionIngredientConsumption(Base):
|
||||
"""Track actual ingredient consumption during production"""
|
||||
__tablename__ = "production_ingredient_consumption"
|
||||
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
production_batch_id = Column(UUID(as_uuid=True), ForeignKey('production_batches.id'), nullable=False, index=True)
|
||||
recipe_ingredient_id = Column(UUID(as_uuid=True), ForeignKey('recipe_ingredients.id'), nullable=False, index=True)
|
||||
ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Links to inventory ingredients
|
||||
stock_id = Column(UUID(as_uuid=True), nullable=True, index=True) # Specific stock batch used
|
||||
|
||||
# Consumption details
|
||||
planned_quantity = Column(Float, nullable=False)
|
||||
actual_quantity = Column(Float, nullable=False)
|
||||
unit = Column(SQLEnum(MeasurementUnit), nullable=False)
|
||||
variance_quantity = Column(Float, nullable=True) # actual - planned
|
||||
variance_percentage = Column(Float, nullable=True) # (actual - planned) / planned * 100
|
||||
|
||||
# Cost tracking
|
||||
unit_cost = Column(Numeric(10, 2), nullable=True)
|
||||
total_cost = Column(Numeric(10, 2), nullable=True)
|
||||
|
||||
# Consumption details
|
||||
consumption_time = Column(DateTime(timezone=True), nullable=False,
|
||||
default=lambda: datetime.now(timezone.utc))
|
||||
consumption_notes = Column(Text, nullable=True)
|
||||
staff_member = Column(UUID(as_uuid=True), nullable=True)
|
||||
|
||||
# Quality and condition
|
||||
ingredient_condition = Column(String(50), nullable=True) # fresh, near_expiry, etc.
|
||||
quality_impact = Column(String(255), nullable=True) # Impact on final product quality
|
||||
substitution_used = Column(Boolean, default=False)
|
||||
substitution_details = Column(Text, nullable=True)
|
||||
|
||||
# Relationships
|
||||
production_batch = relationship("ProductionBatch", back_populates="ingredient_consumptions")
|
||||
|
||||
__table_args__ = (
|
||||
Index('idx_consumption_batch', 'production_batch_id'),
|
||||
Index('idx_consumption_ingredient', 'ingredient_id', 'consumption_time'),
|
||||
Index('idx_consumption_tenant', 'tenant_id', 'consumption_time'),
|
||||
Index('idx_consumption_recipe_ingredient', 'recipe_ingredient_id'),
|
||||
Index('idx_consumption_stock', 'stock_id'),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert model to dictionary for API responses"""
|
||||
return {
|
||||
'id': str(self.id),
|
||||
'tenant_id': str(self.tenant_id),
|
||||
'production_batch_id': str(self.production_batch_id),
|
||||
'recipe_ingredient_id': str(self.recipe_ingredient_id),
|
||||
'ingredient_id': str(self.ingredient_id),
|
||||
'stock_id': str(self.stock_id) if self.stock_id else None,
|
||||
'planned_quantity': self.planned_quantity,
|
||||
'actual_quantity': self.actual_quantity,
|
||||
'unit': self.unit.value if self.unit else None,
|
||||
'variance_quantity': self.variance_quantity,
|
||||
'variance_percentage': self.variance_percentage,
|
||||
'unit_cost': float(self.unit_cost) if self.unit_cost else None,
|
||||
'total_cost': float(self.total_cost) if self.total_cost else None,
|
||||
'consumption_time': self.consumption_time.isoformat() if self.consumption_time else None,
|
||||
'consumption_notes': self.consumption_notes,
|
||||
'staff_member': str(self.staff_member) if self.staff_member else None,
|
||||
'ingredient_condition': self.ingredient_condition,
|
||||
'quality_impact': self.quality_impact,
|
||||
'substitution_used': self.substitution_used,
|
||||
'substitution_details': self.substitution_details,
|
||||
}
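
# Variance bookkeeping sketch (illustrative; mirrors the column comments above):
#
#   consumption.variance_quantity = consumption.actual_quantity - consumption.planned_quantity
#   consumption.variance_percentage = (
#       (consumption.actual_quantity - consumption.planned_quantity)
#       / consumption.planned_quantity * 100
#       if consumption.planned_quantity else None
#   )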
|
||||
|
||||
|
||||
class ProductionSchedule(Base):
|
||||
"""Production planning and scheduling"""
|
||||
__tablename__ = "production_schedules"
|
||||
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
|
||||
# Schedule details
|
||||
schedule_date = Column(DateTime(timezone=True), nullable=False, index=True)
|
||||
schedule_name = Column(String(255), nullable=True)
|
||||
|
||||
# Production planning
|
||||
total_planned_batches = Column(Integer, nullable=False, default=0)
|
||||
total_planned_items = Column(Float, nullable=False, default=0.0)
|
||||
estimated_production_hours = Column(Float, nullable=True)
|
||||
estimated_material_cost = Column(Numeric(10, 2), nullable=True)
|
||||
|
||||
# Schedule status
|
||||
is_published = Column(Boolean, default=False)
|
||||
is_completed = Column(Boolean, default=False)
|
||||
completion_percentage = Column(Float, nullable=True)
|
||||
|
||||
# Planning constraints
|
||||
available_staff_hours = Column(Float, nullable=True)
|
||||
oven_capacity_hours = Column(Float, nullable=True)
|
||||
production_capacity_limit = Column(Float, nullable=True)
|
||||
|
||||
# Notes and instructions
|
||||
schedule_notes = Column(Text, nullable=True)
|
||||
preparation_instructions = Column(Text, nullable=True)
|
||||
special_requirements = Column(JSONB, nullable=True)
|
||||
|
||||
# Audit fields
|
||||
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime(timezone=True),
|
||||
default=lambda: datetime.now(timezone.utc),
|
||||
onupdate=lambda: datetime.now(timezone.utc))
|
||||
created_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
published_by = Column(UUID(as_uuid=True), nullable=True)
|
||||
published_at = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
__table_args__ = (
|
||||
Index('idx_production_schedules_tenant_date', 'tenant_id', 'schedule_date'),
|
||||
Index('idx_production_schedules_published', 'tenant_id', 'is_published', 'schedule_date'),
|
||||
Index('idx_production_schedules_completed', 'tenant_id', 'is_completed', 'schedule_date'),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert model to dictionary for API responses"""
|
||||
return {
|
||||
'id': str(self.id),
|
||||
'tenant_id': str(self.tenant_id),
|
||||
'schedule_date': self.schedule_date.isoformat() if self.schedule_date else None,
|
||||
'schedule_name': self.schedule_name,
|
||||
'total_planned_batches': self.total_planned_batches,
|
||||
'total_planned_items': self.total_planned_items,
|
||||
'estimated_production_hours': self.estimated_production_hours,
|
||||
'estimated_material_cost': float(self.estimated_material_cost) if self.estimated_material_cost else None,
|
||||
'is_published': self.is_published,
|
||||
'is_completed': self.is_completed,
|
||||
'completion_percentage': self.completion_percentage,
|
||||
'available_staff_hours': self.available_staff_hours,
|
||||
'oven_capacity_hours': self.oven_capacity_hours,
|
||||
'production_capacity_limit': self.production_capacity_limit,
|
||||
'schedule_notes': self.schedule_notes,
|
||||
'preparation_instructions': self.preparation_instructions,
|
||||
'special_requirements': self.special_requirements,
|
||||
'created_at': self.created_at.isoformat() if self.created_at else None,
|
||||
'updated_at': self.updated_at.isoformat() if self.updated_at else None,
|
||||
'created_by': str(self.created_by) if self.created_by else None,
|
||||
'published_by': str(self.published_by) if self.published_by else None,
|
||||
'published_at': self.published_at.isoformat() if self.published_at else None,
|
||||
}
|
||||
7
services/recipes/app/repositories/__init__.py
Normal file
7
services/recipes/app/repositories/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# services/recipes/app/repositories/__init__.py
|
||||
|
||||
from .recipe_repository import RecipeRepository
|
||||
|
||||
__all__ = [
|
||||
"RecipeRepository"
|
||||
]
|
||||
270
services/recipes/app/repositories/recipe_repository.py
Normal file
270
services/recipes/app/repositories/recipe_repository.py
Normal file
@@ -0,0 +1,270 @@
|
||||
# services/recipes/app/repositories/recipe_repository.py
|
||||
"""
|
||||
Async recipe repository for database operations
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, or_
|
||||
from sqlalchemy.orm import selectinload
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from shared.database.repository import BaseRepository
|
||||
from ..models.recipes import Recipe, RecipeIngredient, RecipeStatus
|
||||
from ..schemas.recipes import RecipeCreate, RecipeUpdate
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class RecipeRepository(BaseRepository[Recipe, RecipeCreate, RecipeUpdate]):
|
||||
"""Async repository for recipe operations"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
super().__init__(Recipe, session)
|
||||
|
||||
async def get_recipe_with_ingredients(self, recipe_id: UUID) -> Optional[Dict[str, Any]]:
|
||||
"""Get recipe with ingredients loaded"""
|
||||
result = await self.session.execute(
|
||||
select(Recipe)
|
||||
.options(selectinload(Recipe.ingredients))
|
||||
.where(Recipe.id == recipe_id)
|
||||
)
|
||||
recipe = result.scalar_one_or_none()
|
||||
|
||||
if not recipe:
|
||||
return None
|
||||
|
||||
return {
|
||||
"id": str(recipe.id),
|
||||
"tenant_id": str(recipe.tenant_id),
|
||||
"name": recipe.name,
|
||||
"recipe_code": recipe.recipe_code,
|
||||
"version": recipe.version,
|
||||
"finished_product_id": str(recipe.finished_product_id),
|
||||
"description": recipe.description,
|
||||
"category": recipe.category,
|
||||
"cuisine_type": recipe.cuisine_type,
|
||||
"difficulty_level": recipe.difficulty_level,
|
||||
"yield_quantity": float(recipe.yield_quantity),
|
||||
"yield_unit": recipe.yield_unit.value if hasattr(recipe.yield_unit, 'value') else recipe.yield_unit,
|
||||
"prep_time_minutes": recipe.prep_time_minutes,
|
||||
"cook_time_minutes": recipe.cook_time_minutes,
|
||||
"total_time_minutes": recipe.total_time_minutes,
|
||||
"rest_time_minutes": recipe.rest_time_minutes,
|
||||
"estimated_cost_per_unit": float(recipe.estimated_cost_per_unit) if recipe.estimated_cost_per_unit else None,
|
||||
"last_calculated_cost": float(recipe.last_calculated_cost) if recipe.last_calculated_cost else None,
|
||||
"cost_calculation_date": recipe.cost_calculation_date.isoformat() if recipe.cost_calculation_date else None,
|
||||
"target_margin_percentage": recipe.target_margin_percentage,
|
||||
"suggested_selling_price": float(recipe.suggested_selling_price) if recipe.suggested_selling_price else None,
|
||||
"instructions": recipe.instructions,
|
||||
"preparation_notes": recipe.preparation_notes,
|
||||
"storage_instructions": recipe.storage_instructions,
|
||||
"quality_check_configuration": recipe.quality_check_configuration,
|
||||
"serves_count": recipe.serves_count,
|
||||
"nutritional_info": recipe.nutritional_info,
|
||||
"allergen_info": recipe.allergen_info,
|
||||
"dietary_tags": recipe.dietary_tags,
|
||||
"batch_size_multiplier": float(recipe.batch_size_multiplier),
|
||||
"minimum_batch_size": float(recipe.minimum_batch_size) if recipe.minimum_batch_size else None,
|
||||
"maximum_batch_size": float(recipe.maximum_batch_size) if recipe.maximum_batch_size else None,
|
||||
"optimal_production_temperature": float(recipe.optimal_production_temperature) if recipe.optimal_production_temperature else None,
|
||||
"optimal_humidity": float(recipe.optimal_humidity) if recipe.optimal_humidity else None,
|
||||
"status": recipe.status.value if hasattr(recipe.status, 'value') else recipe.status,
|
||||
"is_seasonal": recipe.is_seasonal,
|
||||
"season_start_month": recipe.season_start_month,
|
||||
"season_end_month": recipe.season_end_month,
|
||||
"is_signature_item": recipe.is_signature_item,
|
||||
"created_at": recipe.created_at.isoformat() if recipe.created_at else None,
|
||||
"updated_at": recipe.updated_at.isoformat() if recipe.updated_at else None,
|
||||
"created_by": str(recipe.created_by) if recipe.created_by else None,
|
||||
"updated_by": str(recipe.updated_by) if hasattr(recipe, 'updated_by') and recipe.updated_by else None,
|
||||
"ingredients": [
|
||||
{
|
||||
"id": str(ingredient.id),
|
||||
"tenant_id": str(ingredient.tenant_id),
|
||||
"recipe_id": str(ingredient.recipe_id),
|
||||
"ingredient_id": str(ingredient.ingredient_id),
|
||||
"quantity": float(ingredient.quantity),
|
||||
"unit": ingredient.unit.value if hasattr(ingredient.unit, 'value') else ingredient.unit,
|
||||
"quantity_in_base_unit": float(ingredient.quantity_in_base_unit) if ingredient.quantity_in_base_unit else None,
|
||||
"alternative_quantity": float(ingredient.alternative_quantity) if ingredient.alternative_quantity else None,
|
||||
"alternative_unit": ingredient.alternative_unit.value if hasattr(ingredient.alternative_unit, 'value') and ingredient.alternative_unit else None,
|
||||
"preparation_method": ingredient.preparation_method,
|
||||
"ingredient_notes": ingredient.ingredient_notes,
|
||||
"is_optional": ingredient.is_optional,
|
||||
"ingredient_order": ingredient.ingredient_order,
|
||||
"ingredient_group": ingredient.ingredient_group,
|
||||
"substitution_options": ingredient.substitution_options,
|
||||
"substitution_ratio": float(ingredient.substitution_ratio) if ingredient.substitution_ratio else None,
|
||||
"unit_cost": float(ingredient.unit_cost) if hasattr(ingredient, 'unit_cost') and ingredient.unit_cost else None,
|
||||
"total_cost": float(ingredient.total_cost) if hasattr(ingredient, 'total_cost') and ingredient.total_cost else None,
|
||||
"cost_updated_at": ingredient.cost_updated_at.isoformat() if hasattr(ingredient, 'cost_updated_at') and ingredient.cost_updated_at else None
|
||||
}
|
||||
for ingredient in recipe.ingredients
|
||||
] if hasattr(recipe, 'ingredients') else []
|
||||
}
|
||||
|
||||
async def search_recipes(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
search_term: Optional[str] = None,
|
||||
status: Optional[str] = None,
|
||||
category: Optional[str] = None,
|
||||
is_seasonal: Optional[bool] = None,
|
||||
is_signature: Optional[bool] = None,
|
||||
difficulty_level: Optional[int] = None,
|
||||
limit: int = 100,
|
||||
offset: int = 0
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Search recipes with multiple filters"""
|
||||
query = select(Recipe).where(Recipe.tenant_id == tenant_id)
|
||||
|
||||
# Text search
|
||||
if search_term:
|
||||
query = query.where(
|
||||
or_(
|
||||
Recipe.name.ilike(f"%{search_term}%"),
|
||||
Recipe.description.ilike(f"%{search_term}%")
|
||||
)
|
||||
)
|
||||
|
||||
# Status filter
|
||||
if status:
|
||||
query = query.where(Recipe.status == status)
|
||||
|
||||
# Category filter
|
||||
if category:
|
||||
query = query.where(Recipe.category == category)
|
||||
|
||||
# Seasonal filter
|
||||
if is_seasonal is not None:
|
||||
query = query.where(Recipe.is_seasonal == is_seasonal)
|
||||
|
||||
# Signature filter
|
||||
if is_signature is not None:
|
||||
query = query.where(Recipe.is_signature_item == is_signature)
|
||||
|
||||
# Difficulty filter
|
||||
if difficulty_level is not None:
|
||||
query = query.where(Recipe.difficulty_level == difficulty_level)
|
||||
|
||||
# Apply ordering and pagination
|
||||
query = query.order_by(Recipe.name).limit(limit).offset(offset)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
recipes = result.scalars().all()
|
||||
|
||||
return [
|
||||
{
|
||||
"id": str(recipe.id),
|
||||
"tenant_id": str(recipe.tenant_id),
|
||||
"name": recipe.name,
|
||||
"recipe_code": recipe.recipe_code,
|
||||
"version": recipe.version,
|
||||
"finished_product_id": str(recipe.finished_product_id),
|
||||
"description": recipe.description,
|
||||
"category": recipe.category,
|
||||
"cuisine_type": recipe.cuisine_type,
|
||||
"difficulty_level": recipe.difficulty_level,
|
||||
"yield_quantity": float(recipe.yield_quantity),
|
||||
"yield_unit": recipe.yield_unit.value if hasattr(recipe.yield_unit, 'value') else recipe.yield_unit,
|
||||
"prep_time_minutes": recipe.prep_time_minutes,
|
||||
"cook_time_minutes": recipe.cook_time_minutes,
|
||||
"total_time_minutes": recipe.total_time_minutes,
|
||||
"rest_time_minutes": recipe.rest_time_minutes,
|
||||
"estimated_cost_per_unit": float(recipe.estimated_cost_per_unit) if recipe.estimated_cost_per_unit else None,
|
||||
"last_calculated_cost": float(recipe.last_calculated_cost) if recipe.last_calculated_cost else None,
|
||||
"cost_calculation_date": recipe.cost_calculation_date.isoformat() if recipe.cost_calculation_date else None,
|
||||
"target_margin_percentage": recipe.target_margin_percentage,
|
||||
"suggested_selling_price": float(recipe.suggested_selling_price) if recipe.suggested_selling_price else None,
|
||||
"instructions": recipe.instructions,
|
||||
"preparation_notes": recipe.preparation_notes,
|
||||
"storage_instructions": recipe.storage_instructions,
|
||||
"quality_check_configuration": recipe.quality_check_configuration,
|
||||
"serves_count": recipe.serves_count,
|
||||
"nutritional_info": recipe.nutritional_info,
|
||||
"allergen_info": recipe.allergen_info,
|
||||
"dietary_tags": recipe.dietary_tags,
|
||||
"batch_size_multiplier": float(recipe.batch_size_multiplier),
|
||||
"minimum_batch_size": float(recipe.minimum_batch_size) if recipe.minimum_batch_size else None,
|
||||
"maximum_batch_size": float(recipe.maximum_batch_size) if recipe.maximum_batch_size else None,
|
||||
"optimal_production_temperature": float(recipe.optimal_production_temperature) if recipe.optimal_production_temperature else None,
|
||||
"optimal_humidity": float(recipe.optimal_humidity) if recipe.optimal_humidity else None,
|
||||
"status": recipe.status.value if hasattr(recipe.status, 'value') else recipe.status,
|
||||
"is_seasonal": recipe.is_seasonal,
|
||||
"season_start_month": recipe.season_start_month,
|
||||
"season_end_month": recipe.season_end_month,
|
||||
"is_signature_item": recipe.is_signature_item,
|
||||
"created_at": recipe.created_at.isoformat() if recipe.created_at else None,
|
||||
"updated_at": recipe.updated_at.isoformat() if recipe.updated_at else None,
|
||||
"created_by": str(recipe.created_by) if recipe.created_by else None,
|
||||
"updated_by": str(recipe.updated_by) if hasattr(recipe, 'updated_by') and recipe.updated_by else None,
|
||||
"ingredients": [] # For list view, don't load ingredients to improve performance
|
||||
}
|
||||
for recipe in recipes
|
||||
]
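
# Usage sketch (illustrative; the session and tenant_id come from the caller):
#
#   repo = RecipeRepository(session)
#   active_breads = await repo.search_recipes(
#       tenant_id=tenant_id,
#       search_term="bread",
#       status="ACTIVE",
#       category="bread",
#       limit=20,
#   )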
|
||||
|
||||
async def get_recipe_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""Get recipe statistics for dashboard"""
|
||||
# Total recipes
|
||||
total_result = await self.session.execute(
|
||||
select(func.count(Recipe.id)).where(Recipe.tenant_id == tenant_id)
|
||||
)
|
||||
total_recipes = total_result.scalar() or 0
|
||||
|
||||
# Active recipes
|
||||
active_result = await self.session.execute(
|
||||
select(func.count(Recipe.id)).where(
|
||||
and_(
|
||||
Recipe.tenant_id == tenant_id,
|
||||
Recipe.status == RecipeStatus.ACTIVE
|
||||
)
|
||||
)
|
||||
)
|
||||
active_recipes = active_result.scalar() or 0
|
||||
|
||||
# Signature recipes
|
||||
signature_result = await self.session.execute(
|
||||
select(func.count(Recipe.id)).where(
|
||||
and_(
|
||||
Recipe.tenant_id == tenant_id,
|
||||
Recipe.is_signature_item == True
|
||||
)
|
||||
)
|
||||
)
|
||||
signature_recipes = signature_result.scalar() or 0
|
||||
|
||||
# Seasonal recipes
|
||||
seasonal_result = await self.session.execute(
|
||||
select(func.count(Recipe.id)).where(
|
||||
and_(
|
||||
Recipe.tenant_id == tenant_id,
|
||||
Recipe.is_seasonal == True
|
||||
)
|
||||
)
|
||||
)
|
||||
seasonal_recipes = seasonal_result.scalar() or 0
|
||||
|
||||
# Category breakdown
|
||||
category_result = await self.session.execute(
|
||||
select(Recipe.category, func.count(Recipe.id))
|
||||
.where(Recipe.tenant_id == tenant_id)
|
||||
.group_by(Recipe.category)
|
||||
)
|
||||
category_data = category_result.all()
|
||||
|
||||
# Convert to list of dicts for the schema
|
||||
category_breakdown = [
|
||||
{"category": category or "Uncategorized", "count": count}
|
||||
for category, count in category_data
|
||||
]
|
||||
|
||||
return {
|
||||
"total_recipes": total_recipes,
|
||||
"active_recipes": active_recipes,
|
||||
"signature_recipes": signature_recipes,
|
||||
"seasonal_recipes": seasonal_recipes,
|
||||
"category_breakdown": category_breakdown
|
||||
}
|
||||
25
services/recipes/app/schemas/__init__.py
Normal file
25
services/recipes/app/schemas/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# services/recipes/app/schemas/__init__.py
|
||||
|
||||
from .recipes import (
|
||||
RecipeCreate,
|
||||
RecipeUpdate,
|
||||
RecipeResponse,
|
||||
RecipeIngredientCreate,
|
||||
RecipeIngredientResponse,
|
||||
RecipeSearchRequest,
|
||||
RecipeFeasibilityResponse,
|
||||
RecipeDuplicateRequest,
|
||||
RecipeStatisticsResponse
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"RecipeCreate",
|
||||
"RecipeUpdate",
|
||||
"RecipeResponse",
|
||||
"RecipeIngredientCreate",
|
||||
"RecipeIngredientResponse",
|
||||
"RecipeSearchRequest",
|
||||
"RecipeFeasibilityResponse",
|
||||
"RecipeDuplicateRequest",
|
||||
"RecipeStatisticsResponse"
|
||||
]
|
||||
273
services/recipes/app/schemas/recipes.py
Normal file
273
services/recipes/app/schemas/recipes.py
Normal file
@@ -0,0 +1,273 @@
# services/recipes/app/schemas/recipes.py
"""
Pydantic schemas for recipe-related API requests and responses
"""

from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from enum import Enum

from ..models.recipes import RecipeStatus, MeasurementUnit


# Quality Template Association Schemas
class QualityStageConfiguration(BaseModel):
    """Schema for quality checks configuration per production stage"""
    template_ids: List[UUID] = Field(default_factory=list, description="Quality template IDs for this stage")
    required_checks: List[str] = Field(default_factory=list, description="Required quality check types")
    optional_checks: List[str] = Field(default_factory=list, description="Optional quality check types")
    blocking_on_failure: bool = Field(default=True, description="Block stage progression on critical failures")
    min_quality_score: Optional[float] = Field(None, ge=0, le=10, description="Minimum quality score to pass stage")


class RecipeQualityConfiguration(BaseModel):
    """Schema for recipe quality configuration across all stages"""
    stages: Dict[str, QualityStageConfiguration] = Field(default_factory=dict, description="Quality configuration per stage")
    overall_quality_threshold: float = Field(default=7.0, ge=0, le=10, description="Overall quality threshold for batch")
    critical_stage_blocking: bool = Field(default=True, description="Block progression if critical checks fail")
    auto_create_quality_checks: bool = Field(default=True, description="Automatically create quality checks for batches")
    quality_manager_approval_required: bool = Field(default=False, description="Require quality manager approval")


class RecipeQualityConfigurationUpdate(BaseModel):
    """Schema for updating recipe quality configuration"""
    stages: Optional[Dict[str, QualityStageConfiguration]] = None
    overall_quality_threshold: Optional[float] = Field(None, ge=0, le=10)
    critical_stage_blocking: Optional[bool] = None
    auto_create_quality_checks: Optional[bool] = None
    quality_manager_approval_required: Optional[bool] = None


class RecipeIngredientCreate(BaseModel):
    """Schema for creating recipe ingredients"""
    ingredient_id: UUID
    quantity: float = Field(..., gt=0)
    unit: MeasurementUnit
    alternative_quantity: Optional[float] = None
    alternative_unit: Optional[MeasurementUnit] = None
    preparation_method: Optional[str] = None
    ingredient_notes: Optional[str] = None
    is_optional: bool = False
    ingredient_order: int = Field(..., ge=1)
    ingredient_group: Optional[str] = None
    substitution_options: Optional[Dict[str, Any]] = None
    substitution_ratio: Optional[float] = None


class RecipeIngredientUpdate(BaseModel):
    """Schema for updating recipe ingredients"""
    ingredient_id: Optional[UUID] = None
    quantity: Optional[float] = Field(None, gt=0)
    unit: Optional[MeasurementUnit] = None
    alternative_quantity: Optional[float] = None
    alternative_unit: Optional[MeasurementUnit] = None
    preparation_method: Optional[str] = None
    ingredient_notes: Optional[str] = None
    is_optional: Optional[bool] = None
    ingredient_order: Optional[int] = Field(None, ge=1)
    ingredient_group: Optional[str] = None
    substitution_options: Optional[Dict[str, Any]] = None
    substitution_ratio: Optional[float] = None


class RecipeIngredientResponse(BaseModel):
    """Schema for recipe ingredient responses"""
    id: UUID
    tenant_id: UUID
    recipe_id: UUID
    ingredient_id: UUID
    quantity: float
    unit: str
    quantity_in_base_unit: Optional[float] = None
    alternative_quantity: Optional[float] = None
    alternative_unit: Optional[str] = None
    preparation_method: Optional[str] = None
    ingredient_notes: Optional[str] = None
    is_optional: bool
    ingredient_order: int
    ingredient_group: Optional[str] = None
    substitution_options: Optional[Dict[str, Any]] = None
    substitution_ratio: Optional[float] = None
    unit_cost: Optional[float] = None
    total_cost: Optional[float] = None
    cost_updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True


class RecipeCreate(BaseModel):
    """Schema for creating recipes"""
    name: str = Field(..., min_length=1, max_length=255)
    recipe_code: Optional[str] = Field(None, max_length=100)
    version: str = Field(default="1.0", max_length=20)
    finished_product_id: UUID
    description: Optional[str] = None
    category: Optional[str] = Field(None, max_length=100)
    cuisine_type: Optional[str] = Field(None, max_length=100)
    difficulty_level: int = Field(default=1, ge=1, le=5)
    yield_quantity: float = Field(..., gt=0)
    yield_unit: MeasurementUnit
    prep_time_minutes: Optional[int] = Field(None, ge=0)
    cook_time_minutes: Optional[int] = Field(None, ge=0)
    total_time_minutes: Optional[int] = Field(None, ge=0)
    rest_time_minutes: Optional[int] = Field(None, ge=0)
    instructions: Optional[Dict[str, Any]] = None
    preparation_notes: Optional[str] = None
    storage_instructions: Optional[str] = None
    quality_check_configuration: Optional[RecipeQualityConfiguration] = None
    serves_count: Optional[int] = Field(None, ge=1)
    nutritional_info: Optional[Dict[str, Any]] = None
    allergen_info: Optional[Dict[str, Any]] = None
    dietary_tags: Optional[Dict[str, Any]] = None
    batch_size_multiplier: float = Field(default=1.0, gt=0)
    minimum_batch_size: Optional[float] = Field(None, gt=0)
    maximum_batch_size: Optional[float] = Field(None, gt=0)
    optimal_production_temperature: Optional[float] = None
    optimal_humidity: Optional[float] = Field(None, ge=0, le=100)
    is_seasonal: bool = False
    season_start_month: Optional[int] = Field(None, ge=1, le=12)
    season_end_month: Optional[int] = Field(None, ge=1, le=12)
    is_signature_item: bool = False
    target_margin_percentage: Optional[float] = Field(None, ge=0)
    ingredients: List[RecipeIngredientCreate] = Field(..., min_items=1)


class RecipeUpdate(BaseModel):
    """Schema for updating recipes"""
    name: Optional[str] = Field(None, min_length=1, max_length=255)
    recipe_code: Optional[str] = Field(None, max_length=100)
    version: Optional[str] = Field(None, max_length=20)
    description: Optional[str] = None
    category: Optional[str] = Field(None, max_length=100)
    cuisine_type: Optional[str] = Field(None, max_length=100)
    difficulty_level: Optional[int] = Field(None, ge=1, le=5)
    yield_quantity: Optional[float] = Field(None, gt=0)
    yield_unit: Optional[MeasurementUnit] = None
    prep_time_minutes: Optional[int] = Field(None, ge=0)
    cook_time_minutes: Optional[int] = Field(None, ge=0)
    total_time_minutes: Optional[int] = Field(None, ge=0)
    rest_time_minutes: Optional[int] = Field(None, ge=0)
    instructions: Optional[Dict[str, Any]] = None
    preparation_notes: Optional[str] = None
    storage_instructions: Optional[str] = None
    quality_check_configuration: Optional[RecipeQualityConfigurationUpdate] = None
    serves_count: Optional[int] = Field(None, ge=1)
    nutritional_info: Optional[Dict[str, Any]] = None
    allergen_info: Optional[Dict[str, Any]] = None
    dietary_tags: Optional[Dict[str, Any]] = None
    batch_size_multiplier: Optional[float] = Field(None, gt=0)
    minimum_batch_size: Optional[float] = Field(None, gt=0)
    maximum_batch_size: Optional[float] = Field(None, gt=0)
    optimal_production_temperature: Optional[float] = None
    optimal_humidity: Optional[float] = Field(None, ge=0, le=100)
    status: Optional[RecipeStatus] = None
    is_seasonal: Optional[bool] = None
    season_start_month: Optional[int] = Field(None, ge=1, le=12)
    season_end_month: Optional[int] = Field(None, ge=1, le=12)
    is_signature_item: Optional[bool] = None
    target_margin_percentage: Optional[float] = Field(None, ge=0)
    ingredients: Optional[List[RecipeIngredientCreate]] = None


class RecipeResponse(BaseModel):
    """Schema for recipe responses"""
    id: UUID
    tenant_id: UUID
    name: str
    recipe_code: Optional[str] = None
    version: str
    finished_product_id: UUID
    description: Optional[str] = None
    category: Optional[str] = None
    cuisine_type: Optional[str] = None
    difficulty_level: int
    yield_quantity: float
    yield_unit: str
    prep_time_minutes: Optional[int] = None
    cook_time_minutes: Optional[int] = None
    total_time_minutes: Optional[int] = None
    rest_time_minutes: Optional[int] = None
    estimated_cost_per_unit: Optional[float] = None
    last_calculated_cost: Optional[float] = None
    cost_calculation_date: Optional[datetime] = None
    target_margin_percentage: Optional[float] = None
    suggested_selling_price: Optional[float] = None
    instructions: Optional[Dict[str, Any]] = None
    preparation_notes: Optional[str] = None
    storage_instructions: Optional[str] = None
    quality_check_configuration: Optional[RecipeQualityConfiguration] = None
    serves_count: Optional[int] = None
    nutritional_info: Optional[Dict[str, Any]] = None
    allergen_info: Optional[Dict[str, Any]] = None
    dietary_tags: Optional[Dict[str, Any]] = None
    batch_size_multiplier: float
    minimum_batch_size: Optional[float] = None
    maximum_batch_size: Optional[float] = None
    optimal_production_temperature: Optional[float] = None
    optimal_humidity: Optional[float] = None
    status: str
    is_seasonal: bool
    season_start_month: Optional[int] = None
    season_end_month: Optional[int] = None
    is_signature_item: bool
    created_at: datetime
    updated_at: datetime
    created_by: Optional[UUID] = None
    updated_by: Optional[UUID] = None
    ingredients: Optional[List[RecipeIngredientResponse]] = None

    class Config:
        from_attributes = True


class RecipeDeletionSummary(BaseModel):
    """Summary of what will be deleted when hard-deleting a recipe"""
    recipe_id: UUID
    recipe_name: str
    recipe_code: str
    production_batches_count: int
    recipe_ingredients_count: int
    dependent_recipes_count: int  # Recipes that use this as ingredient/sub-recipe
    affected_orders_count: int  # Orders that include this recipe
    last_used_date: Optional[datetime] = None
    can_delete: bool
    warnings: List[str] = []


class RecipeSearchRequest(BaseModel):
    """Schema for recipe search requests"""
    search_term: Optional[str] = None
    status: Optional[RecipeStatus] = None
    category: Optional[str] = None
    is_seasonal: Optional[bool] = None
    is_signature: Optional[bool] = None
    difficulty_level: Optional[int] = Field(None, ge=1, le=5)
    limit: int = Field(default=100, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)


class RecipeDuplicateRequest(BaseModel):
    """Schema for recipe duplication requests"""
    new_name: str = Field(..., min_length=1, max_length=255)


class RecipeFeasibilityResponse(BaseModel):
    """Schema for recipe feasibility check responses"""
    recipe_id: UUID
    recipe_name: str
    batch_multiplier: float
    feasible: bool
    missing_ingredients: List[Dict[str, Any]] = []
    insufficient_ingredients: List[Dict[str, Any]] = []


class RecipeStatisticsResponse(BaseModel):
    """Schema for recipe statistics responses"""
    total_recipes: int
    active_recipes: int
    signature_recipes: int
    seasonal_recipes: int
    category_breakdown: List[Dict[str, Any]]
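As a quick illustration of these schemas, a hedged sketch of building a minimal create payload. The UUIDs are generated on the fly and the unit strings are assumed to match values of the MeasurementUnit enum defined in app/models/recipes.py, which is not shown in this section:

# Hypothetical usage sketch for the schemas above; "kg" is assumed to be a
# valid MeasurementUnit value, and only the required fields are supplied.
from uuid import uuid4
from app.schemas.recipes import RecipeCreate, RecipeIngredientCreate

payload = RecipeCreate(
    name="Sourdough Loaf",
    finished_product_id=uuid4(),
    yield_quantity=10,
    yield_unit="kg",
    ingredients=[
        RecipeIngredientCreate(
            ingredient_id=uuid4(),
            quantity=5.0,
            unit="kg",
            ingredient_order=1,
        )
    ],
)
print(payload.model_dump())  # use .dict() instead on Pydantic v1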
7
services/recipes/app/services/__init__.py
Normal file
7
services/recipes/app/services/__init__.py
Normal file
@@ -0,0 +1,7 @@
# services/recipes/app/services/__init__.py

from .recipe_service import RecipeService

__all__ = [
    "RecipeService"
]
519
services/recipes/app/services/recipe_service.py
Normal file
519
services/recipes/app/services/recipe_service.py
Normal file
@@ -0,0 +1,519 @@
# services/recipes/app/services/recipe_service.py
"""
Service layer for recipe management operations
"""

import logging
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from sqlalchemy.ext.asyncio import AsyncSession

from ..repositories.recipe_repository import RecipeRepository
from ..schemas.recipes import RecipeCreate, RecipeUpdate

logger = logging.getLogger(__name__)


class RecipeService:
    """Async service for recipe management operations"""

    def __init__(self, session: AsyncSession):
        self.session = session
        self.recipe_repo = RecipeRepository(session)

    async def get_recipe_with_ingredients(self, recipe_id: UUID) -> Optional[Dict[str, Any]]:
        """Get recipe by ID with ingredients"""
        try:
            return await self.recipe_repo.get_recipe_with_ingredients(recipe_id)
        except Exception as e:
            logger.error(f"Error getting recipe {recipe_id}: {e}")
            return None

    async def search_recipes(
        self,
        tenant_id: UUID,
        search_term: Optional[str] = None,
        status: Optional[str] = None,
        category: Optional[str] = None,
        is_seasonal: Optional[bool] = None,
        is_signature: Optional[bool] = None,
        difficulty_level: Optional[int] = None,
        limit: int = 100,
        offset: int = 0
    ) -> List[Dict[str, Any]]:
        """Search recipes with filters"""
        try:
            return await self.recipe_repo.search_recipes(
                tenant_id=tenant_id,
                search_term=search_term,
                status=status,
                category=category,
                is_seasonal=is_seasonal,
                is_signature=is_signature,
                difficulty_level=difficulty_level,
                limit=limit,
                offset=offset
            )
        except Exception as e:
            logger.error(f"Error searching recipes: {e}")
            return []

    async def get_recipe_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get recipe statistics for dashboard"""
        try:
            return await self.recipe_repo.get_recipe_statistics(tenant_id)
        except Exception as e:
            logger.error(f"Error getting recipe statistics: {e}")
            return {"total_recipes": 0, "active_recipes": 0, "signature_recipes": 0, "seasonal_recipes": 0}
    async def get_deletion_summary(self, recipe_id: UUID) -> Dict[str, Any]:
        """Get summary of what will be affected by deleting this recipe"""
        try:
            from sqlalchemy import select, func
            from ..models.recipes import RecipeIngredient, RecipeStatus

            # Get recipe info
            recipe = await self.recipe_repo.get_by_id(recipe_id)
            if not recipe:
                return {"success": False, "error": "Recipe not found"}

            # Count recipe ingredients
            ingredients_result = await self.session.execute(
                select(func.count(RecipeIngredient.id))
                .where(RecipeIngredient.recipe_id == recipe_id)
            )
            ingredients_count = ingredients_result.scalar() or 0

            # Count production batches using this recipe. The production batches
            # table is owned by another service, so there is nothing to query here
            # yet; report 0 until that integration exists.
            production_batches_count = 0

            # Count dependent recipes (recipes using this as ingredient) - future feature
            dependent_recipes_count = 0

            # Count affected orders - would need orders service integration
            affected_orders_count = 0

            # Determine if deletion is safe
            warnings = []
            can_delete = True

            if production_batches_count > 0:
                warnings.append(f"Esta receta tiene {production_batches_count} lotes de producción asociados")
                can_delete = False

            if affected_orders_count > 0:
                warnings.append(f"Esta receta está en {affected_orders_count} pedidos")
                can_delete = False

            if dependent_recipes_count > 0:
                warnings.append(f"{dependent_recipes_count} recetas dependen de esta")

            if recipe.status == RecipeStatus.ACTIVE:
                warnings.append("Esta receta está activa. Considera archivarla primero.")

            return {
                "success": True,
                "data": {
                    "recipe_id": str(recipe.id),
                    "recipe_name": recipe.name,
                    "recipe_code": recipe.recipe_code or "",
                    "production_batches_count": production_batches_count,
                    "recipe_ingredients_count": ingredients_count,
                    "dependent_recipes_count": dependent_recipes_count,
                    "affected_orders_count": affected_orders_count,
                    "last_used_date": None,
                    "can_delete": can_delete,
                    "warnings": warnings
                }
            }
        except Exception as e:
            logger.error(f"Error getting deletion summary: {e}")
            return {"success": False, "error": str(e)}
    async def create_recipe(
        self,
        recipe_data: Dict[str, Any],
        ingredients_data: List[Dict[str, Any]],
        created_by: UUID
    ) -> Dict[str, Any]:
        """Create a new recipe with ingredients"""
        from ..models.recipes import Recipe, RecipeIngredient, RecipeStatus

        try:
            # Add metadata
            recipe_data["created_by"] = created_by
            recipe_data["created_at"] = datetime.utcnow()
            recipe_data["updated_at"] = datetime.utcnow()
            recipe_data["status"] = recipe_data.get("status", RecipeStatus.DRAFT)

            # Create Recipe model directly (without ingredients)
            recipe = Recipe(**recipe_data)
            self.session.add(recipe)
            await self.session.flush()  # Get the recipe ID

            # Now create ingredients with the recipe_id and tenant_id
            for ing_data in ingredients_data:
                ingredient = RecipeIngredient(
                    recipe_id=recipe.id,
                    tenant_id=recipe.tenant_id,  # Add tenant_id from recipe
                    **ing_data
                )
                self.session.add(ingredient)

            await self.session.flush()

            # Commit the transaction to persist changes
            await self.session.commit()

            # Get the created recipe with ingredients
            result = await self.recipe_repo.get_recipe_with_ingredients(recipe.id)

            return {
                "success": True,
                "data": result
            }

        except Exception as e:
            logger.error(f"Error creating recipe: {e}")
            await self.session.rollback()
            return {
                "success": False,
                "error": str(e)
            }
    async def update_recipe(
        self,
        recipe_id: UUID,
        recipe_data: Dict[str, Any],
        ingredients_data: Optional[List[Dict[str, Any]]] = None,
        updated_by: Optional[UUID] = None
    ) -> Dict[str, Any]:
        """Update an existing recipe"""
        try:
            # Check if recipe exists
            existing_recipe = await self.recipe_repo.get_by_id(recipe_id)
            if not existing_recipe:
                return {
                    "success": False,
                    "error": "Recipe not found"
                }

            # Status transition business rules
            if "status" in recipe_data:
                from ..models.recipes import RecipeStatus
                new_status = recipe_data["status"]
                current_status = existing_recipe.status

                # Cannot reactivate discontinued recipes
                if current_status == RecipeStatus.DISCONTINUED:
                    if new_status != RecipeStatus.DISCONTINUED:
                        return {
                            "success": False,
                            "error": "Cannot reactivate a discontinued recipe. Create a new version instead."
                        }

                # Can only archive active or testing recipes
                if new_status == RecipeStatus.ARCHIVED:
                    if current_status not in [RecipeStatus.ACTIVE, RecipeStatus.TESTING]:
                        return {
                            "success": False,
                            "error": "Can only archive active or testing recipes."
                        }

                # Cannot activate drafts without ingredients
                if new_status == RecipeStatus.ACTIVE and current_status == RecipeStatus.DRAFT:
                    # Check if recipe has ingredients
                    from sqlalchemy import select, func
                    from ..models.recipes import RecipeIngredient

                    result = await self.session.execute(
                        select(func.count(RecipeIngredient.id)).where(RecipeIngredient.recipe_id == recipe_id)
                    )
                    ingredient_count = result.scalar()

                    if ingredient_count == 0:
                        return {
                            "success": False,
                            "error": "Cannot activate a recipe without ingredients."
                        }

            # Add metadata
            if updated_by:
                recipe_data["updated_by"] = updated_by
            recipe_data["updated_at"] = datetime.utcnow()

            # Use the shared repository's update method
            recipe_update = RecipeUpdate(**recipe_data)
            await self.recipe_repo.update(recipe_id, recipe_update)

            # Get the updated recipe with ingredients
            result = await self.recipe_repo.get_recipe_with_ingredients(recipe_id)

            return {
                "success": True,
                "data": result
            }

        except Exception as e:
            logger.error(f"Error updating recipe {recipe_id}: {e}")
            await self.session.rollback()
            return {
                "success": False,
                "error": str(e)
            }
    async def delete_recipe(self, recipe_id: UUID) -> bool:
        """Delete a recipe"""
        try:
            return await self.recipe_repo.delete(recipe_id)
        except Exception as e:
            logger.error(f"Error deleting recipe {recipe_id}: {e}")
            return False

    async def check_recipe_feasibility(self, recipe_id: UUID, batch_multiplier: float = 1.0) -> Dict[str, Any]:
        """Check if recipe can be produced with current inventory"""
        try:
            recipe = await self.recipe_repo.get_recipe_with_ingredients(recipe_id)
            if not recipe:
                return {
                    "success": False,
                    "error": "Recipe not found"
                }

            # Simplified feasibility check - can be enhanced later with inventory service integration
            return {
                "success": True,
                "data": {
                    "recipe_id": str(recipe_id),
                    "recipe_name": recipe["name"],
                    "batch_multiplier": batch_multiplier,
                    "feasible": True,
                    "missing_ingredients": [],
                    "insufficient_ingredients": []
                }
            }

        except Exception as e:
            logger.error(f"Error checking recipe feasibility {recipe_id}: {e}")
            return {
                "success": False,
                "error": str(e)
            }

    async def duplicate_recipe(
        self,
        recipe_id: UUID,
        new_name: str,
        created_by: UUID
    ) -> Dict[str, Any]:
        """Create a duplicate of an existing recipe"""
        try:
            # Get original recipe
            original_recipe = await self.recipe_repo.get_recipe_with_ingredients(recipe_id)
            if not original_recipe:
                return {
                    "success": False,
                    "error": "Recipe not found"
                }

            # Create new recipe data
            new_recipe_data = original_recipe.copy()
            new_recipe_data["name"] = new_name

            # Remove fields that should be auto-generated
            new_recipe_data.pop("id", None)
            new_recipe_data.pop("created_at", None)
            new_recipe_data.pop("updated_at", None)

            # Handle ingredients
            ingredients = new_recipe_data.pop("ingredients", [])

            # Create the duplicate
            result = await self.create_recipe(new_recipe_data, ingredients, created_by)

            return result

        except Exception as e:
            logger.error(f"Error duplicating recipe {recipe_id}: {e}")
            await self.session.rollback()
            return {
                "success": False,
                "error": str(e)
            }

    async def activate_recipe(self, recipe_id: UUID, activated_by: UUID) -> Dict[str, Any]:
        """Activate a recipe for production"""
        try:
            # Check if recipe exists
            recipe = await self.recipe_repo.get_recipe_with_ingredients(recipe_id)
            if not recipe:
                return {
                    "success": False,
                    "error": "Recipe not found"
                }

            if not recipe.get("ingredients"):
                return {
                    "success": False,
                    "error": "Recipe must have at least one ingredient"
                }

            # Update recipe status
            update_data = {
                "status": "active",
                "updated_by": activated_by,
                "updated_at": datetime.utcnow()
            }

            recipe_update = RecipeUpdate(**update_data)
            await self.recipe_repo.update(recipe_id, recipe_update)

            # Get the updated recipe
            result = await self.recipe_repo.get_recipe_with_ingredients(recipe_id)

            return {
                "success": True,
                "data": result
            }

        except Exception as e:
            logger.error(f"Error activating recipe {recipe_id}: {e}")
            return {
                "success": False,
                "error": str(e)
            }
    # Quality Configuration Methods

    async def update_recipe_quality_configuration(
        self,
        tenant_id: UUID,
        recipe_id: UUID,
        quality_config_update: Dict[str, Any],
        user_id: UUID
    ) -> Dict[str, Any]:
        """Update quality configuration for a recipe"""
        try:
            # Get current recipe
            recipe = await self.recipe_repo.get_recipe(tenant_id, recipe_id)
            if not recipe:
                raise ValueError("Recipe not found")

            # Get existing quality configuration or create default
            current_config = recipe.get("quality_check_configuration", {
                "stages": {},
                "overall_quality_threshold": 7.0,
                "critical_stage_blocking": True,
                "auto_create_quality_checks": True,
                "quality_manager_approval_required": False
            })

            # Merge with updates
            if "stages" in quality_config_update:
                current_config["stages"].update(quality_config_update["stages"])

            for key in ["overall_quality_threshold", "critical_stage_blocking",
                        "auto_create_quality_checks", "quality_manager_approval_required"]:
                if key in quality_config_update:
                    current_config[key] = quality_config_update[key]

            # Update recipe with new configuration
            recipe_update = RecipeUpdate(quality_check_configuration=current_config)
            await self.recipe_repo.update_recipe(tenant_id, recipe_id, recipe_update, user_id)

            # Return updated recipe
            updated_recipe = await self.recipe_repo.get_recipe(tenant_id, recipe_id)
            return updated_recipe

        except Exception as e:
            logger.error(f"Error updating recipe quality configuration: {e}")
            raise

    async def add_quality_templates_to_stage(
        self,
        tenant_id: UUID,
        recipe_id: UUID,
        stage: str,
        template_ids: List[UUID],
        user_id: UUID
    ):
        """Add quality templates to a specific recipe stage"""
        try:
            # Get current recipe
            recipe = await self.recipe_repo.get_recipe(tenant_id, recipe_id)
            if not recipe:
                raise ValueError("Recipe not found")

            # Get existing quality configuration
            quality_config = recipe.get("quality_check_configuration", {"stages": {}})

            # Initialize stage if it doesn't exist
            if stage not in quality_config["stages"]:
                quality_config["stages"][stage] = {
                    "template_ids": [],
                    "required_checks": [],
                    "optional_checks": [],
                    "blocking_on_failure": True,
                    "min_quality_score": None
                }

            # Add template IDs (avoid duplicates)
            stage_config = quality_config["stages"][stage]
            existing_ids = set(stage_config.get("template_ids", []))
            new_ids = [str(tid) for tid in template_ids if str(tid) not in existing_ids]
            stage_config["template_ids"].extend(new_ids)

            # Update recipe
            recipe_update = RecipeUpdate(quality_check_configuration=quality_config)
            await self.recipe_repo.update_recipe(tenant_id, recipe_id, recipe_update, user_id)

        except Exception as e:
            logger.error(f"Error adding quality templates to stage: {e}")
            raise

    async def remove_quality_template_from_stage(
        self,
        tenant_id: UUID,
        recipe_id: UUID,
        stage: str,
        template_id: UUID,
        user_id: UUID
    ):
        """Remove a quality template from a specific recipe stage"""
        try:
            # Get current recipe
            recipe = await self.recipe_repo.get_recipe(tenant_id, recipe_id)
            if not recipe:
                raise ValueError("Recipe not found")

            # Get existing quality configuration
            quality_config = recipe.get("quality_check_configuration", {"stages": {}})

            # Remove template ID from stage
            if stage in quality_config["stages"]:
                stage_config = quality_config["stages"][stage]
                template_ids = stage_config.get("template_ids", [])
                template_ids = [tid for tid in template_ids if str(tid) != str(template_id)]
                stage_config["template_ids"] = template_ids

            # Update recipe
            recipe_update = RecipeUpdate(quality_check_configuration=quality_config)
            await self.recipe_repo.update_recipe(tenant_id, recipe_id, recipe_update, user_id)

        except Exception as e:
            logger.error(f"Error removing quality template from stage: {e}")
            raise
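For context, a hedged sketch of how RecipeService might be exposed through a FastAPI route, mirroring the dependency pattern of the audit router earlier in this commit. The route path and function name here are assumptions, not part of the committed code:

# Hypothetical wiring sketch for RecipeService.get_recipe_statistics; assumes the
# db_manager session factory used by the other routers in this commit.
from uuid import UUID
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import db_manager
from app.services.recipe_service import RecipeService

router = APIRouter(tags=["recipes"])


async def get_db():
    async with db_manager.get_session() as session:
        yield session


@router.get("/tenants/{tenant_id}/recipes/statistics")
async def recipe_statistics(tenant_id: UUID, db: AsyncSession = Depends(get_db)):
    # RecipeService swallows repository errors and falls back to zeroed counts.
    service = RecipeService(db)
    return await service.get_recipe_statistics(tenant_id)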
134
services/recipes/app/services/tenant_deletion_service.py
Normal file
134
services/recipes/app/services/tenant_deletion_service.py
Normal file
@@ -0,0 +1,134 @@
"""
Recipes Service - Tenant Data Deletion
Handles deletion of all recipe-related data for a tenant
"""
from typing import Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog

from shared.services.tenant_deletion import BaseTenantDataDeletionService, TenantDataDeletionResult
from app.models.recipes import Recipe, RecipeIngredient, ProductionBatch

logger = structlog.get_logger()


class RecipesTenantDeletionService(BaseTenantDataDeletionService):
    """Service for deleting all recipe-related data for a tenant"""

    def __init__(self, db_session: AsyncSession):
        super().__init__("recipes-service")
        self.db = db_session

    async def get_tenant_data_preview(self, tenant_id: str) -> Dict[str, int]:
        """Get counts of what would be deleted"""

        try:
            preview = {}

            # Count recipes
            recipe_count = await self.db.scalar(
                select(func.count(Recipe.id)).where(Recipe.tenant_id == tenant_id)
            )
            preview["recipes"] = recipe_count or 0

            # Count recipe ingredients (will be deleted via CASCADE)
            ingredient_count = await self.db.scalar(
                select(func.count(RecipeIngredient.id))
                .where(RecipeIngredient.tenant_id == tenant_id)
            )
            preview["recipe_ingredients"] = ingredient_count or 0

            # Count production batches (will be deleted via CASCADE)
            batch_count = await self.db.scalar(
                select(func.count(ProductionBatch.id))
                .where(ProductionBatch.tenant_id == tenant_id)
            )
            preview["production_batches"] = batch_count or 0

            return preview

        except Exception as e:
            logger.error("Error getting deletion preview",
                         tenant_id=tenant_id,
                         error=str(e))
            return {}

    async def delete_tenant_data(self, tenant_id: str) -> TenantDataDeletionResult:
        """Delete all data for a tenant"""

        result = TenantDataDeletionResult(tenant_id, self.service_name)

        try:
            # Get preview before deletion for reporting
            preview = await self.get_tenant_data_preview(tenant_id)

            # Delete production batches first (foreign key to recipes)
            try:
                batch_delete = await self.db.execute(
                    delete(ProductionBatch).where(ProductionBatch.tenant_id == tenant_id)
                )
                deleted_batches = batch_delete.rowcount
                result.add_deleted_items("production_batches", deleted_batches)

                logger.info("Deleted production batches for tenant",
                            tenant_id=tenant_id,
                            count=deleted_batches)

            except Exception as e:
                logger.error("Error deleting production batches",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Production batch deletion: {str(e)}")

            # Delete recipe ingredients (foreign key to recipes)
            try:
                ingredient_delete = await self.db.execute(
                    delete(RecipeIngredient).where(RecipeIngredient.tenant_id == tenant_id)
                )
                deleted_ingredients = ingredient_delete.rowcount
                result.add_deleted_items("recipe_ingredients", deleted_ingredients)

                logger.info("Deleted recipe ingredients for tenant",
                            tenant_id=tenant_id,
                            count=deleted_ingredients)

            except Exception as e:
                logger.error("Error deleting recipe ingredients",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Recipe ingredient deletion: {str(e)}")

            # Delete recipes (parent table)
            try:
                recipe_delete = await self.db.execute(
                    delete(Recipe).where(Recipe.tenant_id == tenant_id)
                )
                deleted_recipes = recipe_delete.rowcount
                result.add_deleted_items("recipes", deleted_recipes)

                logger.info("Deleted recipes for tenant",
                            tenant_id=tenant_id,
                            count=deleted_recipes)

            except Exception as e:
                logger.error("Error deleting recipes",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Recipe deletion: {str(e)}")

            # Commit all deletions
            await self.db.commit()

            logger.info("Tenant data deletion completed",
                        tenant_id=tenant_id,
                        deleted_counts=result.deleted_counts)

        except Exception as e:
            logger.error("Fatal error during tenant data deletion",
                         tenant_id=tenant_id,
                         error=str(e))
            await self.db.rollback()
            result.add_error(f"Fatal error: {str(e)}")

        return result
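Finally, a hedged sketch of how this deletion service might be driven from an async context. The db_manager session factory mirrors the one used by the other modules in this commit, and the tenant id is a placeholder:

# Hypothetical driver for RecipesTenantDeletionService; session factory and
# tenant id are assumptions for illustration only.
import asyncio

from app.core.database import db_manager
from app.services.tenant_deletion_service import RecipesTenantDeletionService


async def purge_tenant(tenant_id: str) -> None:
    async with db_manager.get_session() as session:
        service = RecipesTenantDeletionService(session)

        # Preview first, then delete; delete_tenant_data commits internally.
        preview = await service.get_tenant_data_preview(tenant_id)
        print("Would delete:", preview)

        result = await service.delete_tenant_data(tenant_id)
        print("Deleted:", result.deleted_counts)


if __name__ == "__main__":
    asyncio.run(purge_tenant("00000000-0000-0000-0000-000000000000"))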