Improve the frontend 3

Urtzi Alfaro
2025-10-30 21:08:07 +01:00
parent 36217a2729
commit 63f5c6d512
184 changed files with 21512 additions and 7442 deletions

View File

@@ -455,3 +455,174 @@ async def resolve_or_create_products_batch(
logger.error("Batch product resolution failed",
error=str(e), tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Batch resolution failed: {str(e)}")

# ================================================================
# NEW: BATCH API ENDPOINTS FOR ORCHESTRATOR
# ================================================================

class BatchIngredientsRequest(BaseModel):
    """Request for batch ingredient fetching"""
    ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs to fetch")


class BatchIngredientsResponse(BaseModel):
    """Response with ingredient data"""
    ingredients: List[Dict[str, Any]] = Field(..., description="List of ingredient data")
    found_count: int = Field(..., description="Number of ingredients found")
    missing_ids: List[str] = Field(default_factory=list, description="IDs not found")


@router.post(
    route_builder.build_operations_route("ingredients/batch"),
    response_model=BatchIngredientsResponse
)
async def get_ingredients_batch(
    request: BatchIngredientsRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Fetch multiple ingredients in a single request (for Orchestrator).

    This endpoint reduces N API calls to 1, improving performance when
    the orchestrator needs ingredient data for production/procurement planning.
    """
    try:
        if not request.ingredient_ids:
            return BatchIngredientsResponse(
                ingredients=[],
                found_count=0,
                missing_ids=[]
            )

        service = InventoryService()
        ingredients = []
        found_ids = set()

        for ingredient_id in request.ingredient_ids:
            try:
                ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
                if ingredient:
                    ingredients.append({
                        'id': str(ingredient.id),
                        'name': ingredient.name,
                        'type': ingredient.type,
                        'unit': ingredient.unit,
                        'current_stock': float(ingredient.current_stock) if ingredient.current_stock else 0,
                        'reorder_point': float(ingredient.reorder_point) if ingredient.reorder_point else 0,
                        'cost_per_unit': float(ingredient.cost_per_unit) if ingredient.cost_per_unit else 0,
                        'category': ingredient.category,
                        'is_active': ingredient.is_active,
                        'shelf_life_days': ingredient.shelf_life_days
                    })
                    found_ids.add(str(ingredient_id))
            except Exception as e:
                logger.warning(
                    "Failed to fetch ingredient in batch",
                    ingredient_id=str(ingredient_id),
                    error=str(e)
                )
                continue

        missing_ids = [str(id) for id in request.ingredient_ids if str(id) not in found_ids]

        logger.info(
            "Batch ingredient fetch complete",
            requested=len(request.ingredient_ids),
            found=len(ingredients),
            missing=len(missing_ids),
            tenant_id=str(tenant_id)
        )

        return BatchIngredientsResponse(
            ingredients=ingredients,
            found_count=len(ingredients),
            missing_ids=missing_ids
        )

    except Exception as e:
        logger.error(
            "Batch ingredient fetch failed",
            error=str(e),
            tenant_id=str(tenant_id)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Batch ingredient fetch failed: {str(e)}"
        )

class BatchStockLevelsRequest(BaseModel):
    """Request for batch stock level fetching"""
    ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs")


class BatchStockLevelsResponse(BaseModel):
    """Response with stock level data"""
    stock_levels: Dict[str, float] = Field(..., description="Ingredient ID to stock level mapping")
    found_count: int = Field(..., description="Number of stock levels found")


@router.post(
    route_builder.build_operations_route("stock-levels/batch"),
    response_model=BatchStockLevelsResponse
)
async def get_stock_levels_batch(
    request: BatchStockLevelsRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Fetch stock levels for multiple ingredients in a single request.

    Optimized endpoint for Orchestrator to quickly check inventory levels
    without making individual API calls per ingredient.
    """
    try:
        if not request.ingredient_ids:
            return BatchStockLevelsResponse(
                stock_levels={},
                found_count=0
            )

        service = InventoryService()
        stock_levels = {}

        for ingredient_id in request.ingredient_ids:
            try:
                ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
                if ingredient:
                    stock_levels[str(ingredient_id)] = float(ingredient.current_stock) if ingredient.current_stock else 0.0
            except Exception as e:
                logger.warning(
                    "Failed to fetch stock level in batch",
                    ingredient_id=str(ingredient_id),
                    error=str(e)
                )
                continue

        logger.info(
            "Batch stock level fetch complete",
            requested=len(request.ingredient_ids),
            found=len(stock_levels),
            tenant_id=str(tenant_id)
        )

        return BatchStockLevelsResponse(
            stock_levels=stock_levels,
            found_count=len(stock_levels)
        )

    except Exception as e:
        logger.error(
            "Batch stock level fetch failed",
            error=str(e),
            tenant_id=str(tenant_id)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Batch stock level fetch failed: {str(e)}"
        )
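
For reference, a minimal sketch of how an orchestrator-side client might call the new batch endpoint. The base URL, the concrete path produced by route_builder.build_operations_route, and the bearer-token auth are assumptions (none of them are visible in this diff); httpx is used only as an example async HTTP client.

import httpx  # assumed client; any async HTTP library works
from typing import List
from uuid import UUID


async def fetch_ingredients_batch(base_url: str, tenant_id: UUID,
                                   ingredient_ids: List[UUID], token: str) -> dict:
    # One POST replaces N individual ingredient lookups.
    # Hypothetical route shape; the real path is built by route_builder.
    url = f"{base_url}/tenants/{tenant_id}/operations/ingredients/batch"
    payload = {"ingredient_ids": [str(i) for i in ingredient_ids]}
    async with httpx.AsyncClient() as client:
        resp = await client.post(url, json=payload,
                                 headers={"Authorization": f"Bearer {token}"})
        resp.raise_for_status()
        # -> {"ingredients": [...], "found_count": N, "missing_ids": [...]}
        return resp.json()

# The stock-levels endpoint follows the same pattern against
# .../operations/stock-levels/batch and returns {"stock_levels": {...}, "found_count": N}.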

View File

@@ -137,7 +137,11 @@ class Ingredient(Base):
    is_perishable = Column(Boolean, default=False)
    allergen_info = Column(JSONB, nullable=True)  # JSON array of allergens
    nutritional_info = Column(JSONB, nullable=True)  # Nutritional information for finished products

    # NEW: Local production support (for procurement service integration)
    produced_locally = Column(Boolean, default=False, nullable=False)  # If true, ingredient is produced in-house
    recipe_id = Column(UUID(as_uuid=True), nullable=True)  # Links to recipe for BOM explosion
    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
@@ -213,6 +217,9 @@ class Ingredient(Base):
            'is_perishable': self.is_perishable if self.is_perishable is not None else False,
            'allergen_info': self.allergen_info,
            'nutritional_info': self.nutritional_info,
            # NEW: Local production support
            'produced_locally': self.produced_locally if self.produced_locally is not None else False,
            'recipe_id': str(self.recipe_id) if self.recipe_id else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else datetime.now(timezone.utc).isoformat(),
            'created_by': str(self.created_by) if self.created_by else None,

View File

@@ -60,7 +60,11 @@ class IngredientCreate(InventoryBaseSchema):
    # Properties
    is_perishable: bool = Field(False, description="Is perishable")
    allergen_info: Optional[Dict[str, Any]] = Field(None, description="Allergen information")

    # NEW: Local production support
    produced_locally: bool = Field(False, description="If true, ingredient is produced in-house")
    recipe_id: Optional[str] = Field(None, description="Recipe ID for BOM explosion (if produced locally)")

    @validator('reorder_point')
    def validate_reorder_point(cls, v, values):
        if 'low_stock_threshold' in values and v <= values['low_stock_threshold']:
@@ -99,6 +103,10 @@ class IngredientUpdate(InventoryBaseSchema):
    is_perishable: Optional[bool] = Field(None, description="Is perishable")
    allergen_info: Optional[Dict[str, Any]] = Field(None, description="Allergen information")

    # NEW: Local production support
    produced_locally: Optional[bool] = Field(None, description="If true, ingredient is produced in-house")
    recipe_id: Optional[str] = Field(None, description="Recipe ID for BOM explosion (if produced locally)")


class IngredientResponse(InventoryBaseSchema):
    """Schema for ingredient and finished product API responses"""
@@ -125,6 +133,11 @@ class IngredientResponse(InventoryBaseSchema):
    is_active: bool
    is_perishable: bool
    allergen_info: Optional[Dict[str, Any]]

    # NEW: Local production support
    produced_locally: bool = False
    recipe_id: Optional[str] = None

    created_at: datetime
    updated_at: datetime
    created_by: Optional[str]
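
A quick note on the API shape: produced_locally defaults to False on create, so existing clients are unaffected, and recipe_id travels as a plain string at the API boundary (the model and migration store it as a UUID). A hypothetical payload fragment, with all pre-existing ingredient fields elided:

# Hypothetical create payload fragment; only the two new fields are shown,
# the rest of the IngredientCreate fields are unchanged and omitted here.
payload = {
    # ... name, unit, reorder_point, low_stock_threshold, ... (existing fields)
    "produced_locally": True,
    "recipe_id": "3f1c9a2e-7b4d-4e8a-9c6f-1d2e3a4b5c6d",  # placeholder recipe UUID string
}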

View File

@@ -0,0 +1,77 @@
"""add_local_production_support
Revision ID: add_local_production_support
Revises: e7fcea67bf4e
Create Date: 2025-10-29 14:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'add_local_production_support'
down_revision = 'e7fcea67bf4e'
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Add local production support columns to ingredients table"""
# Add produced_locally column
op.add_column('ingredients', sa.Column(
'produced_locally',
sa.Boolean(),
nullable=False,
server_default='false',
comment='If true, ingredient is produced in-house and requires BOM explosion'
))
# Add recipe_id column for BOM explosion
op.add_column('ingredients', sa.Column(
'recipe_id',
postgresql.UUID(as_uuid=True),
nullable=True,
comment='Links to recipe for BOM explosion when ingredient is produced locally'
))
# Create index for efficient querying of locally-produced ingredients
op.create_index(
'ix_ingredients_produced_locally',
'ingredients',
['produced_locally'],
unique=False
)
# Create index for recipe_id lookups
op.create_index(
'ix_ingredients_recipe_id',
'ingredients',
['recipe_id'],
unique=False
)
# Add check constraint: if produced_locally is true, recipe_id should be set
# Note: This is a soft constraint - we allow NULL recipe_id even if produced_locally=true
# to support gradual data migration and edge cases
# op.create_check_constraint(
# 'ck_ingredients_local_production',
# 'ingredients',
# 'produced_locally = false OR recipe_id IS NOT NULL'
# )
def downgrade() -> None:
"""Remove local production support columns from ingredients table"""
# Drop check constraint
# op.drop_constraint('ck_ingredients_local_production', 'ingredients', type_='check')
# Drop indexes
op.drop_index('ix_ingredients_recipe_id', table_name='ingredients')
op.drop_index('ix_ingredients_produced_locally', table_name='ingredients')
# Drop columns
op.drop_column('ingredients', 'recipe_id')
op.drop_column('ingredients', 'produced_locally')
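
After applying the migration (alembic upgrade head), a small sketch of the queries the two new indexes are meant to serve, and of the rows the commented-out check constraint would have flagged (locally produced ingredients that still lack a recipe_id). The connection URL is an assumption; the table and column names come from this migration.

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def report_local_production_state(db_url: str) -> None:
    # db_url is assumed, e.g. "postgresql+asyncpg://user:pass@host/dbname"
    engine = create_async_engine(db_url)
    async with engine.connect() as conn:
        produced = await conn.execute(
            text("SELECT count(*) FROM ingredients WHERE produced_locally = true")
        )
        missing_recipe = await conn.execute(
            text("SELECT count(*) FROM ingredients "
                 "WHERE produced_locally = true AND recipe_id IS NULL")
        )
        print("produced locally:", produced.scalar_one())
        print("missing recipe_id:", missing_recipe.scalar_one())
    await engine.dispose()

# usage: asyncio.run(report_local_production_state("postgresql+asyncpg://..."))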

View File

@@ -155,6 +155,9 @@ async def seed_ingredients_for_tenant(
            is_perishable=ing_data.get("is_perishable", False),
            is_active=True,
            allergen_info=ing_data.get("allergen_info", []),
            # NEW: Local production support (Sprint 5)
            produced_locally=ing_data.get("produced_locally", False),
            recipe_id=uuid.UUID(ing_data["recipe_id"]) if ing_data.get("recipe_id") else None,
            created_at=datetime.now(timezone.utc),
            updated_at=datetime.now(timezone.utc)
        )