Improve the inventory page 3
@@ -75,14 +75,14 @@ class ProductionStage(enum.Enum):


 class StockMovementType(enum.Enum):
     """Types of inventory movements"""
-    PURCHASE = "purchase"
-    PRODUCTION_USE = "production_use"
-    ADJUSTMENT = "adjustment"
-    WASTE = "waste"
-    TRANSFER = "transfer"
-    RETURN = "return"
-    INITIAL_STOCK = "initial_stock"
-    TRANSFORMATION = "transformation"  # Converting between production stages
+    PURCHASE = "PURCHASE"
+    PRODUCTION_USE = "PRODUCTION_USE"
+    ADJUSTMENT = "ADJUSTMENT"
+    WASTE = "WASTE"
+    TRANSFER = "TRANSFER"
+    RETURN = "RETURN"
+    INITIAL_STOCK = "INITIAL_STOCK"
+    TRANSFORMATION = "TRANSFORMATION"  # Converting between production stages


 class Ingredient(Base):
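With the values uppercased to match the member names, a string written via `.value` now lines up with the label that SQLAlchemy would store for the enum member itself (SQLAlchemy's Enum type typically persists member names). A minimal sketch of that invariant, illustrative only and not part of the commit:

    import enum

    class StockMovementType(enum.Enum):
        PURCHASE = "PURCHASE"
        WASTE = "WASTE"

    for member in StockMovementType:
        # With value == name, passing member.value or member.name to the
        # movement_type column yields the same database label.
        assert member.value == member.name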
@@ -121,15 +121,8 @@ class Ingredient(Base):
     reorder_quantity = Column(Float, nullable=False, default=50.0)
     max_stock_level = Column(Float, nullable=True)

-    # Storage requirements (applies to both ingredients and finished products)
-    requires_refrigeration = Column(Boolean, default=False)
-    requires_freezing = Column(Boolean, default=False)
-    storage_temperature_min = Column(Float, nullable=True)  # Celsius
-    storage_temperature_max = Column(Float, nullable=True)  # Celsius
-    storage_humidity_max = Column(Float, nullable=True)  # Percentage
-
-    # Shelf life (critical for finished products)
-    shelf_life_days = Column(Integer, nullable=True)
+    # Shelf life (critical for finished products) - default values only
+    shelf_life_days = Column(Integer, nullable=True)  # Default shelf life - actual per batch
     display_life_hours = Column(Integer, nullable=True)  # How long can be displayed (for fresh products)
     best_before_hours = Column(Integer, nullable=True)  # Hours until best before (for same-day products)
     storage_instructions = Column(Text, nullable=True)
@@ -211,11 +204,6 @@ class Ingredient(Base):
             'reorder_point': self.reorder_point,
             'reorder_quantity': self.reorder_quantity,
             'max_stock_level': self.max_stock_level,
-            'requires_refrigeration': self.requires_refrigeration,
-            'requires_freezing': self.requires_freezing,
-            'storage_temperature_min': self.storage_temperature_min,
-            'storage_temperature_max': self.storage_temperature_max,
-            'storage_humidity_max': self.storage_humidity_max,
             'shelf_life_days': self.shelf_life_days,
             'display_life_hours': self.display_life_hours,
             'best_before_hours': self.best_before_hours,
@@ -248,7 +236,7 @@ class Stock(Base):
     supplier_batch_ref = Column(String(100), nullable=True)

     # Production stage tracking
-    production_stage = Column(String(20), nullable=False, default='raw_ingredient', index=True)
+    production_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False, default='raw_ingredient', index=True)
     transformation_reference = Column(String(100), nullable=True, index=True)  # Links related transformations

     # Quantities
@@ -275,6 +263,15 @@ class Stock(Base):
     warehouse_zone = Column(String(50), nullable=True)
     shelf_position = Column(String(50), nullable=True)

+    # Batch-specific storage requirements
+    requires_refrigeration = Column(Boolean, default=False)
+    requires_freezing = Column(Boolean, default=False)
+    storage_temperature_min = Column(Float, nullable=True)  # Celsius
+    storage_temperature_max = Column(Float, nullable=True)  # Celsius
+    storage_humidity_max = Column(Float, nullable=True)  # Percentage
+    shelf_life_days = Column(Integer, nullable=True)  # Batch-specific shelf life
+    storage_instructions = Column(Text, nullable=True)  # Batch-specific instructions
+
     # Status
     is_available = Column(Boolean, default=True)
     is_expired = Column(Boolean, default=False, index=True)
@@ -325,6 +322,13 @@ class Stock(Base):
             'storage_location': self.storage_location,
             'warehouse_zone': self.warehouse_zone,
             'shelf_position': self.shelf_position,
+            'requires_refrigeration': self.requires_refrigeration,
+            'requires_freezing': self.requires_freezing,
+            'storage_temperature_min': self.storage_temperature_min,
+            'storage_temperature_max': self.storage_temperature_max,
+            'storage_humidity_max': self.storage_humidity_max,
+            'shelf_life_days': self.shelf_life_days,
+            'storage_instructions': self.storage_instructions,
             'is_available': self.is_available,
             'is_expired': self.is_expired,
             'quality_status': self.quality_status,
@@ -343,7 +347,7 @@ class StockMovement(Base):
     stock_id = Column(UUID(as_uuid=True), ForeignKey('stock.id'), nullable=True, index=True)

     # Movement details
-    movement_type = Column(SQLEnum(StockMovementType), nullable=False, index=True)
+    movement_type = Column(SQLEnum('PURCHASE', 'PRODUCTION_USE', 'ADJUSTMENT', 'WASTE', 'TRANSFER', 'RETURN', 'INITIAL_STOCK', name='stockmovementtype', create_type=False), nullable=False, index=True)
     quantity = Column(Float, nullable=False)
     unit_cost = Column(Numeric(10, 2), nullable=True)
     total_cost = Column(Numeric(10, 2), nullable=True)
@@ -386,7 +390,7 @@ class StockMovement(Base):
             'tenant_id': str(self.tenant_id),
             'ingredient_id': str(self.ingredient_id),
             'stock_id': str(self.stock_id) if self.stock_id else None,
-            'movement_type': self.movement_type.value if self.movement_type else None,
+            'movement_type': self.movement_type if self.movement_type else None,
             'quantity': self.quantity,
             'unit_cost': float(self.unit_cost) if self.unit_cost else None,
             'total_cost': float(self.total_cost) if self.total_cost else None,
@@ -415,8 +419,8 @@ class ProductTransformation(Base):
     target_ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False)

     # Stage transformation
-    source_stage = Column(String(20), nullable=False)
-    target_stage = Column(String(20), nullable=False)
+    source_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False)
+    target_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False)

     # Quantities and conversion
     source_quantity = Column(Float, nullable=False)  # Input quantity
@@ -395,7 +395,8 @@ class IngredientRepository(BaseRepository[Ingredient, IngredientCreate, Ingredie
     async def update_last_purchase_price(self, ingredient_id: UUID, price: float) -> Optional[Ingredient]:
         """Update the last purchase price for an ingredient"""
         try:
-            update_data = {'last_purchase_price': price}
+            from app.schemas.inventory import IngredientUpdate
+            update_data = IngredientUpdate(last_purchase_price=price)
             return await self.update(ingredient_id, update_data)

         except Exception as e:
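Passing a typed IngredientUpdate instead of a raw dict lets the shared repository validate the payload before building the UPDATE. A hedged sketch of the assumed behaviour (the BaseRepository.update implementation is not shown in this commit):

    from typing import Optional
    from pydantic import BaseModel

    class IngredientUpdate(BaseModel):
        last_purchase_price: Optional[float] = None

    update = IngredientUpdate(last_purchase_price=2.35)
    # Presumably only explicitly set fields reach the UPDATE statement.
    print(update.model_dump(exclude_unset=True))  # {'last_purchase_price': 2.35}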
@@ -442,4 +443,28 @@ class IngredientRepository(BaseRepository[Ingredient, IngredientCreate, Ingredie
         except Exception as e:
             await self.session.rollback()
             logger.error("Failed to hard delete ingredient", error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
             raise
+
+    async def get_active_tenants(self) -> List[UUID]:
+        """Get list of active tenant IDs from ingredients table"""
+        try:
+            result = await self.session.execute(
+                select(func.distinct(Ingredient.tenant_id))
+                .where(Ingredient.is_active == True)
+            )
+
+            tenant_ids = []
+            for row in result.fetchall():
+                tenant_id = row[0]
+                # Convert to UUID if it's not already
+                if isinstance(tenant_id, UUID):
+                    tenant_ids.append(tenant_id)
+                else:
+                    tenant_ids.append(UUID(str(tenant_id)))
+
+            logger.info("Retrieved active tenants from ingredients", count=len(tenant_ids))
+            return tenant_ids
+
+        except Exception as e:
+            logger.error("Failed to get active tenants from ingredients", error=str(e))
+            return []
@@ -6,6 +6,7 @@ Stock Movement Repository using Repository Pattern
 from typing import List, Optional, Dict, Any
 from uuid import UUID
 from datetime import datetime, timedelta
+from decimal import Decimal
 from sqlalchemy import select, func, and_, or_, desc, asc
 from sqlalchemy.ext.asyncio import AsyncSession
 import structlog
@@ -35,6 +36,16 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
             create_data = movement_data.model_dump()
             create_data['tenant_id'] = tenant_id
             create_data['created_by'] = created_by

+            # Ensure movement_type is properly converted to enum value
+            if 'movement_type' in create_data:
+                movement_type = create_data['movement_type']
+                if hasattr(movement_type, 'value'):
+                    # It's an enum object, use its value
+                    create_data['movement_type'] = movement_type.value
+                elif isinstance(movement_type, str):
+                    # It's already a string, ensure it's uppercase for database
+                    create_data['movement_type'] = movement_type.upper()
+
             # Set movement date if not provided
             if not create_data.get('movement_date'):
@@ -42,7 +53,9 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,

             # Calculate total cost if unit cost provided
             if create_data.get('unit_cost') and create_data.get('quantity'):
-                create_data['total_cost'] = create_data['unit_cost'] * create_data['quantity']
+                unit_cost = create_data['unit_cost']
+                quantity = Decimal(str(create_data['quantity']))
+                create_data['total_cost'] = unit_cost * quantity

             # Create record
             record = await self.create(create_data)
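The Decimal(str(...)) wrapping matters because mixing Decimal and float in arithmetic raises a TypeError. A short sketch, assuming unit_cost arrives as a Decimal (for example from the Numeric(10, 2) column):

    from decimal import Decimal

    unit_cost = Decimal("2.50")
    quantity = 3.2  # plain float from the schema

    try:
        unit_cost * quantity
    except TypeError as exc:
        print(exc)  # unsupported operand type(s) for *: 'decimal.Decimal' and 'float'

    total = unit_cost * Decimal(str(quantity))
    print(total)  # Decimal('8.000')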
@@ -50,7 +63,7 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
                 "Created stock movement",
                 movement_id=record.id,
                 ingredient_id=record.ingredient_id,
-                movement_type=record.movement_type.value if record.movement_type else None,
+                movement_type=record.movement_type if record.movement_type else None,
                 quantity=record.quantity,
                 tenant_id=tenant_id
             )
@@ -234,7 +247,7 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,

             summary = {}
             for row in result:
-                movement_type = row.movement_type.value if row.movement_type else "unknown"
+                movement_type = row.movement_type if row.movement_type else "unknown"
                 summary[movement_type] = {
                     'count': row.count,
                     'total_quantity': float(row.total_quantity),
@@ -417,4 +430,65 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
                 ingredient_id=str(ingredient_id),
                 tenant_id=str(tenant_id)
             )
             raise
+
+    async def create_automatic_waste_movement(
+        self,
+        tenant_id: UUID,
+        ingredient_id: UUID,
+        stock_id: UUID,
+        quantity: float,
+        unit_cost: Optional[float],
+        batch_number: Optional[str],
+        expiration_date: datetime,
+        created_by: Optional[UUID] = None
+    ) -> StockMovement:
+        """Create an automatic waste movement for expired batches"""
+        try:
+            # Calculate total cost
+            total_cost = None
+            if unit_cost and quantity:
+                total_cost = Decimal(str(unit_cost)) * Decimal(str(quantity))
+
+            # Generate reference number
+            reference_number = f"AUTO-EXPIRE-{batch_number or stock_id}"
+
+            # Create movement data
+            movement_data = {
+                'tenant_id': tenant_id,
+                'ingredient_id': ingredient_id,
+                'stock_id': stock_id,
+                'movement_type': StockMovementType.WASTE.value,
+                'quantity': quantity,
+                'unit_cost': Decimal(str(unit_cost)) if unit_cost else None,
+                'total_cost': total_cost,
+                'quantity_before': quantity,
+                'quantity_after': 0,
+                'reference_number': reference_number,
+                'reason_code': 'expired',
+                'notes': f"Lote automáticamente marcado como caducado. Vencimiento: {expiration_date.strftime('%Y-%m-%d')}",
+                'movement_date': datetime.now(),
+                'created_by': created_by
+            }
+
+            # Create the movement record
+            movement = await self.create(movement_data)
+
+            logger.info("Created automatic waste movement for expired batch",
+                        movement_id=str(movement.id),
+                        tenant_id=str(tenant_id),
+                        ingredient_id=str(ingredient_id),
+                        stock_id=str(stock_id),
+                        quantity=quantity,
+                        batch_number=batch_number,
+                        reference_number=reference_number)
+
+            return movement
+
+        except Exception as e:
+            logger.error("Failed to create automatic waste movement",
+                         error=str(e),
+                         tenant_id=str(tenant_id),
+                         ingredient_id=str(ingredient_id),
+                         stock_id=str(stock_id))
+            raise
@@ -6,6 +6,7 @@ Stock Repository using Repository Pattern
 from typing import List, Optional, Dict, Any, Tuple
 from uuid import UUID
 from datetime import datetime, timedelta
+from decimal import Decimal
 from sqlalchemy import select, func, and_, or_, desc, asc, update
 from sqlalchemy.ext.asyncio import AsyncSession
 import structlog
@@ -13,11 +14,12 @@ import structlog
 from app.models.inventory import Stock, Ingredient
 from app.schemas.inventory import StockCreate, StockUpdate
 from shared.database.repository import BaseRepository
+from shared.utils.batch_generator import BatchCountProvider

 logger = structlog.get_logger()


-class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate]):
+class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate], BatchCountProvider):
     """Repository for stock operations"""

     def __init__(self, session: AsyncSession):
@@ -29,6 +31,20 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate]):
             # Prepare data
             create_data = stock_data.model_dump()
             create_data['tenant_id'] = tenant_id

+            # Ensure production_stage is properly converted to enum value
+            if 'production_stage' in create_data:
+                if hasattr(create_data['production_stage'], 'value'):
+                    create_data['production_stage'] = create_data['production_stage'].value
+                elif isinstance(create_data['production_stage'], str):
+                    # If it's a string, ensure it's the correct enum value
+                    from app.models.inventory import ProductionStage
+                    try:
+                        enum_obj = ProductionStage[create_data['production_stage']]
+                        create_data['production_stage'] = enum_obj.value
+                    except KeyError:
+                        # If it's already the value, keep it as is
+                        pass
+
             # Calculate available quantity
             available_qty = create_data['current_quantity'] - create_data.get('reserved_quantity', 0)
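The bracket lookup above resolves by member name, which is why a KeyError is the signal that the string is already the stored value. A small sketch of that distinction, using the stage values shown in this diff (member names are presumably the uppercase form of the values):

    import enum

    class ProductionStage(enum.Enum):
        RAW_INGREDIENT = "raw_ingredient"
        PAR_BAKED = "par_baked"

    print(ProductionStage["PAR_BAKED"].value)  # 'par_baked' — lookup by member name
    print(ProductionStage("par_baked").name)   # 'PAR_BAKED' — lookup by value
    # ProductionStage["par_baked"] raises KeyError, so the repository keeps the
    # string unchanged when bracket lookup fails.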
@@ -36,7 +52,9 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate]):

             # Calculate total cost if unit cost provided
             if create_data.get('unit_cost') and create_data.get('current_quantity'):
-                create_data['total_cost'] = create_data['unit_cost'] * create_data['current_quantity']
+                unit_cost = create_data['unit_cost']
+                current_quantity = Decimal(str(create_data['current_quantity']))
+                create_data['total_cost'] = unit_cost * current_quantity

             # Create record
             record = await self.create(create_data)
@@ -524,4 +542,164 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate]):
                 ingredient_id=str(ingredient_id),
                 tenant_id=str(tenant_id)
             )
             raise
+
+    async def get_daily_batch_count(
+        self,
+        tenant_id: str,
+        date_start: datetime,
+        date_end: datetime,
+        prefix: Optional[str] = None
+    ) -> int:
+        """Get the count of batches created today for the given tenant"""
+        try:
+            conditions = [
+                Stock.tenant_id == tenant_id,
+                Stock.created_at >= date_start,
+                Stock.created_at <= date_end
+            ]
+
+            if prefix:
+                conditions.append(Stock.batch_number.like(f"{prefix}-%"))
+
+            stmt = select(func.count(Stock.id)).where(and_(*conditions))
+            result = await self.session.execute(stmt)
+            count = result.scalar() or 0
+
+            logger.debug(
+                "Retrieved daily batch count",
+                tenant_id=tenant_id,
+                prefix=prefix,
+                count=count,
+                date_start=date_start,
+                date_end=date_end
+            )
+
+            return count
+
+        except Exception as e:
+            logger.error(
+                "Failed to get daily batch count",
+                error=str(e),
+                tenant_id=tenant_id,
+                prefix=prefix
+            )
+            raise
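Both this repository and the production batch repository later in the diff inherit BatchCountProvider and implement the same get_daily_batch_count signature. A hedged sketch of the presumed contract (the real class lives in shared.utils.batch_generator and is not shown here):

    from abc import ABC, abstractmethod
    from datetime import datetime
    from typing import Optional

    class BatchCountProvider(ABC):
        """Anything that can report how many batches were created in a time window."""

        @abstractmethod
        async def get_daily_batch_count(
            self,
            tenant_id: str,
            date_start: datetime,
            date_end: datetime,
            prefix: Optional[str] = None,
        ) -> int:
            ...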
+    async def get_expired_batches_for_processing(self, tenant_id: UUID) -> List[Tuple[Stock, Ingredient]]:
+        """Get expired batches that haven't been processed yet (for automatic processing)"""
+        try:
+            current_date = datetime.now()
+
+            # Find expired batches that are still available and not yet marked as expired
+            result = await self.session.execute(
+                select(Stock, Ingredient)
+                .join(Ingredient, Stock.ingredient_id == Ingredient.id)
+                .where(
+                    and_(
+                        Stock.tenant_id == tenant_id,
+                        Stock.is_available == True,
+                        Stock.is_expired == False,
+                        Stock.current_quantity > 0,
+                        or_(
+                            and_(
+                                Stock.final_expiration_date.isnot(None),
+                                Stock.final_expiration_date <= current_date
+                            ),
+                            and_(
+                                Stock.final_expiration_date.is_(None),
+                                Stock.expiration_date.isnot(None),
+                                Stock.expiration_date <= current_date
+                            )
+                        )
+                    )
+                )
+                .order_by(
+                    asc(func.coalesce(Stock.final_expiration_date, Stock.expiration_date))
+                )
+            )
+
+            expired_batches = result.all()
+            logger.info("Found expired batches for processing",
+                        tenant_id=str(tenant_id),
+                        count=len(expired_batches))
+
+            return expired_batches
+
+        except Exception as e:
+            logger.error("Failed to get expired batches for processing",
+                         error=str(e), tenant_id=tenant_id)
+            raise
+
+    async def mark_batch_as_expired(self, stock_id: UUID, tenant_id: UUID) -> bool:
+        """Mark a specific batch as expired and unavailable"""
+        try:
+            result = await self.session.execute(
+                update(Stock)
+                .where(
+                    and_(
+                        Stock.id == stock_id,
+                        Stock.tenant_id == tenant_id
+                    )
+                )
+                .values(
+                    is_expired=True,
+                    is_available=False,
+                    quality_status="expired",
+                    updated_at=datetime.now()
+                )
+            )
+
+            if result.rowcount > 0:
+                logger.info("Marked batch as expired",
+                            stock_id=str(stock_id),
+                            tenant_id=str(tenant_id))
+                return True
+            else:
+                logger.warning("No batch found to mark as expired",
+                               stock_id=str(stock_id),
+                               tenant_id=str(tenant_id))
+                return False
+
+        except Exception as e:
+            logger.error("Failed to mark batch as expired",
+                         error=str(e),
+                         stock_id=str(stock_id),
+                         tenant_id=str(tenant_id))
+            raise
+
+    async def update_stock_to_zero(self, stock_id: UUID, tenant_id: UUID) -> bool:
+        """Update stock quantities to zero after moving to waste"""
+        try:
+            result = await self.session.execute(
+                update(Stock)
+                .where(
+                    and_(
+                        Stock.id == stock_id,
+                        Stock.tenant_id == tenant_id
+                    )
+                )
+                .values(
+                    current_quantity=0,
+                    available_quantity=0,
+                    updated_at=datetime.now()
+                )
+            )
+
+            if result.rowcount > 0:
+                logger.info("Updated stock quantities to zero",
+                            stock_id=str(stock_id),
+                            tenant_id=str(tenant_id))
+                return True
+            else:
+                logger.warning("No stock found to update to zero",
+                               stock_id=str(stock_id),
+                               tenant_id=str(tenant_id))
+                return False
+
+        except Exception as e:
+            logger.error("Failed to update stock to zero",
+                         error=str(e),
+                         stock_id=str(stock_id),
+                         tenant_id=str(tenant_id))
+            raise
@@ -54,16 +54,8 @@ class IngredientCreate(InventoryBaseSchema):
     reorder_quantity: float = Field(50.0, gt=0, description="Default reorder quantity")
     max_stock_level: Optional[float] = Field(None, gt=0, description="Maximum stock level")

-    # Storage requirements
-    requires_refrigeration: bool = Field(False, description="Requires refrigeration")
-    requires_freezing: bool = Field(False, description="Requires freezing")
-    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
-    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
-    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
-
-    # Shelf life
-    shelf_life_days: Optional[int] = Field(None, gt=0, description="Shelf life in days")
-    storage_instructions: Optional[str] = Field(None, description="Storage instructions")
+    # Shelf life (default value only - actual per batch)
+    shelf_life_days: Optional[int] = Field(None, gt=0, description="Default shelf life in days")

     # Properties
     is_perishable: bool = Field(False, description="Is perishable")
@@ -106,16 +98,8 @@ class IngredientUpdate(InventoryBaseSchema):
     reorder_quantity: Optional[float] = Field(None, gt=0, description="Default reorder quantity")
     max_stock_level: Optional[float] = Field(None, gt=0, description="Maximum stock level")

-    # Storage requirements
-    requires_refrigeration: Optional[bool] = Field(None, description="Requires refrigeration")
-    requires_freezing: Optional[bool] = Field(None, description="Requires freezing")
-    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
-    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
-    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
-
-    # Shelf life
-    shelf_life_days: Optional[int] = Field(None, gt=0, description="Shelf life in days")
-    storage_instructions: Optional[str] = Field(None, description="Storage instructions")
+    # Shelf life (default value only - actual per batch)
+    shelf_life_days: Optional[int] = Field(None, gt=0, description="Default shelf life in days")

     # Properties
     is_active: Optional[bool] = Field(None, description="Is active")
@@ -144,13 +128,7 @@ class IngredientResponse(InventoryBaseSchema):
     reorder_point: float
     reorder_quantity: float
     max_stock_level: Optional[float]
-    requires_refrigeration: bool
-    requires_freezing: bool
-    storage_temperature_min: Optional[float]
-    storage_temperature_max: Optional[float]
-    storage_humidity_max: Optional[float]
-    shelf_life_days: Optional[int]
-    storage_instructions: Optional[str]
+    shelf_life_days: Optional[int]  # Default value only
     is_active: bool
     is_perishable: bool
     allergen_info: Optional[Dict[str, Any]]
@@ -174,7 +152,7 @@ class StockCreate(InventoryBaseSchema):
     supplier_batch_ref: Optional[str] = Field(None, max_length=100, description="Supplier batch reference")

     # Production stage tracking
-    production_stage: ProductionStage = Field(ProductionStage.RAW_INGREDIENT, description="Production stage of the stock")
+    production_stage: ProductionStage = Field(default=ProductionStage.RAW_INGREDIENT, description="Production stage of the stock")
     transformation_reference: Optional[str] = Field(None, max_length=100, description="Transformation reference ID")

     current_quantity: float = Field(..., ge=0, description="Current quantity")
@@ -194,6 +172,15 @@ class StockCreate(InventoryBaseSchema):

     quality_status: str = Field("good", description="Quality status")

+    # Batch-specific storage requirements
+    requires_refrigeration: bool = Field(False, description="Requires refrigeration")
+    requires_freezing: bool = Field(False, description="Requires freezing")
+    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
+    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
+    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
+    shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
+    storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")
+

 class StockUpdate(InventoryBaseSchema):
     """Schema for updating stock entries"""
@@ -224,6 +211,15 @@ class StockUpdate(InventoryBaseSchema):
     is_available: Optional[bool] = Field(None, description="Is available")
     quality_status: Optional[str] = Field(None, description="Quality status")

+    # Batch-specific storage requirements
+    requires_refrigeration: Optional[bool] = Field(None, description="Requires refrigeration")
+    requires_freezing: Optional[bool] = Field(None, description="Requires freezing")
+    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
+    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
+    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
+    shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
+    storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")
+

 class StockResponse(InventoryBaseSchema):
     """Schema for stock API responses"""
@@ -258,6 +254,15 @@ class StockResponse(InventoryBaseSchema):
     is_available: bool
     is_expired: bool
     quality_status: str

+    # Batch-specific storage requirements
+    requires_refrigeration: bool
+    requires_freezing: bool
+    storage_temperature_min: Optional[float]
+    storage_temperature_max: Optional[float]
+    storage_humidity_max: Optional[float]
+    shelf_life_days: Optional[int]
+    storage_instructions: Optional[str]
+
     created_at: datetime
     updated_at: datetime
@@ -6,15 +6,18 @@ Implements hybrid detection patterns for critical stock issues and optimization

 import asyncio
 import json
+import uuid
 from typing import List, Dict, Any, Optional
 from uuid import UUID
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 import structlog
 from apscheduler.triggers.cron import CronTrigger
 from sqlalchemy import text

 from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
 from shared.alerts.templates import format_item_message
+from app.repositories.stock_repository import StockRepository
+from app.repositories.stock_movement_repository import StockMovementRepository

 logger = structlog.get_logger()
@@ -70,6 +73,15 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
             misfire_grace_time=300,
             max_instances=1
         )

+        # Expired batch detection - daily at 6:00 AM (alerts and automated processing)
+        self.scheduler.add_job(
+            self.check_and_process_expired_batches,
+            CronTrigger(hour=6, minute=0),  # Daily at 6:00 AM
+            id='expired_batch_processing',
+            misfire_grace_time=1800,  # 30 minute grace time
+            max_instances=1
+        )
+
         logger.info("Inventory alert schedules configured",
                     service=self.config.SERVICE_NAME)
@@ -770,4 +782,193 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
             logger.error("Error getting stock after order",
                          ingredient_id=ingredient_id,
                          error=str(e))
             return None
+
+    async def check_and_process_expired_batches(self):
+        """Daily check and automated processing of expired stock batches"""
+        try:
+            self._checks_performed += 1
+
+            # Use existing method to get active tenants from ingredients table
+            tenants = await self.get_active_tenants()
+
+            if not tenants:
+                logger.info("No active tenants found")
+                return
+
+            total_processed = 0
+            for tenant_id in tenants:
+                try:
+                    # Get expired batches for each tenant
+                    async with self.db_manager.get_background_session() as session:
+                        stock_repo = StockRepository(session)
+                        expired_batches = await stock_repo.get_expired_batches_for_processing(tenant_id)
+
+                        if expired_batches:
+                            processed_count = await self._process_expired_batches_for_tenant(tenant_id, expired_batches)
+                            total_processed += processed_count
+
+                except Exception as e:
+                    logger.error("Error processing expired batches for tenant",
+                                 tenant_id=str(tenant_id),
+                                 error=str(e))
+
+            logger.info("Expired batch processing completed",
+                        total_processed=total_processed,
+                        tenants_processed=len(tenants))
+
+        except Exception as e:
+            logger.error("Expired batch processing failed", error=str(e))
+            self._errors_count += 1
+
+    async def _process_expired_batches_for_tenant(self, tenant_id: UUID, batches: List[tuple]) -> int:
+        """Process expired batches for a specific tenant"""
+        processed_count = 0
+        processed_batches = []
+
+        try:
+            for stock, ingredient in batches:
+                try:
+                    # Process each batch individually with its own transaction
+                    await self._process_single_expired_batch(tenant_id, stock, ingredient)
+                    processed_count += 1
+                    processed_batches.append((stock, ingredient))
+
+                except Exception as e:
+                    logger.error("Error processing individual expired batch",
+                                 tenant_id=str(tenant_id),
+                                 stock_id=str(stock.id),
+                                 batch_number=stock.batch_number,
+                                 error=str(e))
+
+            # Generate summary alert for the tenant if any batches were processed
+            if processed_count > 0:
+                await self._generate_expired_batch_summary_alert(tenant_id, processed_batches)
+
+        except Exception as e:
+            logger.error("Error processing expired batches for tenant",
+                         tenant_id=str(tenant_id),
+                         error=str(e))
+
+        return processed_count
+
+    async def _process_single_expired_batch(self, tenant_id: UUID, stock, ingredient):
+        """Process a single expired batch: mark as expired, create waste movement, update stock"""
+        async with self.db_manager.get_background_session() as session:
+            async with session.begin():  # Use transaction for consistency
+                try:
+                    stock_repo = StockRepository(session)
+                    movement_repo = StockMovementRepository(session)
+
+                    # Calculate effective expiration date
+                    effective_expiration_date = stock.final_expiration_date or stock.expiration_date
+
+                    # 1. Mark the stock batch as expired
+                    await stock_repo.mark_batch_as_expired(stock.id, tenant_id)
+
+                    # 2. Create waste stock movement
+                    await movement_repo.create_automatic_waste_movement(
+                        tenant_id=tenant_id,
+                        ingredient_id=stock.ingredient_id,
+                        stock_id=stock.id,
+                        quantity=stock.current_quantity,
+                        unit_cost=float(stock.unit_cost) if stock.unit_cost else None,
+                        batch_number=stock.batch_number,
+                        expiration_date=effective_expiration_date,
+                        created_by=None  # Automatic system operation
+                    )
+
+                    # 3. Update the stock quantity to 0 (moved to waste)
+                    await stock_repo.update_stock_to_zero(stock.id, tenant_id)
+
+                    # Calculate days expired
+                    days_expired = (datetime.now().date() - effective_expiration_date.date()).days if effective_expiration_date else 0
+
+                    logger.info("Expired batch processed successfully",
+                                tenant_id=str(tenant_id),
+                                stock_id=str(stock.id),
+                                ingredient_name=ingredient.name,
+                                batch_number=stock.batch_number,
+                                quantity_wasted=stock.current_quantity,
+                                days_expired=days_expired)
+
+                except Exception as e:
+                    logger.error("Error in expired batch transaction",
+                                 stock_id=str(stock.id),
+                                 error=str(e))
+                    raise  # Re-raise to trigger rollback
+
+    async def _generate_expired_batch_summary_alert(self, tenant_id: UUID, processed_batches: List[tuple]):
+        """Generate summary alert for automatically processed expired batches"""
+        try:
+            total_batches = len(processed_batches)
+            total_quantity = sum(float(stock.current_quantity) for stock, ingredient in processed_batches)
+
+            # Get the most affected ingredients (top 3)
+            ingredient_summary = {}
+            for stock, ingredient in processed_batches:
+                ingredient_name = ingredient.name
+                if ingredient_name not in ingredient_summary:
+                    ingredient_summary[ingredient_name] = {
+                        'quantity': 0,
+                        'batches': 0,
+                        'unit': ingredient.unit_of_measure.value if ingredient.unit_of_measure else 'kg'
+                    }
+                ingredient_summary[ingredient_name]['quantity'] += float(stock.current_quantity)
+                ingredient_summary[ingredient_name]['batches'] += 1
+
+            # Sort by quantity and get top 3
+            top_ingredients = sorted(ingredient_summary.items(),
+                                     key=lambda x: x[1]['quantity'],
+                                     reverse=True)[:3]
+
+            # Build ingredient list for message
+            ingredient_list = []
+            for name, info in top_ingredients:
+                ingredient_list.append(f"{name} ({info['quantity']:.1f}{info['unit']}, {info['batches']} lote{'s' if info['batches'] > 1 else ''})")
+
+            remaining_count = total_batches - sum(info['batches'] for _, info in top_ingredients)
+            if remaining_count > 0:
+                ingredient_list.append(f"y {remaining_count} lote{'s' if remaining_count > 1 else ''} más")
+
+            # Create alert message
+            title = f"🗑️ Lotes Caducados Procesados Automáticamente"
+            message = (
+                f"Se han procesado automáticamente {total_batches} lote{'s' if total_batches > 1 else ''} "
+                f"caducado{'s' if total_batches > 1 else ''} ({total_quantity:.1f}kg total) y se ha{'n' if total_batches > 1 else ''} "
+                f"movido automáticamente a desperdicio:\n\n"
+                f"• {chr(10).join(ingredient_list)}\n\n"
+                f"Los lotes han sido marcados como no disponibles y se han generado los movimientos de desperdicio correspondientes."
+            )
+
+            await self.publish_item(tenant_id, {
+                'type': 'expired_batches_auto_processed',
+                'severity': 'medium',
+                'title': title,
+                'message': message,
+                'actions': [
+                    'Revisar movimientos de desperdicio',
+                    'Analizar causas de caducidad',
+                    'Ajustar niveles de stock',
+                    'Revisar rotación de inventario'
+                ],
+                'metadata': {
+                    'total_batches_processed': total_batches,
+                    'total_quantity_wasted': total_quantity,
+                    'processing_date': datetime.now(timezone.utc).isoformat(),
+                    'affected_ingredients': [
+                        {
+                            'name': name,
+                            'quantity_wasted': info['quantity'],
+                            'batches_count': info['batches'],
+                            'unit': info['unit']
+                        } for name, info in ingredient_summary.items()
+                    ],
+                    'automation_source': 'daily_expired_batch_check'
+                }
+            }, item_type='alert')
+
+        except Exception as e:
+            logger.error("Error generating expired batch summary alert",
+                         tenant_id=str(tenant_id),
+                         error=str(e))
@@ -20,6 +20,7 @@ from app.schemas.inventory import (
 )
 from app.core.database import get_db_transaction
 from shared.database.exceptions import DatabaseError
+from shared.utils.batch_generator import BatchNumberGenerator, create_fallback_batch_number

 logger = structlog.get_logger()
@@ -237,7 +238,21 @@ class InventoryService:
                 ingredient = await ingredient_repo.get_by_id(UUID(stock_data.ingredient_id))
                 if not ingredient or ingredient.tenant_id != tenant_id:
                     raise ValueError("Ingredient not found")

+                # Generate batch number if not provided
+                if not stock_data.batch_number:
+                    try:
+                        batch_generator = BatchNumberGenerator(stock_repo)
+                        stock_data.batch_number = await batch_generator.generate_batch_number(
+                            tenant_id=str(tenant_id),
+                            prefix="INV"
+                        )
+                        logger.info("Generated batch number", batch_number=stock_data.batch_number)
+                    except Exception as e:
+                        # Fallback to a simple batch number if generation fails
+                        stock_data.batch_number = create_fallback_batch_number("INV")
+                        logger.warning("Used fallback batch number", batch_number=stock_data.batch_number, error=str(e))
+
                 # Create stock entry
                 stock = await stock_repo.create_stock_entry(stock_data, tenant_id)
@@ -2,7 +2,7 @@

 [alembic]
 # path to migration scripts
-script_location = migrations
+script_location = .

 # template used to generate migration files
 # file_template = %%(rev)s_%%(slug)s
@@ -0,0 +1,114 @@
+"""Add production stage enum and columns
+
+Revision ID: 003
+Revises: 002
+Create Date: 2025-01-17 15:30:00.000000
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '003'
+down_revision = '002'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # Create ProductionStage enum type
+    op.execute("""
+        CREATE TYPE productionstage AS ENUM (
+            'raw_ingredient', 'par_baked', 'fully_baked',
+            'prepared_dough', 'frozen_product'
+        );
+    """)
+
+    # Add production_stage column to stock table
+    op.add_column('stock', sa.Column('production_stage',
+                  sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'),
+                  nullable=False, server_default='raw_ingredient'))
+
+    # Add transformation_reference column to stock table
+    op.add_column('stock', sa.Column('transformation_reference', sa.String(100), nullable=True))
+
+    # Add stage-specific expiration tracking columns
+    op.add_column('stock', sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('stock', sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('stock', sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True))
+
+    # Create product_transformations table
+    op.create_table(
+        'product_transformations',
+        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('transformation_reference', sa.String(100), nullable=False),
+        sa.Column('source_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('target_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
+        sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
+        sa.Column('source_quantity', sa.Float(), nullable=False),
+        sa.Column('target_quantity', sa.Float(), nullable=False),
+        sa.Column('conversion_ratio', sa.Float(), nullable=False, server_default='1.0'),
+        sa.Column('expiration_calculation_method', sa.String(50), nullable=False, server_default='days_from_transformation'),
+        sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
+        sa.Column('transformation_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
+        sa.Column('process_notes', sa.Text(), nullable=True),
+        sa.Column('performed_by', postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column('source_batch_numbers', sa.Text(), nullable=True),
+        sa.Column('target_batch_number', sa.String(100), nullable=True),
+        sa.Column('is_completed', sa.Boolean(), nullable=True, server_default='true'),
+        sa.Column('is_reversed', sa.Boolean(), nullable=True, server_default='false'),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
+        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
+        sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
+        sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
+        sa.PrimaryKeyConstraint('id')
+    )
+
+    # Add new indexes for enhanced functionality
+    op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'])
+    op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'])
+    op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'])
+
+    # Create indexes for product_transformations table
+    op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'])
+    op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'])
+    op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'])
+    op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'])
+    op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'])
+
+    # Update existing stockmovementtype enum to include TRANSFORMATION
+    op.execute("ALTER TYPE stockmovementtype ADD VALUE 'transformation';")
+
+
+def downgrade() -> None:
+    # Drop indexes for product_transformations
+    op.drop_index('idx_transformations_stages', table_name='product_transformations')
+    op.drop_index('idx_transformations_target', table_name='product_transformations')
+    op.drop_index('idx_transformations_source', table_name='product_transformations')
+    op.drop_index('idx_transformations_reference', table_name='product_transformations')
+    op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
+
+    # Drop new stock indexes
+    op.drop_index('idx_stock_final_expiration', table_name='stock')
+    op.drop_index('idx_stock_transformation', table_name='stock')
+    op.drop_index('idx_stock_production_stage', table_name='stock')
+
+    # Drop product_transformations table
+    op.drop_table('product_transformations')
+
+    # Remove new columns from stock table
+    op.drop_column('stock', 'final_expiration_date')
+    op.drop_column('stock', 'transformation_date')
+    op.drop_column('stock', 'original_expiration_date')
+    op.drop_column('stock', 'transformation_reference')
+    op.drop_column('stock', 'production_stage')
+
+    # Drop ProductionStage enum type
+    op.execute("DROP TYPE productionstage;")
+
+    # Note: Cannot easily remove 'transformation' from existing enum in PostgreSQL
+    # This would require recreating the enum and updating all references
+    # For now, we leave the enum value as it won't cause issues
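A note on the ALTER TYPE step in this migration, hedged and not part of the commit: on PostgreSQL versions before 12, ALTER TYPE ... ADD VALUE cannot run inside a transaction block, while Alembic normally wraps each migration in one. The usual workaround is Alembic's autocommit block, roughly:

    from alembic import op

    # Sketch only, assuming an older PostgreSQL target where the statement
    # must execute outside the migration's transaction.
    with op.get_context().autocommit_block():
        op.execute("ALTER TYPE stockmovementtype ADD VALUE 'transformation';")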
@@ -0,0 +1,104 @@
+"""Move storage configuration from ingredient to batch level
+
+Revision ID: 004_move_storage_config_to_batch
+Revises: 003_add_production_stage_enum
+Create Date: 2025-01-17 10:00:00.000000
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '004_move_storage_config_to_batch'
+down_revision = '003_add_production_stage_enum'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Move storage configuration from ingredients to stock batches"""
+
+    # Add batch-specific storage columns to stock table
+    op.add_column('stock', sa.Column('requires_refrigeration', sa.Boolean(), default=False))
+    op.add_column('stock', sa.Column('requires_freezing', sa.Boolean(), default=False))
+    op.add_column('stock', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
+    op.add_column('stock', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
+    op.add_column('stock', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
+    op.add_column('stock', sa.Column('shelf_life_days', sa.Integer(), nullable=True))
+    op.add_column('stock', sa.Column('storage_instructions', sa.Text(), nullable=True))
+
+    # Migrate existing data from ingredients to stock batches
+    # This will copy the ingredient-level storage config to all existing stock batches
+    op.execute("""
+        UPDATE stock
+        SET
+            requires_refrigeration = i.requires_refrigeration,
+            requires_freezing = i.requires_freezing,
+            storage_temperature_min = i.storage_temperature_min,
+            storage_temperature_max = i.storage_temperature_max,
+            storage_humidity_max = i.storage_humidity_max,
+            shelf_life_days = i.shelf_life_days,
+            storage_instructions = i.storage_instructions
+        FROM ingredients i
+        WHERE stock.ingredient_id = i.id
+    """)
+
+    # Remove storage configuration columns from ingredients table
+    # Keep only shelf_life_days as default value
+    op.drop_column('ingredients', 'requires_refrigeration')
+    op.drop_column('ingredients', 'requires_freezing')
+    op.drop_column('ingredients', 'storage_temperature_min')
+    op.drop_column('ingredients', 'storage_temperature_max')
+    op.drop_column('ingredients', 'storage_humidity_max')
+    op.drop_column('ingredients', 'storage_instructions')
+
+
+def downgrade():
+    """Revert storage configuration back to ingredient level"""
+
+    # Add storage configuration columns back to ingredients table
+    op.add_column('ingredients', sa.Column('requires_refrigeration', sa.Boolean(), default=False))
+    op.add_column('ingredients', sa.Column('requires_freezing', sa.Boolean(), default=False))
+    op.add_column('ingredients', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
+    op.add_column('ingredients', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
+    op.add_column('ingredients', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
+    op.add_column('ingredients', sa.Column('storage_instructions', sa.Text(), nullable=True))
+
+    # Migrate data back from stock to ingredients (use most common values per ingredient)
+    op.execute("""
+        UPDATE ingredients
+        SET
+            requires_refrigeration = COALESCE(
+                (SELECT bool_or(s.requires_refrigeration) FROM stock s WHERE s.ingredient_id = ingredients.id),
+                false
+            ),
+            requires_freezing = COALESCE(
+                (SELECT bool_or(s.requires_freezing) FROM stock s WHERE s.ingredient_id = ingredients.id),
+                false
+            ),
+            storage_temperature_min = (
+                SELECT MIN(s.storage_temperature_min) FROM stock s WHERE s.ingredient_id = ingredients.id
+            ),
+            storage_temperature_max = (
+                SELECT MAX(s.storage_temperature_max) FROM stock s WHERE s.ingredient_id = ingredients.id
+            ),
+            storage_humidity_max = (
+                SELECT MAX(s.storage_humidity_max) FROM stock s WHERE s.ingredient_id = ingredients.id
+            ),
+            storage_instructions = (
+                SELECT s.storage_instructions FROM stock s
+                WHERE s.ingredient_id = ingredients.id
+                AND s.storage_instructions IS NOT NULL
+                LIMIT 1
+            )
+    """)
+
+    # Remove batch-specific storage columns from stock table
+    op.drop_column('stock', 'requires_refrigeration')
+    op.drop_column('stock', 'requires_freezing')
+    op.drop_column('stock', 'storage_temperature_min')
+    op.drop_column('stock', 'storage_temperature_max')
+    op.drop_column('stock', 'storage_humidity_max')
+    op.drop_column('stock', 'shelf_life_days')
+    op.drop_column('stock', 'storage_instructions')
@@ -14,11 +14,12 @@ from .base import ProductionBaseRepository
 from app.models.production import ProductionBatch, ProductionStatus, ProductionPriority
 from shared.database.exceptions import DatabaseError, ValidationError
 from shared.database.transactions import transactional
+from shared.utils.batch_generator import BatchCountProvider, BatchNumberGenerator, create_fallback_batch_number

 logger = structlog.get_logger()


-class ProductionBatchRepository(ProductionBaseRepository):
+class ProductionBatchRepository(ProductionBaseRepository, BatchCountProvider):
     """Repository for production batch operations"""

     def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 300):
@@ -41,9 +42,17 @@ class ProductionBatchRepository(ProductionBaseRepository):

         # Generate batch number if not provided
         if "batch_number" not in batch_data or not batch_data["batch_number"]:
-            batch_data["batch_number"] = await self._generate_batch_number(
-                batch_data["tenant_id"]
-            )
+            try:
+                batch_generator = BatchNumberGenerator(self)
+                batch_data["batch_number"] = await batch_generator.generate_batch_number(
+                    tenant_id=batch_data["tenant_id"],
+                    prefix="PROD"
+                )
+                logger.info("Generated production batch number", batch_number=batch_data["batch_number"])
+            except Exception as e:
+                # Fallback to a simple batch number if generation fails
+                batch_data["batch_number"] = create_fallback_batch_number("PROD")
+                logger.warning("Used fallback batch number", batch_number=batch_data["batch_number"], error=str(e))

         # Set default values
         if "status" not in batch_data:
@@ -314,33 +323,57 @@ class ProductionBatchRepository(ProductionBaseRepository):
             logger.error("Error fetching urgent batches", error=str(e))
             raise DatabaseError(f"Failed to fetch urgent batches: {str(e)}")

-    async def _generate_batch_number(self, tenant_id: str) -> str:
-        """Generate a unique batch number"""
+    async def get_daily_batch_count(
+        self,
+        tenant_id: str,
+        date_start: datetime,
+        date_end: datetime,
+        prefix: Optional[str] = None
+    ) -> int:
+        """Get the count of production batches created today for the given tenant"""
         try:
-            # Get current date for prefix
-            today = datetime.utcnow().date()
-            date_prefix = today.strftime("%Y%m%d")
-
-            # Count batches created today
-            today_start = datetime.combine(today, datetime.min.time())
-            today_end = datetime.combine(today, datetime.max.time())
-
-            daily_batches = await self.get_multi(
-                filters={
-                    "tenant_id": tenant_id,
-                    "created_at__gte": today_start,
-                    "created_at__lte": today_end
-                }
-            )
+            conditions = {
+                "tenant_id": tenant_id,
+                "created_at__gte": date_start,
+                "created_at__lte": date_end
+            }
+
+            if prefix:
+                # Filter by batch numbers that start with the given prefix
+                filters_list = [
+                    and_(
+                        ProductionBatch.tenant_id == tenant_id,
+                        ProductionBatch.created_at >= date_start,
+                        ProductionBatch.created_at <= date_end,
+                        ProductionBatch.batch_number.like(f"{prefix}-%")
+                    )
+                ]
+                result = await self.session.execute(
+                    select(func.count(ProductionBatch.id)).where(and_(*filters_list))
+                )
+            else:
+                batches = await self.get_multi(filters=conditions)
+                result_count = len(batches)
+                return result_count
+
+            count = result.scalar() or 0
+
+            logger.debug(
+                "Retrieved daily production batch count",
+                tenant_id=tenant_id,
+                prefix=prefix,
+                count=count,
+                date_start=date_start,
+                date_end=date_end
+            )

-            # Generate sequential number
-            sequence = len(daily_batches) + 1
-            batch_number = f"PROD-{date_prefix}-{sequence:03d}"
-
-            return batch_number
-
+            return count
+
         except Exception as e:
-            logger.error("Error generating batch number", error=str(e))
-            # Fallback to timestamp-based number
-            timestamp = int(datetime.utcnow().timestamp())
-            return f"PROD-{timestamp}"
+            logger.error(
+                "Failed to get daily production batch count",
+                error=str(e),
+                tenant_id=tenant_id,
+                prefix=prefix
+            )
+            raise
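For context, a hedged sketch of how a BatchNumberGenerator could combine these counts into the next number; the real implementation in shared.utils.batch_generator is not part of this diff, and the "PREFIX-YYYYMMDD-NNN" format is inferred from the removed _generate_batch_number helper above.

    from datetime import datetime, time

    async def generate_batch_number(provider, tenant_id: str, prefix: str = "PROD") -> str:
        # provider is any BatchCountProvider, e.g. StockRepository or
        # ProductionBatchRepository from this commit.
        today = datetime.utcnow().date()
        count = await provider.get_daily_batch_count(
            tenant_id=tenant_id,
            date_start=datetime.combine(today, time.min),
            date_end=datetime.combine(today, time.max),
            prefix=prefix,
        )
        return f"{prefix}-{today.strftime('%Y%m%d')}-{count + 1:03d}"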