Improve the frontend and fix TODOs
@@ -5,7 +5,7 @@ Service-to-service endpoint for cloning inventory data with date adjustment
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from sqlalchemy import select, func
import structlog
import uuid
from datetime import datetime, timezone
@@ -18,7 +18,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

from app.core.database import get_db
from app.models.inventory import Ingredient, Stock
from app.models.inventory import Ingredient, Stock, StockMovement
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE

logger = structlog.get_logger()
@@ -83,15 +83,49 @@ async def clone_demo_data(
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Check if data already exists for this virtual tenant (idempotency)
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_uuid).limit(1)
        )
        existing_ingredient = existing_check.scalars().first()

        if existing_ingredient:
            logger.warning(
                "Data already exists for virtual tenant - cleaning before re-clone",
                virtual_tenant_id=virtual_tenant_id,
                base_tenant_id=base_tenant_id
            )
            # Clean up existing data first to ensure fresh clone
            from sqlalchemy import delete

            await db.execute(
                delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Stock).where(Stock.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
            )
            await db.commit()

            logger.info(
                "Existing data cleaned, proceeding with fresh clone",
                virtual_tenant_id=virtual_tenant_id
            )

        # Track cloning statistics
        stats = {
            "ingredients": 0,
            "stock_batches": 0,
            "stock_movements": 0,
            "alerts_generated": 0
        }

        # Mapping from base ingredient ID to virtual ingredient ID
        ingredient_id_mapping = {}
        # Mapping from base stock ID to virtual stock ID
        stock_id_mapping = {}

        # Clone Ingredients
        result = await db.execute(
@@ -213,9 +247,11 @@ async def clone_demo_data(
                BASE_REFERENCE_DATE
            ) or session_created_at

            # Create new stock batch
            # Create new stock batch with new ID
            new_stock_id = uuid.uuid4()

            new_stock = Stock(
                id=uuid.uuid4(),
                id=new_stock_id,
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                supplier_id=stock.supplier_id,
@@ -250,6 +286,72 @@ async def clone_demo_data(
            db.add(new_stock)
            stats["stock_batches"] += 1

            # Store mapping for movement cloning
            stock_id_mapping[stock.id] = new_stock_id

        await db.flush()  # Ensure stock is persisted before movements

        # Clone Stock Movements with date adjustment
        result = await db.execute(
            select(StockMovement).where(StockMovement.tenant_id == base_uuid)
        )
        base_movements = result.scalars().all()

        logger.info(
            "Found stock movements to clone",
            count=len(base_movements),
            base_tenant=str(base_uuid)
        )

        for movement in base_movements:
            # Map ingredient ID and stock ID
            new_ingredient_id = ingredient_id_mapping.get(movement.ingredient_id)
            new_stock_id = stock_id_mapping.get(movement.stock_id) if movement.stock_id else None

            if not new_ingredient_id:
                logger.warning(
                    "Movement references non-existent ingredient, skipping",
                    movement_id=str(movement.id),
                    ingredient_id=str(movement.ingredient_id)
                )
                continue

            # Adjust movement date relative to session creation
            adjusted_movement_date = adjust_date_for_demo(
                movement.movement_date,
                session_created_at,
                BASE_REFERENCE_DATE
            ) or session_created_at

            adjusted_created_at = adjust_date_for_demo(
                movement.created_at,
                session_created_at,
                BASE_REFERENCE_DATE
            ) or session_created_at

            # Create new stock movement
            new_movement = StockMovement(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                stock_id=new_stock_id,
                movement_type=movement.movement_type,
                quantity=movement.quantity,
                unit_cost=movement.unit_cost,
                total_cost=movement.total_cost,
                quantity_before=movement.quantity_before,
                quantity_after=movement.quantity_after,
                reference_number=movement.reference_number,
                supplier_id=movement.supplier_id,
                notes=movement.notes,
                reason_code=movement.reason_code,
                movement_date=adjusted_movement_date,
                created_at=adjusted_created_at,
                created_by=movement.created_by
            )
            db.add(new_movement)
            stats["stock_movements"] += 1

        # Commit all changes
        await db.commit()
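For reference, a minimal sketch of the date-shifting helper these hunks rely on, assuming adjust_date_for_demo simply re-bases each historical timestamp on the demo session's creation time (the real implementation lives in shared.utils.demo_dates and may differ):

from datetime import datetime

def adjust_date_for_demo(original: datetime, session_created_at: datetime, base_reference_date: datetime):
    """Shift a timestamp from the base dataset into the demo session's timeline (sketch)."""
    if original is None:
        return None
    # Keep the same offset the record had from the reference date,
    # but measure it from the moment the demo session was created.
    return session_created_at + (original - base_reference_date)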
@@ -312,3 +414,104 @@ async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all inventory data for a virtual demo tenant

    Called by demo session cleanup service to remove ephemeral data
    when demo sessions expire or are destroyed.

    Args:
        virtual_tenant_id: Virtual tenant UUID to delete

    Returns:
        Deletion status and count of records deleted
    """
    from sqlalchemy import delete

    logger.info(
        "Deleting inventory data for virtual tenant",
        virtual_tenant_id=virtual_tenant_id
    )

    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records before deletion for reporting
        stock_count = await db.scalar(
            select(func.count(Stock.id)).where(Stock.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(Ingredient.id)).where(Ingredient.tenant_id == virtual_uuid)
        )
        movement_count = await db.scalar(
            select(func.count(StockMovement.id)).where(StockMovement.tenant_id == virtual_uuid)
        )

        # Delete in correct order to respect foreign key constraints
        # 1. Delete StockMovements (references Stock)
        await db.execute(
            delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
        )

        # 2. Delete Stock batches (references Ingredient)
        await db.execute(
            delete(Stock).where(Stock.tenant_id == virtual_uuid)
        )

        # 3. Delete Ingredients
        await db.execute(
            delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
        )

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            stocks_deleted=stock_count,
            ingredients_deleted=ingredient_count,
            movements_deleted=movement_count,
            duration_ms=duration_ms
        )

        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "stock_batches": stock_count,
                "ingredients": ingredient_count,
                "stock_movements": movement_count,
                "total": stock_count + ingredient_count + movement_count
            },
            "duration_ms": duration_ms
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to delete inventory data",
            virtual_tenant_id=virtual_tenant_id,
            error=str(e),
            exc_info=True
        )
        await db.rollback()
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete inventory data: {str(e)}"
        )
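A minimal sketch of how the demo cleanup service might call this endpoint. The base URL, router prefix, and header name are assumptions for illustration; the real values come from the deployment config and from whatever verify_internal_api_key reads:

import httpx

async def cleanup_demo_tenant(virtual_tenant_id: str) -> dict:
    # Hypothetical base URL and header name, for illustration only.
    base_url = "http://inventory-service:8000/internal/demo"
    headers = {"X-Internal-API-Key": "change-me"}
    async with httpx.AsyncClient(base_url=base_url, headers=headers) as client:
        response = await client.delete(f"/tenant/{virtual_tenant_id}")
        response.raise_for_status()
        # Returns the deletion report: records_deleted counts and duration_ms.
        return response.json()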
@@ -100,6 +100,106 @@ async def get_stock(
    )


# ===== STOCK MOVEMENTS ROUTES (must come before stock/{stock_id} route) =====

@router.get(
    route_builder.build_base_route("stock/movements"),
    response_model=List[StockMovementResponse]
)
async def get_stock_movements(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    ingredient_id: Optional[str] = Query(None, description="Filter by ingredient"),
    movement_type: Optional[str] = Query(None, description="Filter by movement type"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock movements with filtering"""
    logger.info("Stock movements endpoint called",
                tenant_id=str(tenant_id),
                ingredient_id=ingredient_id,
                skip=skip,
                limit=limit,
                movement_type=movement_type)

    # Validate and convert ingredient_id if provided
    ingredient_uuid = None
    if ingredient_id:
        try:
            ingredient_uuid = UUID(ingredient_id)
            logger.info("Ingredient ID validated", ingredient_id=str(ingredient_uuid))
        except (ValueError, AttributeError) as e:
            logger.error("Invalid ingredient_id format",
                         ingredient_id=ingredient_id,
                         error=str(e))
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid ingredient_id format: {ingredient_id}. Must be a valid UUID."
            )

    try:
        service = InventoryService()
        movements = await service.get_stock_movements(
            tenant_id, skip, limit, ingredient_uuid, movement_type
        )
        logger.info("Successfully retrieved stock movements",
                    count=len(movements),
                    tenant_id=str(tenant_id))
        return movements
    except ValueError as e:
        logger.error("Validation error in stock movements",
                     error=str(e),
                     tenant_id=str(tenant_id),
                     ingredient_id=ingredient_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Failed to get stock movements",
                     error=str(e),
                     error_type=type(e).__name__,
                     tenant_id=str(tenant_id),
                     ingredient_id=ingredient_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get stock movements: {str(e)}"
        )


@router.post(
    route_builder.build_base_route("stock/movements"),
    response_model=StockMovementResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_stock_movement(
    movement_data: StockMovementCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create stock movement record"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        movement = await service.create_stock_movement(movement_data, tenant_id, user_id)
        return movement
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create stock movement"
        )


# ===== STOCK DETAIL ROUTES (must come after stock/movements routes) =====

@router.get(
    route_builder.build_resource_detail_route("stock", "stock_id"),
    response_model=StockResponse
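The "must come before stock/{stock_id}" comments matter because FastAPI matches routes in the order they are registered. A minimal illustration with simplified literal paths standing in for what route_builder produces:

from fastapi import FastAPI

app = FastAPI()

# Registered first, so the literal path wins for GET /stock/movements.
@app.get("/stock/movements")
async def movements():
    return {"route": "movements"}

# Registered second; only matches when the segment is not "movements".
@app.get("/stock/{stock_id}")
async def stock_detail(stock_id: str):
    return {"route": "detail", "stock_id": stock_id}

# If the order were reversed, GET /stock/movements would be captured by
# stock_detail with stock_id="movements" and fail UUID validation downstream.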
@@ -199,68 +299,3 @@ async def delete_stock(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete stock entry"
        )


@router.get(
    route_builder.build_base_route("stock/movements"),
    response_model=List[StockMovementResponse]
)
async def get_stock_movements(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient"),
    movement_type: Optional[str] = Query(None, description="Filter by movement type"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock movements with filtering"""
    logger.info("API endpoint reached!",
                tenant_id=tenant_id,
                ingredient_id=ingredient_id,
                skip=skip,
                limit=limit)

    try:
        service = InventoryService()
        movements = await service.get_stock_movements(
            tenant_id, skip, limit, ingredient_id, movement_type
        )
        logger.info("Returning movements", count=len(movements))
        return movements
    except Exception as e:
        logger.error("Failed to get stock movements", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get stock movements"
        )


@router.post(
    route_builder.build_base_route("stock/movements"),
    response_model=StockMovementResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_stock_movement(
    movement_data: StockMovementCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create stock movement record"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        movement = await service.create_stock_movement(movement_data, tenant_id, user_id)
        return movement
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create stock movement"
        )
@@ -77,12 +77,12 @@ class StockMovementType(enum.Enum):
    """Types of inventory movements"""
    PURCHASE = "PURCHASE"
    PRODUCTION_USE = "PRODUCTION_USE"
    TRANSFORMATION = "TRANSFORMATION"  # Converting between production stages
    ADJUSTMENT = "ADJUSTMENT"
    WASTE = "WASTE"
    TRANSFER = "TRANSFER"
    RETURN = "RETURN"
    INITIAL_STOCK = "INITIAL_STOCK"
    TRANSFORMATION = "TRANSFORMATION"  # Converting between production stages


class Ingredient(Base):
@@ -348,7 +348,7 @@ class StockMovement(Base):
    stock_id = Column(UUID(as_uuid=True), ForeignKey('stock.id'), nullable=True, index=True)

    # Movement details
    movement_type = Column(SQLEnum('PURCHASE', 'PRODUCTION_USE', 'ADJUSTMENT', 'WASTE', 'TRANSFER', 'RETURN', 'INITIAL_STOCK', name='stockmovementtype', create_type=False), nullable=False, index=True)
    movement_type = Column(SQLEnum(StockMovementType, name='stockmovementtype', create_type=False), nullable=False, index=True)
    quantity = Column(Float, nullable=False)
    unit_cost = Column(Numeric(10, 2), nullable=True)
    total_cost = Column(Numeric(10, 2), nullable=True)
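Passing the Python enum class instead of a hard-coded list of value strings keeps the column definition and StockMovementType from drifting apart (the old string list was missing TRANSFORMATION). A minimal sketch of the pattern using SQLAlchemy's generic Enum type on a toy model, not the project's actual declarative base:

import enum
from sqlalchemy import Column, Enum, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class MovementType(enum.Enum):
    PURCHASE = "PURCHASE"
    TRANSFORMATION = "TRANSFORMATION"

class Movement(Base):
    __tablename__ = "movement_example"
    id = Column(Integer, primary_key=True)
    # Allowed values are taken from the enum class, so adding a new member
    # automatically makes it a valid value for this column.
    movement_type = Column(Enum(MovementType, name="stockmovementtype"), nullable=False)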
@@ -553,4 +553,4 @@ class StockAlert(Base):
            'resolution_notes': self.resolution_notes,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
        }
        }
@@ -7,7 +7,7 @@ from typing import List, Optional, Dict, Any, Tuple
from uuid import UUID
from datetime import datetime, timedelta
from decimal import Decimal
from sqlalchemy import select, func, and_, or_, desc, asc, update
from sqlalchemy import select, func, and_, or_, desc, asc, update, exists
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
@@ -400,12 +400,33 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate], BatchCoun
            )
            expiring_count = expiring_result.scalar() or 0

            # Count out of stock items (ingredients with no available stock)
            out_of_stock_result = await self.session.execute(
                select(func.count(Ingredient.id)).where(
                    and_(
                        Ingredient.tenant_id == tenant_id,
                        ~exists(
                            select(1).where(
                                and_(
                                    Stock.ingredient_id == Ingredient.id,
                                    Stock.tenant_id == tenant_id,
                                    Stock.is_available == True,
                                    Stock.available_quantity > 0
                                )
                            )
                        )
                    )
                )
            )
            out_of_stock_count = out_of_stock_result.scalar() or 0

            return {
                'total_stock_items': basic_summary.total_stock_items or 0,
                'total_stock_value': float(basic_summary.total_stock_value) if basic_summary.total_stock_value else 0.0,
                'unique_ingredients': basic_summary.unique_ingredients or 0,
                'expired_items': expired_count,
                'expiring_soon_items': expiring_count
                'expiring_soon_items': expiring_count,
                'out_of_stock_count': out_of_stock_count
            }

        except Exception as e:
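The ~exists(...) construct is a correlated NOT EXISTS anti-join: it counts ingredients for which no available stock row exists. A self-contained sketch against toy models that prints the generated SQL; the table shapes are simplified stand-ins for the real ones:

from sqlalchemy import Boolean, Column, ForeignKey, Integer, Numeric, and_, exists, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Ingredient(Base):
    __tablename__ = "ingredients"
    id = Column(Integer, primary_key=True)
    tenant_id = Column(Integer)

class Stock(Base):
    __tablename__ = "stock"
    id = Column(Integer, primary_key=True)
    tenant_id = Column(Integer)
    ingredient_id = Column(Integer, ForeignKey("ingredients.id"))
    is_available = Column(Boolean)
    available_quantity = Column(Numeric)

tenant_id = 1
query = select(func.count(Ingredient.id)).where(
    and_(
        Ingredient.tenant_id == tenant_id,
        # The repository spells this with an explicit select(1); exists().where(...)
        # produces the same correlated NOT EXISTS subquery.
        ~exists().where(
            and_(
                Stock.ingredient_id == Ingredient.id,
                Stock.tenant_id == tenant_id,
                Stock.is_available == True,
                Stock.available_quantity > 0,
            )
        ),
    )
)

# Printing the statement shows the correlated NOT EXISTS subquery,
# i.e. "count ingredients with no matching available stock row".
print(query)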
@@ -109,6 +109,16 @@ class GrantProgramEligibility(BaseModel):
    eligible: bool = Field(description="Whether eligible for this grant")
    confidence: str = Field(description="Confidence level: high, medium, low")
    requirements_met: bool = Field(description="Whether requirements are met")
    funding_eur: float = Field(description="Available funding in euros")
    deadline: str = Field(description="Application deadline")
    program_type: str = Field(description="Type: grant, loan, or certification")
    sector_specific: Optional[str] = Field(None, description="Sector if specific: bakery, retail, etc.")


class SpainCompliance(BaseModel):
    """Spain-specific legal compliance"""
    law_1_2025: bool = Field(description="Compliance with Spanish Law 1/2025 on food waste")
    circular_economy_strategy: bool = Field(description="Aligned with Spanish Circular Economy Strategy")


class GrantReadiness(BaseModel):
@@ -116,6 +126,7 @@ class GrantReadiness(BaseModel):
    overall_readiness_percentage: float = Field(description="Overall readiness percentage")
    grant_programs: Dict[str, GrantProgramEligibility] = Field(description="Eligibility by program")
    recommended_applications: List[str] = Field(description="Recommended grant programs to apply for")
    spain_compliance: SpainCompliance = Field(description="Spain-specific compliance status")


class SustainabilityMetrics(BaseModel):
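To show how these schemas nest, a hypothetical payload that would validate against them, assuming the models have no other required fields outside these hunks; the values are illustrative only, loosely echoing the grant data used later in this commit:

example = GrantReadiness(
    overall_readiness_percentage=62.5,
    grant_programs={
        "life_circular_economy": GrantProgramEligibility(
            eligible=True,
            confidence="medium",
            requirements_met=True,
            funding_eur=73_000_000,
            deadline="2025-09-23",
            program_type="grant",
            sector_specific=None,  # Not sector-specific
        )
    },
    recommended_applications=["life_circular_economy"],
    spain_compliance=SpainCompliance(
        law_1_2025=True,
        circular_economy_strategy=True,
    ),
)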
@@ -100,6 +100,9 @@ class DashboardService:
        stock_value_trend = await self._get_stock_value_trend(db, tenant_id, days=30)
        alert_trend = await dashboard_repo.get_alert_trend(tenant_id, days=30)

        # Get stock summary for total stock items
        stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)

        # Recent activity
        recent_activity = await self.get_recent_activity(db, tenant_id, limit=10)
@@ -108,7 +111,7 @@ class DashboardService:
            total_ingredients=inventory_summary.total_ingredients,
            active_ingredients=inventory_summary.total_ingredients,  # Assuming all are active
            total_stock_value=inventory_summary.total_stock_value,
            total_stock_items=await self._get_total_stock_items(db, tenant_id),
            total_stock_items=stock_summary.get('total_stock_items', 0),

            # Stock status breakdown
            in_stock_items=await self._get_in_stock_count(db, tenant_id),
@@ -872,6 +875,201 @@ class DashboardService:
            "temperature_compliance_rate": Decimal("100")
        }

    async def _get_in_stock_count(self, db, tenant_id: UUID) -> int:
        """Get count of items currently in stock"""
        try:
            repos = self._get_repositories(db)
            stock_repo = repos['stock_repo']

            # Get stock summary and extract in-stock count
            stock_summary = await stock_repo.get_stock_summary_by_tenant(tenant_id)
            return stock_summary.get('in_stock_items', 0)

        except Exception as e:
            logger.error("Failed to get in-stock count", error=str(e))
            return 0

    async def _get_ingredient_metrics(self, db, tenant_id: UUID) -> Dict[str, Any]:
        """Get ingredient metrics for business model analysis"""
        try:
            repos = self._get_repositories(db)
            ingredient_repo = repos['ingredient_repo']

            # Get all ingredients for the tenant
            ingredients = await ingredient_repo.get_ingredients_by_tenant(tenant_id, limit=1000)

            if not ingredients:
                return {
                    "total_types": 0,
                    "avg_stock": 0.0,
                    "finished_product_ratio": 0.0,
                    "supplier_count": 0
                }

            # Calculate metrics
            total_types = len(ingredients)

            # Calculate average stock per ingredient
            total_stock = sum(float(i.current_stock_level or 0) for i in ingredients)
            avg_stock = total_stock / total_types if total_types > 0 else 0

            # Calculate finished product ratio
            finished_products = len([i for i in ingredients if hasattr(i, 'product_type') and i.product_type and i.product_type.value == 'finished_product'])
            finished_ratio = finished_products / total_types if total_types > 0 else 0

            # Estimate supplier diversity (simplified)
            supplier_count = len(set(str(i.supplier_id) for i in ingredients if hasattr(i, 'supplier_id') and i.supplier_id)) or 1

            return {
                "total_types": total_types,
                "avg_stock": avg_stock,
                "finished_product_ratio": finished_ratio,
                "supplier_count": supplier_count
            }

        except Exception as e:
            logger.error("Failed to get ingredient metrics", error=str(e))
            return {
                "total_types": 0,
                "avg_stock": 0.0,
                "finished_product_ratio": 0.0,
                "supplier_count": 0
            }

    async def _analyze_operational_patterns(self, db, tenant_id: UUID) -> Dict[str, Any]:
        """Analyze operational patterns for business model insights"""
        try:
            repos = self._get_repositories(db)

            # Get ingredients to analyze patterns
            ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)

            if not ingredients:
                return {
                    "order_frequency": "unknown",
                    "seasonal_variation": "low",
                    "bulk_indicator": "unknown",
                    "scale_indicator": "small"
                }

            # Analyze order frequency based on reorder patterns
            frequent_reorders = len([i for i in ingredients if hasattr(i, 'reorder_frequency') and i.reorder_frequency and i.reorder_frequency > 5])
            infrequent_reorders = len([i for i in ingredients if hasattr(i, 'reorder_frequency') and i.reorder_frequency and i.reorder_frequency <= 2])

            if frequent_reorders > len(ingredients) * 0.3:
                order_frequency = "high"
            elif infrequent_reorders > len(ingredients) * 0.4:
                order_frequency = "low"
            else:
                order_frequency = "moderate"

            # Analyze seasonal variation (simplified estimation)
            seasonal_variation = "moderate"  # Default assumption for bakery business

            # Analyze bulk purchasing indicator
            bulk_items = len([i for i in ingredients if hasattr(i, 'bulk_order_quantity') and i.bulk_order_quantity and i.bulk_order_quantity > 100])
            if bulk_items > len(ingredients) * 0.2:
                bulk_indicator = "high"
            elif bulk_items < len(ingredients) * 0.05:
                bulk_indicator = "low"
            else:
                bulk_indicator = "moderate"

            # Analyze production scale
            total_ingredients = len(ingredients)
            if total_ingredients > 500:
                scale_indicator = "large"
            elif total_ingredients > 100:
                scale_indicator = "medium"
            else:
                scale_indicator = "small"

            return {
                "order_frequency": order_frequency,
                "seasonal_variation": seasonal_variation,
                "bulk_indicator": bulk_indicator,
                "scale_indicator": scale_indicator
            }

        except Exception as e:
            logger.error("Failed to analyze operational patterns", error=str(e))
            return {
                "order_frequency": "unknown",
                "seasonal_variation": "low",
                "bulk_indicator": "unknown",
                "scale_indicator": "small"
            }

    async def _generate_model_recommendations(
        self,
        model: str,
        ingredient_metrics: Dict[str, Any],
        operational_patterns: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Generate business model specific recommendations"""
        try:
            recommendations = {
                "specific": [],
                "optimization": []
            }

            # Model-specific recommendations
            if model == "central_bakery":
                recommendations["specific"].extend([
                    "Optimize distribution network for multi-location delivery",
                    "Implement centralized procurement for bulk discounts",
                    "Standardize recipes across all production facilities"
                ])

                if operational_patterns.get("scale_indicator") == "large":
                    recommendations["optimization"].extend([
                        "Automate inter-facility transfers",
                        "Implement predictive demand forecasting",
                        "Optimize fleet routing for distribution"
                    ])

            elif model == "individual_bakery":
                recommendations["specific"].extend([
                    "Focus on local sourcing to reduce costs",
                    "Implement just-in-time production scheduling",
                    "Optimize single-location workflow efficiency"
                ])

                recommendations["optimization"].extend([
                    "Reduce waste through better portion control",
                    "Implement daily production planning",
                    "Optimize oven scheduling for energy efficiency"
                ])

            elif model == "mixed":
                recommendations["specific"].extend([
                    "Balance centralized and decentralized operations",
                    "Implement hybrid sourcing strategy",
                    "Maintain flexibility in production planning"
                ])

                recommendations["optimization"].extend([
                    "Optimize batch sizes for efficiency",
                    "Implement cross-training for staff flexibility",
                    "Balance inventory across multiple locations"
                ])

            # Generic recommendations based on metrics
            if ingredient_metrics.get("finished_product_ratio", 0) > 0.5:
                recommendations["optimization"].append("Focus on finished product quality control")

            if operational_patterns.get("order_frequency") == "high":
                recommendations["optimization"].append("Streamline ordering process with automated reordering")

            return recommendations

        except Exception as e:
            logger.error("Failed to generate model recommendations", error=str(e))
            return {
                "specific": ["Review business model configuration"],
                "optimization": ["Analyze operational data for insights"]
            }

    async def _analyze_inventory_performance(self, db, tenant_id: UUID, days_back: int) -> Dict[str, Any]:
        """Analyze overall inventory performance metrics using real data"""
        try:
@@ -412,11 +412,11 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
                for rec in recommendations:
                    await self._generate_stock_recommendation(tenant_id, rec)

                except Exception as e:
                    logger.error("Error generating recommendations for tenant",
                                 tenant_id=str(tenant_id),
                                 error=str(e))

            except Exception as e:
                logger.error("Error generating recommendations for tenant",
                             tenant_id=str(tenant_id),
                             error=str(e))

        except Exception as e:
            logger.error("Inventory recommendations failed", error=str(e))
            self._errors_count += 1
@@ -510,11 +510,11 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
                for waste in waste_data:
                    await self._generate_waste_recommendation(tenant_id, waste)

                except Exception as e:
                    logger.error("Error generating waste recommendations",
                                 tenant_id=str(tenant_id),
                                 error=str(e))

            except Exception as e:
                logger.error("Error generating waste recommendations",
                             tenant_id=str(tenant_id),
                             error=str(e))

        except Exception as e:
            logger.error("Waste reduction recommendations failed", error=str(e))
            self._errors_count += 1
@@ -885,4 +885,4 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
        except Exception as e:
            logger.error("Error generating expired batch summary alert",
                         tenant_id=str(tenant_id),
                         error=str(e))
                         error=str(e))
@@ -419,15 +419,36 @@ class InventoryService:
    ) -> List[StockMovementResponse]:
        """Get stock movements with filtering"""
        logger.info("📈 Getting stock movements",
                    tenant_id=tenant_id,
                    ingredient_id=ingredient_id,
                    tenant_id=str(tenant_id),
                    ingredient_id=str(ingredient_id) if ingredient_id else None,
                    skip=skip,
                    limit=limit)
                    limit=limit,
                    movement_type=movement_type)
        try:
            async with get_db_transaction() as db:
                movement_repo = StockMovementRepository(db)
                ingredient_repo = IngredientRepository(db)

                # Validate ingredient exists if filtering by ingredient
                if ingredient_id:
                    ingredient = await ingredient_repo.get_by_id(ingredient_id)
                    if not ingredient:
                        logger.warning("Ingredient not found for movements query",
                                       ingredient_id=str(ingredient_id),
                                       tenant_id=str(tenant_id))
                        raise ValueError(f"Ingredient {ingredient_id} not found")

                    if ingredient.tenant_id != tenant_id:
                        logger.error("Ingredient does not belong to tenant",
                                     ingredient_id=str(ingredient_id),
                                     ingredient_tenant=str(ingredient.tenant_id),
                                     requested_tenant=str(tenant_id))
                        raise ValueError(f"Ingredient {ingredient_id} does not belong to tenant {tenant_id}")

                    logger.info("Ingredient validated for movements query",
                                ingredient_name=ingredient.name,
                                ingredient_id=str(ingredient_id))

                # Get filtered movements
                movements = await movement_repo.get_movements(
                    tenant_id=tenant_id,
@@ -454,8 +475,14 @@ class InventoryService:
                logger.info("✅ Returning movements", response_count=len(responses))
                return responses

        except ValueError:
            # Re-raise validation errors as-is
            raise
        except Exception as e:
            logger.error("❌ Failed to get stock movements", error=str(e), tenant_id=tenant_id)
            logger.error("❌ Failed to get stock movements",
                         error=str(e),
                         error_type=type(e).__name__,
                         tenant_id=str(tenant_id))
            raise

    # ===== ALERTS AND NOTIFICATIONS =====
@@ -577,7 +604,7 @@ class InventoryService:
            low_stock_alerts=len(low_stock_items),
            expiring_soon_items=len(expiring_items),
            expired_items=len(expired_items),
            out_of_stock_items=0,  # TODO: Calculate this
            out_of_stock_items=stock_summary.get('out_of_stock_count', 0),
            stock_by_category=stock_by_category,
            recent_movements=recent_activity.get('total_movements', 0),
            recent_purchases=recent_activity.get('purchase', {}).get('count', 0),
@@ -16,6 +16,7 @@ from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.inventory_alert_repository import InventoryAlertRepository
from shared.clients.production_client import create_production_client

logger = structlog.get_logger()
@@ -170,6 +171,13 @@ class SustainabilityService:
                'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
            }

            # Get waste incidents from inventory alert repository
            alert_repo = InventoryAlertRepository(db)
            waste_opportunities = await alert_repo.get_waste_opportunities(tenant_id)

            # Sum up all waste incidents for the period
            total_waste_incidents = sum(item['waste_incidents'] for item in waste_opportunities) if waste_opportunities else 0

            return {
                'total_waste_kg': total_waste,
                'production_waste_kg': production_waste + defect_waste,
@@ -177,7 +185,7 @@ class SustainabilityService:
                'waste_percentage': waste_percentage,
                'total_production_kg': total_production,
                'waste_by_reason': waste_by_reason,
                'waste_incidents': int(inv_data.waste_incidents or 0)
                'waste_incidents': total_waste_incidents
            }

        except Exception as e:
@@ -492,29 +500,54 @@ class SustainabilityService:
        return areas

    def _assess_grant_readiness(self, sdg_compliance: Dict[str, Any]) -> Dict[str, Any]:
        """Assess readiness for various grant programs"""
        """
        Assess readiness for EU grant programs accessible to Spanish bakeries and retail.
        Based on 2025 research and Spain's Law 1/2025 on food waste prevention.
        """
        reduction = sdg_compliance['sdg_12_3']['reduction_achieved']

        grants = {
            'eu_horizon_europe': {
                'eligible': reduction >= 30,
                'confidence': 'high' if reduction >= 50 else 'medium' if reduction >= 30 else 'low',
                'requirements_met': reduction >= 30
            },
            'eu_farm_to_fork': {
                'eligible': reduction >= 20,
                'confidence': 'high' if reduction >= 40 else 'medium' if reduction >= 20 else 'low',
                'requirements_met': reduction >= 20
            },
            'national_circular_economy': {
            'life_circular_economy': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15
                'requirements_met': reduction >= 15,
                'funding_eur': 73_000_000,  # €73M available for circular economy
                'deadline': '2025-09-23',
                'program_type': 'grant'
            },
            'horizon_europe_cluster_6': {
                'eligible': reduction >= 20,
                'confidence': 'high' if reduction >= 35 else 'medium' if reduction >= 20 else 'low',
                'requirements_met': reduction >= 20,
                'funding_eur': 880_000_000,  # €880M+ annually for food systems
                'deadline': 'rolling_2025',
                'program_type': 'grant'
            },
            'fedima_sustainability_grant': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 20 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15,
                'funding_eur': 20_000,  # €20k bi-annual
                'deadline': '2025-06-30',
                'program_type': 'grant',
                'sector_specific': 'bakery'
            },
            'eit_food_retail': {
                'eligible': reduction >= 20,
                'confidence': 'high' if reduction >= 30 else 'medium' if reduction >= 20 else 'low',
                'requirements_met': reduction >= 20,
                'funding_eur': 45_000,  # €15-45k range
                'deadline': 'rolling',
                'program_type': 'grant',
                'sector_specific': 'retail'
            },
            'un_sdg_certified': {
                'eligible': reduction >= 50,
                'confidence': 'high' if reduction >= 50 else 'low',
                'requirements_met': reduction >= 50
                'requirements_met': reduction >= 50,
                'funding_eur': 0,  # Certification, not funding
                'deadline': 'ongoing',
                'program_type': 'certification'
            }
        }
@@ -525,7 +558,11 @@ class SustainabilityService:
            'grant_programs': grants,
            'recommended_applications': [
                name for name, details in grants.items() if details['eligible']
            ]
            ],
            'spain_compliance': {
                'law_1_2025': True,  # Spanish food waste prevention law
                'circular_economy_strategy': True  # Spanish Circular Economy Strategy
            }
        }

    async def export_grant_report(