Initial commit - production deployment
This commit is contained in:
54
services/procurement/Dockerfile
Normal file
54
services/procurement/Dockerfile
Normal file
@@ -0,0 +1,54 @@
|
||||
# =============================================================================
# Procurement Service Dockerfile - Environment-Configurable Base Images
# =============================================================================
# Build arguments for registry configuration:
# - BASE_REGISTRY: Registry URL (default: docker.io for Docker Hub)
# - PYTHON_IMAGE: Python image name and tag (default: python:3.11-slim)
# =============================================================================

ARG BASE_REGISTRY=docker.io
ARG PYTHON_IMAGE=python:3.11-slim

# Stage 1: staging area for the shared libraries copied into the final image.
FROM ${BASE_REGISTRY}/${PYTHON_IMAGE} AS shared
WORKDIR /shared
COPY shared/ /shared/

# ARGs are scoped per build stage, so they must be re-declared after each FROM.
ARG BASE_REGISTRY=docker.io
ARG PYTHON_IMAGE=python:3.11-slim
FROM ${BASE_REGISTRY}/${PYTHON_IMAGE}

WORKDIR /app

# Install system dependencies (gcc for building wheels, curl for the healthcheck).
# --no-install-recommends keeps the image small; the apt cache is removed in the
# same layer so it never lands in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/procurement/requirements.txt .

# Install Python dependencies in a single layer (fewer layers, same result).
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt \
    && pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
COPY --from=shared /shared /app/shared

# Copy application code
COPY services/procurement/ .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"
ENV PYTHONUNBUFFERED=1

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
1342
services/procurement/README.md
Normal file
1342
services/procurement/README.md
Normal file
File diff suppressed because it is too large
Load Diff
104
services/procurement/alembic.ini
Normal file
104
services/procurement/alembic.ini
Normal file
@@ -0,0 +1,104 @@
|
||||
# A generic, single database configuration for procurement service

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# max_length = 40

# version_num, name, path
version_locations = %(here)s/migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
# behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10.0
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# NOTE(review): placeholder URL — the real connection string is expected to be
# injected at runtime (e.g. in migrations/env.py from the service settings).
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stdout,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
0
services/procurement/app/__init__.py
Normal file
0
services/procurement/app/__init__.py
Normal file
11
services/procurement/app/api/__init__.py
Normal file
11
services/procurement/app/api/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Procurement Service API"""
|
||||
|
||||
from .procurement_plans import router as procurement_plans_router
|
||||
from .purchase_orders import router as purchase_orders_router
|
||||
from .replenishment import router as replenishment_router
|
||||
|
||||
__all__ = [
|
||||
"procurement_plans_router",
|
||||
"purchase_orders_router",
|
||||
"replenishment_router"
|
||||
]
|
||||
82
services/procurement/app/api/analytics.py
Normal file
82
services/procurement/app/api/analytics.py
Normal file
@@ -0,0 +1,82 @@
|
||||
# services/procurement/app/api/analytics.py
|
||||
"""
|
||||
Procurement Analytics API - Reporting, statistics, and insights
|
||||
Professional+ tier subscription required
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path
|
||||
from typing import Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from app.services.procurement_service import ProcurementService
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import analytics_tier_required
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["procurement-analytics"])
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
def get_procurement_service(db: AsyncSession = Depends(get_db)) -> ProcurementService:
    """FastAPI dependency: build a ProcurementService bound to the request's DB session."""
    service = ProcurementService(db, settings)
    return service
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("procurement")
)
@analytics_tier_required
async def get_procurement_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    procurement_service: ProcurementService = Depends(get_procurement_service)
):
    """Get procurement analytics dashboard for a tenant (Professional+ tier required).

    Args:
        tenant_id: Tenant to report on.
        start_date / end_date: Optional window; semantics delegated to the service.

    Raises:
        HTTPException: 500 when the analytics lookup fails unexpectedly.
    """
    try:
        # Delegate to the service layer; it owns the query/aggregation logic.
        analytics_data = await procurement_service.get_procurement_analytics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        logger.info("Retrieved procurement analytics", tenant_id=tenant_id)
        return analytics_data

    except HTTPException:
        # Bug fix: let deliberate HTTP errors raised downstream propagate with
        # their original status instead of being re-wrapped as a 500 below.
        raise
    except Exception as e:
        logger.error("Failed to get procurement analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get procurement analytics: {str(e)}")
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("procurement/trends")
)
@analytics_tier_required
async def get_procurement_trends(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, description="Number of days to retrieve trends for", ge=1, le=90),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    procurement_service: ProcurementService = Depends(get_procurement_service)
):
    """Get procurement time-series trends for charts (Professional+ tier required).

    Args:
        tenant_id: Tenant to report on.
        days: Trailing window length in days (1-90, default 7).

    Raises:
        HTTPException: 500 when the trends lookup fails unexpectedly.
    """
    try:
        # Delegate to the service layer; it owns the time-series aggregation.
        trends_data = await procurement_service.get_procurement_trends(
            tenant_id=tenant_id,
            days=days
        )

        logger.info("Retrieved procurement trends", tenant_id=tenant_id, days=days)
        return trends_data

    except HTTPException:
        # Bug fix: let deliberate HTTP errors raised downstream propagate with
        # their original status instead of being re-wrapped as a 500 below.
        raise
    except Exception as e:
        logger.error("Failed to get procurement trends", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get procurement trends: {str(e)}")
|
||||
202
services/procurement/app/api/expected_deliveries.py
Normal file
202
services/procurement/app/api/expected_deliveries.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Expected Deliveries API for Procurement Service
|
||||
Public endpoint for expected delivery tracking
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
|
||||
logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["expected-deliveries"])
|
||||
|
||||
|
||||
# Demo supplier directory: substring matched against PO notes -> (display name, phone).
# NOTE(review): hard-coded demo data; production should query the supplier service.
_KNOWN_SUPPLIERS = [
    ("Molinos San José", "Molinos San José S.L.", "+34 915 234 567"),
    ("Lácteos del Valle", "Lácteos del Valle S.A.", "+34 913 456 789"),
    ("Chocolates Valor", "Chocolates Valor", "+34 965 510 062"),
    ("Suministros Hostelería", "Suministros Hostelería", "+34 911 234 567"),
    ("Miel Artesana", "Miel Artesana", "+34 918 765 432"),
]


@router.get(
    route_builder.build_base_route("expected-deliveries")
)
async def get_expected_deliveries(
    tenant_id: str,
    days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
    include_overdue: bool = Query(True, description="Include overdue deliveries"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get expected deliveries for delivery tracking system.

    Args:
        tenant_id: Tenant UUID to query
        days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
        include_overdue: Include deliveries past expected date (default True)

    Returns:
        {"deliveries": [<per-PO summary dicts>], "total_count": <int>} where each
        summary carries po_id/po_number, supplier info, an ISO-8601 UTC
        expected_delivery_date, status, up to 5 line items, total_amount, currency.

    Raises:
        HTTPException: 400 for a malformed tenant_id, 500 on unexpected errors.
    """
    try:
        # Parse tenant_id (ValueError -> 400 below)
        tenant_uuid = uuid.UUID(tenant_id)

        # Calculate date range
        now = datetime.now(timezone.utc)
        end_date = now + timedelta(days=days_ahead)

        logger.info(
            "Fetching expected deliveries",
            tenant_id=tenant_id,
            days_ahead=days_ahead,
            include_overdue=include_overdue
        )

        # Only in-flight POs (approved/sent/confirmed) that have a known ETA.
        query = select(PurchaseOrder).options(
            selectinload(PurchaseOrder.items)
        ).where(
            PurchaseOrder.tenant_id == tenant_uuid,
            PurchaseOrder.expected_delivery_date.isnot(None),
            PurchaseOrder.status.in_([
                PurchaseOrderStatus.approved,
                PurchaseOrderStatus.sent_to_supplier,
                PurchaseOrderStatus.confirmed
            ])
        )

        # Add date filters
        if include_overdue:
            # Look back only 48 hours so we surface recently-overdue deliveries
            # without dragging in ancient history.
            start_date = now - timedelta(hours=48)
            query = query.where(
                PurchaseOrder.expected_delivery_date >= start_date,
                PurchaseOrder.expected_delivery_date <= end_date
            )
        else:
            # Only future deliveries within range
            query = query.where(
                PurchaseOrder.expected_delivery_date >= now,
                PurchaseOrder.expected_delivery_date <= end_date
            )

        # Order by delivery date
        query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

        result = await db.execute(query)
        purchase_orders = result.scalars().all()

        deliveries = []

        for po in purchase_orders:
            # Resolve supplier display info from the notes field (demo shortcut);
            # fall back to a synthetic name derived from the supplier id.
            supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
            supplier_phone = None
            if po.notes:
                for needle, name, phone in _KNOWN_SUPPLIERS:
                    if needle in po.notes:
                        supplier_name = name
                        supplier_phone = phone
                        break

            # Summarize at most the first 5 line items.
            line_items = [
                {
                    "product_name": item.product_name,
                    "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                    "unit": item.unit_of_measure or "unit"
                }
                for item in po.items[:5]
            ]

            # Default delivery window is 4 hours
            delivery_window_hours = 4

            # Normalize the ETA to timezone-aware UTC; naive values are assumed
            # UTC (shouldn't happen with a properly configured DB).
            expected_delivery_utc = po.expected_delivery_date
            if expected_delivery_utc is not None:
                if expected_delivery_utc.tzinfo is None:
                    expected_delivery_utc = expected_delivery_utc.replace(tzinfo=timezone.utc)
                else:
                    expected_delivery_utc = expected_delivery_utc.astimezone(timezone.utc)

            deliveries.append({
                "po_id": str(po.id),
                "po_number": po.po_number,
                "supplier_id": str(po.supplier_id),
                "supplier_name": supplier_name,
                "supplier_phone": supplier_phone,
                "expected_delivery_date": expected_delivery_utc.isoformat() if expected_delivery_utc else None,
                "delivery_window_hours": delivery_window_hours,
                "status": po.status.value,
                "line_items": line_items,
                "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                "currency": po.currency
            })

        logger.info(
            "Expected deliveries retrieved",
            tenant_id=tenant_id,
            count=len(deliveries)
        )

        return {
            "deliveries": deliveries,
            "total_count": len(deliveries)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")

    except HTTPException:
        # Bug fix: don't mask deliberate HTTP errors as generic 500s.
        raise

    except Exception as e:
        logger.error(
            "Error fetching expected deliveries",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        raise HTTPException(status_code=500, detail="Internal server error")
|
||||
188
services/procurement/app/api/internal_delivery.py
Normal file
188
services/procurement/app/api/internal_delivery.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""
|
||||
Internal Delivery Tracking API for Procurement Service
|
||||
Service-to-service endpoint for expected delivery tracking by orchestrator
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(prefix="/internal", tags=["internal"])
|
||||
|
||||
|
||||
@router.get("/expected-deliveries")
|
||||
async def get_expected_deliveries(
|
||||
tenant_id: str = Query(..., description="Tenant UUID"),
|
||||
days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
|
||||
include_overdue: bool = Query(True, description="Include overdue deliveries"),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Get expected deliveries for delivery tracking system.
|
||||
|
||||
Called by orchestrator's DeliveryTrackingService to monitor upcoming deliveries
|
||||
and generate delivery alerts (arriving_soon, overdue, receipt_incomplete).
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID to query
|
||||
days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
|
||||
include_overdue: Include deliveries past expected date (default True)
|
||||
|
||||
Returns:
|
||||
{
|
||||
"deliveries": [
|
||||
{
|
||||
"po_id": "uuid",
|
||||
"po_number": "PO-2025-123",
|
||||
"supplier_id": "uuid",
|
||||
"supplier_name": "Molinos San José",
|
||||
"supplier_phone": "+34 915 234 567",
|
||||
"expected_delivery_date": "2025-12-02T10:00:00Z",
|
||||
"delivery_window_hours": 4,
|
||||
"status": "sent_to_supplier",
|
||||
"line_items": [...],
|
||||
"total_amount": 540.00,
|
||||
"currency": "EUR"
|
||||
}
|
||||
],
|
||||
"total_count": 8
|
||||
}
|
||||
"""
|
||||
try:
|
||||
# Parse tenant_id
|
||||
tenant_uuid = uuid.UUID(tenant_id)
|
||||
|
||||
# Calculate date range
|
||||
now = datetime.now(timezone.utc)
|
||||
end_date = now + timedelta(days=days_ahead)
|
||||
|
||||
logger.info(
|
||||
"Fetching expected deliveries",
|
||||
tenant_id=tenant_id,
|
||||
days_ahead=days_ahead,
|
||||
include_overdue=include_overdue
|
||||
)
|
||||
|
||||
# Build query for purchase orders with expected delivery dates
|
||||
query = select(PurchaseOrder).options(
|
||||
selectinload(PurchaseOrder.items)
|
||||
).where(
|
||||
PurchaseOrder.tenant_id == tenant_uuid,
|
||||
PurchaseOrder.expected_delivery_date.isnot(None),
|
||||
PurchaseOrder.status.in_([
|
||||
PurchaseOrderStatus.approved,
|
||||
PurchaseOrderStatus.sent_to_supplier,
|
||||
PurchaseOrderStatus.confirmed
|
||||
])
|
||||
)
|
||||
|
||||
# Add date filters
|
||||
if include_overdue:
|
||||
# Include any delivery from past until end_date
|
||||
query = query.where(
|
||||
PurchaseOrder.expected_delivery_date <= end_date
|
||||
)
|
||||
else:
|
||||
# Only future deliveries within range
|
||||
query = query.where(
|
||||
PurchaseOrder.expected_delivery_date >= now,
|
||||
PurchaseOrder.expected_delivery_date <= end_date
|
||||
)
|
||||
|
||||
# Order by delivery date
|
||||
query = query.order_by(PurchaseOrder.expected_delivery_date.asc())
|
||||
|
||||
# Execute query
|
||||
result = await db.execute(query)
|
||||
purchase_orders = result.scalars().all()
|
||||
|
||||
# Format deliveries for response
|
||||
deliveries = []
|
||||
|
||||
for po in purchase_orders:
|
||||
# Get supplier info from supplier service (for now, use supplier_id)
|
||||
# In production, you'd fetch from supplier service or join if same DB
|
||||
supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
|
||||
supplier_phone = None
|
||||
|
||||
# Try to get supplier details from notes or metadata
|
||||
# This is a simplified approach - in production you'd query supplier service
|
||||
if po.notes:
|
||||
if "Molinos San José" in po.notes:
|
||||
supplier_name = "Molinos San José S.L."
|
||||
supplier_phone = "+34 915 234 567"
|
||||
elif "Lácteos del Valle" in po.notes:
|
||||
supplier_name = "Lácteos del Valle S.A."
|
||||
supplier_phone = "+34 913 456 789"
|
||||
elif "Chocolates Valor" in po.notes:
|
||||
supplier_name = "Chocolates Valor"
|
||||
supplier_phone = "+34 965 510 062"
|
||||
elif "Suministros Hostelería" in po.notes:
|
||||
supplier_name = "Suministros Hostelería"
|
||||
supplier_phone = "+34 911 234 567"
|
||||
elif "Miel Artesana" in po.notes:
|
||||
supplier_name = "Miel Artesana"
|
||||
supplier_phone = "+34 918 765 432"
|
||||
|
||||
# Format line items (limit to first 5)
|
||||
line_items = []
|
||||
for item in po.items[:5]:
|
||||
line_items.append({
|
||||
"product_name": item.product_name,
|
||||
"quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
|
||||
"unit": item.unit_of_measure or "unit"
|
||||
})
|
||||
|
||||
# Default delivery window is 4 hours
|
||||
delivery_window_hours = 4
|
||||
|
||||
delivery_dict = {
|
||||
"po_id": str(po.id),
|
||||
"po_number": po.po_number,
|
||||
"supplier_id": str(po.supplier_id),
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_phone": supplier_phone,
|
||||
"expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
|
||||
"delivery_window_hours": delivery_window_hours,
|
||||
"status": po.status.value,
|
||||
"line_items": line_items,
|
||||
"total_amount": float(po.total_amount) if po.total_amount else 0.0,
|
||||
"currency": po.currency
|
||||
}
|
||||
|
||||
deliveries.append(delivery_dict)
|
||||
|
||||
logger.info(
|
||||
"Expected deliveries retrieved",
|
||||
tenant_id=tenant_id,
|
||||
count=len(deliveries)
|
||||
)
|
||||
|
||||
return {
|
||||
"deliveries": deliveries,
|
||||
"total_count": len(deliveries)
|
||||
}
|
||||
|
||||
except ValueError as e:
|
||||
logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error fetching expected deliveries",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id,
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
102
services/procurement/app/api/internal_delivery_tracking.py
Normal file
102
services/procurement/app/api/internal_delivery_tracking.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""
|
||||
Internal API for triggering delivery tracking alerts.
|
||||
Used by demo session cloning to generate realistic late delivery alerts.
|
||||
|
||||
Moved from orchestrator service to procurement service (domain ownership).
|
||||
|
||||
URL Pattern: /api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger
|
||||
This follows the tenant-scoped pattern so gateway can proxy correctly.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request, Path
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# New URL pattern: tenant-scoped so gateway proxies to procurement service correctly
|
||||
# Tenant-scoped URL so the gateway proxies this to the procurement service.
@router.post("/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger")
async def trigger_delivery_tracking(
    tenant_id: UUID = Path(..., description="Tenant ID to check deliveries for"),
    request: Request = None
) -> dict:
    """
    Trigger delivery tracking for a specific tenant (internal use only).

    Invoked by demo session cloning after purchase orders are seeded, so that
    realistic delivery alerts (arriving soon, overdue, receipt incomplete) exist.

    Security: callers must present an x-internal-service header of
    "demo-session" or "internal"; anything else is rejected with 403.

    Args:
        tenant_id: Tenant UUID to check deliveries for
        request: FastAPI request object

    Returns:
        dict with success flag, tenant_id, alerts_generated total, and a
        breakdown of arriving_soon / overdue / receipt_incomplete counts.
    """
    try:
        # Guard 1: internal-service header check.
        caller = request.headers.get("x-internal-service") if request else None
        if caller not in ("demo-session", "internal"):
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # Guard 2: the tracking service must have been attached at app startup.
        tracking_service = getattr(request.app.state, 'delivery_tracking_service', None)
        if tracking_service is None:
            logger.error("Delivery tracking service not initialized")
            raise HTTPException(
                status_code=500,
                detail="Delivery tracking service not available"
            )

        logger.info("Triggering delivery tracking", tenant_id=str(tenant_id))
        outcome = await tracking_service.check_expected_deliveries(tenant_id)

        total_alerts = outcome.get("total_alerts", 0)
        logger.info(
            "Delivery tracking completed",
            tenant_id=str(tenant_id),
            alerts_generated=total_alerts
        )

        return {
            "success": True,
            "tenant_id": str(tenant_id),
            "alerts_generated": total_alerts,
            "breakdown": {
                "arriving_soon": outcome.get("arriving_soon", 0),
                "overdue": outcome.get("overdue", 0),
                "receipt_incomplete": outcome.get("receipt_incomplete", 0)
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering delivery tracking",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger delivery tracking: {str(e)}"
        )
|
||||
701
services/procurement/app/api/internal_demo.py
Normal file
701
services/procurement/app/api/internal_demo.py
Normal file
@@ -0,0 +1,701 @@
|
||||
"""
|
||||
Internal Demo Cloning API for Procurement Service
|
||||
Service-to-service endpoint for cloning procurement and purchase order data
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, delete, func
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta, date
|
||||
from typing import Optional, Dict, Any
|
||||
import os
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
|
||||
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
|
||||
from sqlalchemy.orm import selectinload
|
||||
from shared.schemas.reasoning_types import (
|
||||
create_po_reasoning_low_stock,
|
||||
create_po_reasoning_supplier_contract
|
||||
)
|
||||
from app.core.config import settings
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
||||
|
||||
# Base demo tenant IDs
|
||||
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
||||
|
||||
|
||||
async def _emit_po_approval_alerts_for_demo(
    virtual_tenant_id: uuid.UUID,
    pending_pos: list[PurchaseOrder]
) -> int:
    """
    Emit alerts for pending approval POs during demo cloning.

    Creates clients internally to avoid dependency injection issues: a
    suppliers client (to resolve supplier names/lead times) and a RabbitMQ
    client (to publish one alert event per pending PO). Per-PO failures are
    logged and skipped; a failure of the operation as a whole is logged but
    never raised, so demo cloning cannot be broken by alerting problems.

    Args:
        virtual_tenant_id: Virtual demo tenant the alerts belong to.
        pending_pos: Purchase orders awaiting approval for that tenant.

    Returns:
        The number of alerts successfully emitted.
    """
    if not pending_pos:
        return 0

    alerts_emitted = 0

    try:
        # SuppliersServiceClient and RabbitMQClient are already imported at
        # module level; no need to re-import here.
        # Use the existing settings instead of creating a new config --
        # this avoids issues with property-based configuration.
        suppliers_client = SuppliersServiceClient(settings, "procurement-service")
        rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement-service")

        # Connect to RabbitMQ
        await rabbitmq_client.connect()

        logger.info(
            "Emitting PO approval alerts for demo",
            pending_po_count=len(pending_pos),
            virtual_tenant_id=str(virtual_tenant_id)
        )

        # Emit alerts for each pending PO
        for po in pending_pos:
            try:
                # Get supplier details
                supplier_details = await suppliers_client.get_supplier_by_id(
                    tenant_id=str(virtual_tenant_id),
                    supplier_id=str(po.supplier_id)
                )

                # Skip if supplier not found
                if not supplier_details:
                    logger.warning(
                        "Supplier not found for PO, skipping alert",
                        po_id=str(po.id),
                        supplier_id=str(po.supplier_id)
                    )
                    continue

                # Urgency: the approval deadline is the required delivery date
                # minus the supplier's standard lead time.
                # NOTE(review): utcnow() is naive -- if required_delivery_date
                # is timezone-aware, the subtraction below raises TypeError.
                # Confirm the column type before changing either side.
                now = datetime.utcnow()
                hours_until_consequence = None
                deadline = None

                if po.required_delivery_date:
                    supplier_lead_time_days = supplier_details.get('standard_lead_time', 7)
                    approval_deadline = po.required_delivery_date - timedelta(days=supplier_lead_time_days)
                    deadline = approval_deadline
                    hours_until_consequence = (approval_deadline - now).total_seconds() / 3600

                # Prepare alert payload
                alert_data = {
                    'id': str(uuid.uuid4()),
                    'tenant_id': str(virtual_tenant_id),
                    'service': 'procurement',
                    'type': 'po_approval_needed',
                    'alert_type': 'po_approval_needed',
                    'type_class': 'action_needed',
                    'severity': 'high' if po.priority == 'critical' else 'medium',
                    'title': '',
                    'message': '',
                    'timestamp': datetime.utcnow().isoformat(),
                    'metadata': {
                        'po_id': str(po.id),
                        'po_number': po.po_number,
                        'supplier_id': str(po.supplier_id),
                        'supplier_name': supplier_details.get('name', ''),
                        'total_amount': float(po.total_amount),
                        'currency': po.currency,
                        'priority': po.priority,
                        'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                        'created_at': po.created_at.isoformat(),
                        'financial_impact': float(po.total_amount),
                        'urgency_score': 85,
                        'deadline': deadline.isoformat() if deadline else None,
                        # FIX: compare against None explicitly -- a 0.0 (deadline
                        # right now) or negative value must not be dropped.
                        'hours_until_consequence': round(hours_until_consequence, 1) if hours_until_consequence is not None else None,
                        'reasoning_data': po.reasoning_data or {}
                    },
                    'message_params': {
                        'po_number': po.po_number,
                        'supplier_name': supplier_details.get('name', ''),
                        'total_amount': float(po.total_amount),
                        'currency': po.currency,
                        'priority': po.priority,
                        'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                        'items_count': 0,
                        'created_at': po.created_at.isoformat()
                    },
                    'actions': ['approve_po', 'reject_po', 'modify_po'],
                    'item_type': 'alert'
                }

                # Publish to RabbitMQ
                await rabbitmq_client.publish_event(
                    exchange_name='alerts.exchange',
                    routing_key=f'alert.{alert_data["severity"]}.procurement',
                    event_data=alert_data
                )

                alerts_emitted += 1
                logger.debug(
                    "PO approval alert emitted",
                    po_id=str(po.id),
                    po_number=po.po_number
                )

            except Exception as po_error:
                logger.warning(
                    "Failed to emit alert for PO",
                    po_id=str(po.id),
                    po_number=po.po_number,
                    error=str(po_error)
                )
                # Continue with other POs

        # Close RabbitMQ connection
        await rabbitmq_client.disconnect()

        logger.info(
            "PO approval alerts emission completed",
            alerts_emitted=alerts_emitted,
            total_pending=len(pending_pos)
        )

        return alerts_emitted

    except Exception as e:
        logger.error(
            "Failed to emit PO approval alerts",
            error=str(e),
            virtual_tenant_id=str(virtual_tenant_id),
            exc_info=True
        )
        # Don't fail the cloning process - ensure we try to disconnect if connected
        try:
            if 'rabbitmq_client' in locals():
                await rabbitmq_client.disconnect()
        except Exception:  # FIX: no bare except -- don't swallow SystemExit/KeyboardInterrupt
            pass  # Suppress cleanup errors
        return alerts_emitted
|
||||
|
||||
|
||||
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone procurement service data for a virtual demo tenant

    Loads seed data from JSON files and creates:
    - Purchase orders with line items
    - Procurement plans with requirements (if in seed data)
    - Replenishment plans with items (if in seed data)
    - Adjusts dates to recent timeframe

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account ("professional",
            "enterprise", or "enterprise_child")
        session_id: Originating session ID for tracing; its first 8 chars
            prefix generated PO numbers (falls back to "demo" when absent)
        session_created_at: ISO timestamp used as the anchor for relative
            date adjustment; defaults to "now" when missing or unparseable

    Returns:
        Cloning status and record counts (never raises on data errors;
        returns a "failed" payload instead, except for malformed UUIDs
        which yield HTTP 400)
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment; fall back to the
    # request's own start time if missing or malformed.
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting procurement data cloning from seed files",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "procurement_plans": 0,
            "procurement_requirements": 0,
            "purchase_orders": 0,
            "purchase_order_items": 0,
            "replenishment_plans": 0,
            "replenishment_items": 0
        }

        def parse_date_field(date_value, session_time, field_name="date"):
            """Parse date field, handling both ISO strings and BASE_TS markers"""
            if not date_value:
                return None

            # Check if it's a BASE_TS marker
            if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
                try:
                    return resolve_time_marker(date_value, session_time)
                except ValueError as e:
                    logger.warning(
                        f"Invalid BASE_TS marker in {field_name}",
                        marker=date_value,
                        error=str(e)
                    )
                    return None

            # Handle regular ISO date strings
            try:
                return adjust_date_for_demo(
                    datetime.fromisoformat(date_value.replace('Z', '+00:00')),
                    session_time
                )
            except (ValueError, AttributeError) as e:
                logger.warning(
                    f"Invalid date format in {field_name}",
                    date_value=date_value,
                    error=str(e)
                )
                return None

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "07-procurement.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "07-procurement.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "07-procurement.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded procurement seed data",
            purchase_orders=len(seed_data.get('purchase_orders', [])),
            purchase_order_items=len(seed_data.get('purchase_order_items', [])),
            procurement_plans=len(seed_data.get('procurement_plans', []))
        )

        # FIX: import once instead of re-importing inside every loop iteration
        from shared.utils.demo_id_transformer import transform_id

        # FIX: session_id is Optional -- session_id[:8] would raise TypeError
        # when it is None. Use a stable fallback prefix instead.
        po_prefix = session_id[:8] if session_id else "demo"

        # FIX: hoisted loop-invariant "now" (used for future delivery dates)
        current_time = datetime.now(timezone.utc)

        # Load Purchase Orders from seed data.  order_id_map records
        # seed-PO-id -> transformed-id so line items can be re-linked below.
        order_id_map = {}
        for po_data in seed_data.get('purchase_orders', []):
            # Transform IDs using XOR
            try:
                logger.debug("Processing purchase order", po_id=po_data.get('id'), po_number=po_data.get('po_number'))
                po_uuid = uuid.UUID(po_data['id'])
                transformed_id = transform_id(po_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse purchase order UUID",
                           po_id=po_data.get('id'),
                           po_number=po_data.get('po_number'),
                           error=str(e))
                continue

            order_id_map[uuid.UUID(po_data['id'])] = transformed_id

            logger.debug("Parsing dates for PO",
                        po_number=po_data.get('po_number'),
                        order_date_raw=po_data.get('order_date') or po_data.get('order_date_offset_days'),
                        required_delivery_raw=po_data.get('required_delivery_date') or po_data.get('required_delivery_date_offset_days'))

            # Handle both direct dates and offset-based dates
            if 'order_date_offset_days' in po_data:
                adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
            else:
                adjusted_order_date = parse_date_field(po_data.get('order_date'), session_time, "order_date") or session_time

            if 'required_delivery_date_offset_days' in po_data:
                adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
            else:
                adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), session_time, "required_delivery_date")

            if 'estimated_delivery_date_offset_days' in po_data:
                adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
            else:
                adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), session_time, "estimated_delivery_date")

            # Calculate expected delivery date (use estimated delivery if not
            # specified separately); future dates anchor on current UTC time.
            # NOTE(review): when 'expected_delivery_date_offset_days' is set,
            # the hasattr() block below overwrites this value using
            # adjusted_order_date as the base instead of current_time --
            # confirm which anchor is intended.
            if 'expected_delivery_date_offset_days' in po_data:
                adjusted_expected_delivery = current_time + timedelta(days=po_data['expected_delivery_date_offset_days'])
            else:
                adjusted_expected_delivery = adjusted_estimated_delivery  # Fallback to estimated delivery

            logger.debug("Dates parsed successfully",
                        po_number=po_data.get('po_number'),
                        order_date=adjusted_order_date,
                        required_delivery=adjusted_required_delivery)

            # Generate a system user UUID for audit fields (demo purposes)
            system_user_id = uuid.uuid4()

            # Use status directly from JSON - JSON files should contain valid enum values
            # Valid values: draft, pending_approval, approved, sent_to_supplier, confirmed,
            # partially_received, completed, cancelled, disputed
            raw_status = po_data.get('status', 'draft')

            # Validate that the status is a valid enum value
            valid_statuses = {'draft', 'pending_approval', 'approved', 'sent_to_supplier',
                              'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed'}

            if raw_status not in valid_statuses:
                logger.warning(
                    "Invalid status value in seed data, using default 'draft'",
                    invalid_status=raw_status,
                    po_number=po_data.get('po_number'),
                    valid_options=sorted(valid_statuses)
                )
                raw_status = 'draft'

            # Transform supplier_id to match transformed supplier IDs in suppliers service
            raw_supplier_id = po_data.get('supplier_id')
            transformed_supplier_id = transform_id(raw_supplier_id, virtual_uuid) if raw_supplier_id else None

            # Create new PurchaseOrder
            new_order = PurchaseOrder(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                po_number=f"{po_prefix}-{po_data.get('po_number', f'PO-{uuid.uuid4().hex[:8].upper()}')}",
                supplier_id=str(transformed_supplier_id) if transformed_supplier_id else None,
                order_date=adjusted_order_date,
                required_delivery_date=adjusted_required_delivery,
                estimated_delivery_date=adjusted_estimated_delivery,
                expected_delivery_date=adjusted_expected_delivery,
                status=raw_status,
                priority=po_data.get('priority', 'normal').lower() if po_data.get('priority') else 'normal',
                subtotal=po_data.get('subtotal', 0.0),
                tax_amount=po_data.get('tax_amount', 0.0),
                shipping_cost=po_data.get('shipping_cost', 0.0),
                discount_amount=po_data.get('discount_amount', 0.0),
                total_amount=po_data.get('total_amount', 0.0),
                currency=po_data.get('currency', 'EUR'),
                delivery_address=po_data.get('delivery_address'),
                delivery_instructions=po_data.get('delivery_instructions'),
                delivery_contact=po_data.get('delivery_contact'),
                delivery_phone=po_data.get('delivery_phone'),
                requires_approval=po_data.get('requires_approval', False),
                auto_approved=po_data.get('auto_approved', False),
                auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
                rejection_reason=po_data.get('rejection_reason'),
                sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), session_time, "sent_to_supplier_at"),
                supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), session_time, "supplier_confirmation_date"),
                supplier_reference=po_data.get('supplier_reference'),
                notes=po_data.get('notes'),
                internal_notes=po_data.get('internal_notes'),
                terms_and_conditions=po_data.get('terms_and_conditions'),
                reasoning_data=po_data.get('reasoning_data'),
                created_at=session_time,
                updated_at=session_time,
                created_by=system_user_id,
                updated_by=system_user_id
            )

            # Add expected_delivery_date if the model supports it
            if hasattr(PurchaseOrder, 'expected_delivery_date'):
                if 'expected_delivery_date_offset_days' in po_data:
                    # Handle offset-based expected delivery dates
                    expected_delivery = adjusted_order_date + timedelta(
                        days=po_data['expected_delivery_date_offset_days']
                    )
                else:
                    expected_delivery = adjusted_estimated_delivery
                new_order.expected_delivery_date = expected_delivery

            db.add(new_order)
            stats["purchase_orders"] += 1

        # Load Purchase Order Items from seed data
        for po_item_data in seed_data.get('purchase_order_items', []):
            # Transform IDs
            try:
                item_uuid = uuid.UUID(po_item_data['id'])
                transformed_id = transform_id(po_item_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse purchase order item UUID",
                           item_id=po_item_data['id'],
                           error=str(e))
                continue

            # Map purchase_order_id if it exists in our map
            po_id_value = po_item_data.get('purchase_order_id')
            if po_id_value:
                po_id_value = order_id_map.get(uuid.UUID(po_id_value), uuid.UUID(po_id_value))

            new_item = PurchaseOrderItem(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                purchase_order_id=str(po_id_value) if po_id_value else None,
                inventory_product_id=po_item_data.get('inventory_product_id'),
                product_name=po_item_data.get('product_name'),
                product_code=po_item_data.get('product_code'),  # Use product_code directly from JSON
                ordered_quantity=po_item_data.get('ordered_quantity', 0.0),
                unit_of_measure=po_item_data.get('unit_of_measure'),
                unit_price=po_item_data.get('unit_price', 0.0),
                line_total=po_item_data.get('line_total', 0.0),
                received_quantity=po_item_data.get('received_quantity', 0.0),
                remaining_quantity=po_item_data.get('remaining_quantity', po_item_data.get('ordered_quantity', 0.0)),
                quality_requirements=po_item_data.get('quality_requirements'),
                item_notes=po_item_data.get('item_notes'),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_item)
            stats["purchase_order_items"] += 1

        # Load Procurement Plans from seed data (if any)
        for plan_data in seed_data.get('procurement_plans', []):
            # Transform IDs
            try:
                plan_uuid = uuid.UUID(plan_data['id'])
                transformed_id = transform_id(plan_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse procurement plan UUID",
                           plan_id=plan_data['id'],
                           error=str(e))
                continue

            # Adjust dates
            adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), session_time, "plan_date")

            new_plan = ProcurementPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
                plan_period_start=parse_date_field(plan_data.get('plan_period_start'), session_time, "plan_period_start"),
                plan_period_end=parse_date_field(plan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=plan_data.get('planning_horizon_days'),
                status=plan_data.get('status', 'draft'),
                plan_type=plan_data.get('plan_type'),
                priority=plan_data.get('priority', 'normal'),
                business_model=plan_data.get('business_model'),
                procurement_strategy=plan_data.get('procurement_strategy'),
                total_requirements=plan_data.get('total_requirements', 0),
                total_estimated_cost=plan_data.get('total_estimated_cost', 0.0),
                total_approved_cost=plan_data.get('total_approved_cost', 0.0),
                cost_variance=plan_data.get('cost_variance', 0.0),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_plan)
            stats["procurement_plans"] += 1

        # Load Replenishment Plans from seed data (if any)
        for replan_data in seed_data.get('replenishment_plans', []):
            # Transform IDs
            try:
                replan_uuid = uuid.UUID(replan_data['id'])
                transformed_id = transform_id(replan_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse replenishment plan UUID",
                           replan_id=replan_data['id'],
                           error=str(e))
                continue

            # Adjust dates
            adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), session_time, "plan_date")

            new_replan = ReplenishmentPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
                plan_period_start=parse_date_field(replan_data.get('plan_period_start'), session_time, "plan_period_start"),
                plan_period_end=parse_date_field(replan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=replan_data.get('planning_horizon_days'),
                status=replan_data.get('status', 'draft'),
                plan_type=replan_data.get('plan_type'),
                priority=replan_data.get('priority', 'normal'),
                business_model=replan_data.get('business_model'),
                total_items=replan_data.get('total_items', 0),
                total_estimated_cost=replan_data.get('total_estimated_cost', 0.0),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_replan)
            stats["replenishment_plans"] += 1

        # Commit all loaded data
        await db.commit()

        # Emit alerts for pending approval POs (CRITICAL for demo dashboard)
        alerts_emitted = 0
        try:
            # Get all pending approval POs that were just created
            pending_approval_pos = await db.execute(
                select(PurchaseOrder).where(
                    PurchaseOrder.tenant_id == virtual_uuid,
                    PurchaseOrder.status == 'pending_approval'
                )
            )
            pending_pos = pending_approval_pos.scalars().all()

            logger.info(
                "Found pending approval POs for alert emission",
                count=len(pending_pos),
                virtual_tenant_id=virtual_tenant_id
            )

            # Emit alerts using refactored function
            if pending_pos:
                alerts_emitted = await _emit_po_approval_alerts_for_demo(
                    virtual_tenant_id=virtual_uuid,
                    pending_pos=pending_pos
                )

        except Exception as e:
            logger.error(
                "Failed to emit PO approval alerts during demo cloning",
                error=str(e),
                virtual_tenant_id=virtual_tenant_id
            )
            # Don't fail the entire cloning process if alert emission fails

        # Calculate total records
        total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
                         stats["purchase_orders"] + stats["purchase_order_items"] +
                         stats["replenishment_plans"] + stats["replenishment_items"])
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Procurement data loading from seed files completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "procurement",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats,
            "alerts_emitted": alerts_emitted
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to load procurement seed data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "procurement",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
||||
|
||||
|
||||
@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint

    Used by orchestrator to verify service availability
    """
    # Static payload: reachability of this route is the health signal itself.
    payload = {
        "service": "procurement",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return payload
|
||||
|
||||
|
||||
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete all procurement data for a virtual demo tenant.

    Removes purchase orders (with items), procurement plans (with
    requirements) and replenishment plans (with items) belonging to the
    given tenant, in child-before-parent order so row deletion does not
    depend on cascade configuration.

    Args:
        virtual_tenant_id: UUID string of the virtual demo tenant.

    Returns:
        Per-table and total deletion counts plus the elapsed time.

    Raises:
        HTTPException: 500 on any failure (including a malformed UUID,
            which surfaces here as a ValueError).
    """
    logger.info("Deleting procurement data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records before deleting so the response can report them.
        po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
        po_item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
        plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
        replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))

        # Delete in order: children first, then parents.
        # Requirement/item rows are located via subqueries on their parent's
        # tenant_id because they are keyed by plan id, not tenant id.
        await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
        await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
        await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.plan_id.in_(
            select(ProcurementPlan.id).where(ProcurementPlan.tenant_id == virtual_uuid)
        )))
        await db.execute(delete(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
        await db.execute(delete(ReplenishmentPlanItem).where(ReplenishmentPlanItem.replenishment_plan_id.in_(
            select(ReplenishmentPlan.id).where(ReplenishmentPlan.tenant_id == virtual_uuid)
        )))
        await db.execute(delete(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Procurement data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "procurement",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "purchase_orders": po_count,
                "purchase_order_items": po_item_count,
                "procurement_plans": plan_count,
                "replenishment_plans": replan_count,
                # NOTE: requirement/item rows removed via subquery are not
                # counted, so "total" undercounts when plans had children.
                "total": po_count + po_item_count + plan_count + replan_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
|
||||
175
services/procurement/app/api/internal_transfer.py
Normal file
175
services/procurement/app/api/internal_transfer.py
Normal file
@@ -0,0 +1,175 @@
|
||||
"""
|
||||
Internal Transfer API Endpoints
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Body
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.services.internal_transfer_service import InternalTransferService
|
||||
from app.repositories.purchase_order_repository import PurchaseOrderRepository
|
||||
from app.core.database import get_db
|
||||
from shared.auth.tenant_access import verify_tenant_permission_dep
|
||||
from shared.clients import get_recipes_client, get_production_client, get_inventory_client
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# Pydantic models for request validation
|
||||
class InternalTransferItem(BaseModel):
    """One product line requested in an internal (child-to-parent) transfer."""
    product_id: str
    product_name: Optional[str] = None  # optional display name; may be resolved downstream
    quantity: float
    unit_of_measure: str = 'units'  # defaults to generic 'units' when unspecified
|
||||
|
||||
|
||||
class InternalTransferRequest(BaseModel):
    """Request body for creating an internal purchase order.

    The requesting (child) tenant comes from the URL path; this payload
    names the parent tenant to buy from, the line items, and the delivery
    date as an ISO-8601 string (a time component, if present, is ignored).
    """
    parent_tenant_id: str
    items: List[InternalTransferItem]
    delivery_date: str  # ISO date or datetime string; only the date part is used
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class ApprovalRequest(BaseModel):
    """Placeholder payload for transfer approval; currently carries no fields."""
    pass  # Empty for now, might add approval notes later
|
||||
|
||||
|
||||
def get_internal_transfer_service(db: AsyncSession = Depends(get_db)) -> InternalTransferService:
    """FastAPI dependency: build an InternalTransferService for one request.

    Wires the purchase-order repository (bound to the request's DB session)
    together with the recipes, production and inventory service clients,
    all configured from the shared settings object.
    """
    return InternalTransferService(
        purchase_order_repository=PurchaseOrderRepository(db),
        recipe_client=get_recipes_client(config=settings, service_name="procurement-service"),
        production_client=get_production_client(config=settings, service_name="procurement-service"),
        inventory_client=get_inventory_client(config=settings, service_name="procurement-service"),
    )
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/procurement/internal-transfers", response_model=None)
async def create_internal_purchase_order(
    tenant_id: str,
    transfer_request: InternalTransferRequest,
    internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Create an internal purchase order from child to parent tenant

    **Enterprise Tier Feature**: Internal transfers require Enterprise subscription.

    Flow: verify the tenant has an active Enterprise-capable subscription,
    parse the requested delivery date, then delegate PO creation to the
    internal transfer service.

    Raises:
        HTTPException: 403 when the subscription tier does not allow
            internal transfers; 500 on any other failure.
    """
    try:
        # Validate subscription tier for internal transfers
        from shared.subscription.plans import PlanFeatures
        from shared.clients import get_tenant_client

        tenant_client = get_tenant_client(config=settings, service_name="procurement-service")
        subscription = await tenant_client.get_tenant_subscription(tenant_id)

        if not subscription:
            raise HTTPException(
                status_code=403,
                detail="No active subscription found. Internal transfers require Enterprise tier."
            )

        # Check if tier supports internal transfers
        if not PlanFeatures.validate_internal_transfers(subscription.get("plan", "starter")):
            raise HTTPException(
                status_code=403,
                detail=f"Internal transfers require Enterprise tier. Current tier: {subscription.get('plan', 'starter')}"
            )

        # Parse delivery_date: drop any time component, then parse the date
        # directly.  FIX: uses `date` (imported at module level) instead of a
        # redundant local `from datetime import datetime` plus
        # fromisoformat(...).date() round-trip -- same result for valid input,
        # same ValueError -> 500 for invalid input.
        delivery_date = date.fromisoformat(transfer_request.delivery_date.split('T')[0])

        # Convert Pydantic items to dict
        items = [item.model_dump() for item in transfer_request.items]

        # Create the internal purchase order
        result = await internal_transfer_service.create_internal_purchase_order(
            child_tenant_id=tenant_id,
            parent_tenant_id=transfer_request.parent_tenant_id,
            items=items,
            delivery_date=delivery_date,
            requested_by_user_id="temp_user_id",  # Would come from auth context
            notes=transfer_request.notes
        )

        return result
    except HTTPException:
        # Re-raise our own 403s untouched instead of wrapping them in a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to create internal purchase order: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/procurement/internal-transfers/{po_id}/approve", response_model=None)
async def approve_internal_transfer(
    tenant_id: str,
    po_id: str,
    approval_request: Optional[ApprovalRequest] = None,
    internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Approve an internal transfer request
    """
    # Placeholder identity until the auth context supplies the real user.
    approved_by_user_id = "temp_user_id"
    try:
        return await internal_transfer_service.approve_internal_transfer(
            po_id=po_id,
            approved_by_user_id=approved_by_user_id,
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to approve internal transfer: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/procurement/internal-transfers/pending", response_model=None)
async def get_pending_internal_transfers(
    tenant_id: str,
    internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Get pending internal transfers for a tenant.

    Args:
        tenant_id: Tenant UUID from the path (access enforced by dependency).

    Returns:
        The service-layer list of pending transfers.

    Raises:
        HTTPException: 500 if the lookup fails unexpectedly.
    """
    try:
        result = await internal_transfer_service.get_pending_internal_transfers(tenant_id=tenant_id)
        return result
    except HTTPException:
        # Preserve deliberate HTTP errors instead of converting them to 500s.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get pending internal transfers: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/procurement/internal-transfers/history", response_model=None)
async def get_internal_transfer_history(
    tenant_id: str,
    parent_tenant_id: Optional[str] = None,
    child_tenant_id: Optional[str] = None,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Get internal transfer history with optional filtering.

    Args:
        tenant_id: Tenant UUID from the path (access enforced by dependency).
        parent_tenant_id: Optional filter on the parent tenant.
        child_tenant_id: Optional filter on the child tenant.
        start_date: Optional inclusive start of the date window.
        end_date: Optional inclusive end of the date window.

    Returns:
        The service-layer transfer history.

    Raises:
        HTTPException: 500 if the lookup fails unexpectedly.
    """
    try:
        result = await internal_transfer_service.get_internal_transfer_history(
            tenant_id=tenant_id,
            parent_tenant_id=parent_tenant_id,
            child_tenant_id=child_tenant_id,
            start_date=start_date,
            end_date=end_date
        )
        return result
    except HTTPException:
        # Preserve deliberate HTTP errors instead of converting them to 500s.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get internal transfer history: {str(e)}")
|
||||
629
services/procurement/app/api/ml_insights.py
Normal file
629
services/procurement/app/api/ml_insights.py
Normal file
@@ -0,0 +1,629 @@
|
||||
"""
|
||||
ML Insights API Endpoints for Procurement Service
|
||||
|
||||
Provides endpoints to trigger ML insight generation for:
|
||||
- Supplier performance analysis
|
||||
- Price forecasting and timing recommendations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
import pandas as pd
|
||||
|
||||
from app.core.database import get_db
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/v1/tenants/{tenant_id}/procurement/ml/insights",
|
||||
tags=["ML Insights"]
|
||||
)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST/RESPONSE SCHEMAS - SUPPLIER ANALYSIS
|
||||
# ================================================================
|
||||
|
||||
class SupplierAnalysisRequest(BaseModel):
    """Parameters controlling a supplier performance analysis run."""

    # None means "analyze every supplier for the tenant".
    supplier_ids: Optional[List[str]] = Field(
        default=None,
        description="Specific supplier IDs to analyze. If None, analyzes all suppliers",
    )
    # Order-history window: 30 days to two years.
    lookback_days: int = Field(
        default=180, ge=30, le=730,
        description="Days of historical orders to analyze",
    )
    # Suppliers with fewer orders than this are skipped.
    min_orders: int = Field(
        default=10, ge=5, le=100,
        description="Minimum orders required for analysis",
    )
|
||||
|
||||
|
||||
class SupplierAnalysisResponse(BaseModel):
    """Response schema for supplier performance analysis."""

    success: bool               # True when at least one insight was posted
    message: str                # Human-readable summary of the run
    tenant_id: str
    suppliers_analyzed: int     # Suppliers that produced results
    total_insights_generated: int
    total_insights_posted: int
    high_risk_suppliers: int    # Count flagged by the analysis run
    insights_by_supplier: dict  # supplier_id -> per-supplier result summary
    # default_factory makes the fresh-list-per-instance intent explicit rather
    # than relying on Pydantic's copy-on-default behavior for the [] literal.
    errors: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST/RESPONSE SCHEMAS - PRICE FORECASTING
|
||||
# ================================================================
|
||||
|
||||
class PriceForecastRequest(BaseModel):
    """Parameters controlling a price forecasting run."""

    # None means "forecast every ingredient for the tenant".
    ingredient_ids: Optional[List[str]] = Field(
        default=None,
        description="Specific ingredient IDs to forecast. If None, forecasts all ingredients",
    )
    # Price-history window: 90 days to two years.
    lookback_days: int = Field(
        default=180, ge=90, le=730,
        description="Days of historical price data to analyze",
    )
    # Forecast horizon: one week to one quarter.
    forecast_horizon_days: int = Field(
        default=30, ge=7, le=90,
        description="Days to forecast ahead",
    )
|
||||
|
||||
|
||||
class PriceForecastResponse(BaseModel):
    """Response schema for price forecasting."""

    success: bool                 # True when at least one insight was posted
    message: str                  # Human-readable summary of the run
    tenant_id: str
    ingredients_forecasted: int   # Ingredients that produced results
    total_insights_generated: int
    total_insights_posted: int
    buy_now_recommendations: int  # Ingredients recommended for immediate purchase
    bulk_opportunities: int       # Ingredients with a detected bulk-buy opportunity
    insights_by_ingredient: dict  # ingredient_id -> per-ingredient result summary
    # default_factory makes the fresh-list-per-instance intent explicit rather
    # than relying on Pydantic's copy-on-default behavior for the [] literal.
    errors: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# API ENDPOINTS - SUPPLIER ANALYSIS
|
||||
# ================================================================
|
||||
|
||||
@router.post("/analyze-suppliers", response_model=SupplierAnalysisResponse)
async def trigger_supplier_analysis(
    tenant_id: str,
    request_data: SupplierAnalysisRequest,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Trigger supplier performance analysis.

    Fetches purchase-order history for the requested suppliers, runs the
    SupplierInsightsOrchestrator over each one, and posts the resulting
    insights to the AI Insights Service.

    Args:
        tenant_id: Tenant UUID.
        request_data: Analysis parameters (supplier filter, lookback, minimum orders).
        request: FastAPI request, used to reach the shared event publisher.
        db: Database session.

    Returns:
        SupplierAnalysisResponse with per-supplier results and any errors.

    Raises:
        HTTPException: 500 if the analysis fails as a whole.
    """
    logger.info(
        "ML insights supplier analysis requested",
        tenant_id=tenant_id,
        supplier_ids=request_data.supplier_ids,
        lookback_days=request_data.lookback_days
    )

    try:
        # Imported lazily so the ML stack loads only when this endpoint is hit.
        from app.ml.supplier_insights_orchestrator import SupplierInsightsOrchestrator
        from app.models.purchase_order import PurchaseOrder
        from shared.clients.suppliers_client import SuppliersServiceClient
        from app.core.config import settings
        from sqlalchemy import select

        # Event publisher is optional; the orchestrator accepts None.
        event_publisher = getattr(request.app.state, 'event_publisher', None)

        orchestrator = SupplierInsightsOrchestrator(event_publisher=event_publisher)
        suppliers_client = SuppliersServiceClient(settings)

        # Aggregates accumulated across the per-supplier loop.
        total_insights_generated = 0
        total_insights_posted = 0
        high_risk_suppliers = 0
        insights_by_supplier = {}
        errors = []

        try:
            # Resolve the supplier list via the suppliers service API.
            if request_data.supplier_ids:
                suppliers = []
                for supplier_id in request_data.supplier_ids:
                    supplier = await suppliers_client.get_supplier_by_id(
                        tenant_id=tenant_id,
                        supplier_id=supplier_id
                    )
                    if supplier:
                        suppliers.append(supplier)
            else:
                all_suppliers = await suppliers_client.get_all_suppliers(
                    tenant_id=tenant_id,
                    is_active=True
                )
                # Cap the batch to keep the request from timing out.
                suppliers = (all_suppliers or [])[:10]

            if not suppliers:
                return SupplierAnalysisResponse(
                    success=False,
                    message="No suppliers found for analysis",
                    tenant_id=tenant_id,
                    suppliers_analyzed=0,
                    total_insights_generated=0,
                    total_insights_posted=0,
                    high_risk_suppliers=0,
                    insights_by_supplier={},
                    errors=["No suppliers found"]
                )

            # Order-history window. NOTE(review): utcnow() is naive — assumes
            # PurchaseOrder.order_date is stored as naive UTC; confirm.
            end_date = datetime.utcnow()
            start_date = end_date - timedelta(days=request_data.lookback_days)

            for supplier in suppliers:
                # Bind identifiers before the per-supplier try so the error
                # message in the except branch can never hit an unbound name.
                supplier_id = str(supplier.get('id', 'unknown'))
                supplier_name = supplier.get('name', 'Unknown')
                try:
                    logger.info(f"Analyzing supplier {supplier_name} ({supplier_id})")

                    # Purchase orders for this supplier from the local database.
                    po_query = select(PurchaseOrder).where(
                        PurchaseOrder.tenant_id == UUID(tenant_id),
                        PurchaseOrder.supplier_id == UUID(supplier_id),
                        PurchaseOrder.order_date >= start_date,
                        PurchaseOrder.order_date <= end_date
                    )

                    po_result = await db.execute(po_query)
                    purchase_orders = po_result.scalars().all()

                    if len(purchase_orders) < request_data.min_orders:
                        logger.warning(
                            f"Insufficient orders for supplier {supplier_id}: "
                            f"{len(purchase_orders)} < {request_data.min_orders} required"
                        )
                        continue

                    # Flatten the orders into the DataFrame the orchestrator expects.
                    order_data = []
                    for po in purchase_orders:
                        # Delivery performance: only computable when both dates exist;
                        # otherwise assume on-time (optimistic default).
                        if po.delivery_date and po.expected_delivery_date:
                            days_late = (po.delivery_date - po.expected_delivery_date).days
                            on_time = days_late <= 0
                        else:
                            days_late = 0
                            on_time = True

                        # Crude quality proxy derived from order status.
                        quality_score = 100 if po.status == 'completed' else 80

                        order_data.append({
                            'order_date': po.order_date,
                            'expected_delivery_date': po.expected_delivery_date,
                            'delivery_date': po.delivery_date,
                            'days_late': days_late,
                            'on_time': on_time,
                            'quality_score': quality_score,
                            'total_amount': float(po.total_amount) if po.total_amount else 0
                        })

                    order_history = pd.DataFrame(order_data)

                    # Run the analysis and post insights downstream.
                    results = await orchestrator.analyze_and_post_supplier_insights(
                        tenant_id=tenant_id,
                        supplier_id=supplier_id,
                        order_history=order_history,
                        min_orders=request_data.min_orders
                    )

                    total_insights_generated += results['insights_generated']
                    total_insights_posted += results['insights_posted']

                    # Reliability under 70 counts as a high-risk supplier.
                    reliability_score = results.get('reliability_score', 100)
                    if reliability_score < 70:
                        high_risk_suppliers += 1

                    insights_by_supplier[supplier_id] = {
                        'supplier_name': supplier_name,
                        'insights_posted': results['insights_posted'],
                        'reliability_score': reliability_score,
                        'orders_analyzed': results['orders_analyzed']
                    }

                    logger.info(
                        f"Supplier {supplier_id} analysis complete",
                        insights_posted=results['insights_posted'],
                        reliability_score=reliability_score
                    )

                except Exception as e:
                    # Per-supplier failures are collected so one bad supplier
                    # does not abort the whole batch.
                    error_msg = f"Error analyzing supplier {supplier_id}: {str(e)}"
                    logger.error(error_msg, exc_info=True)
                    errors.append(error_msg)
        finally:
            # Always release orchestrator resources — previously leaked on
            # exceptions and on the early "no suppliers" return.
            await orchestrator.close()

        response = SupplierAnalysisResponse(
            success=total_insights_posted > 0,
            message=f"Successfully analyzed {len(insights_by_supplier)} suppliers, generated {total_insights_posted} insights",
            tenant_id=tenant_id,
            suppliers_analyzed=len(insights_by_supplier),
            total_insights_generated=total_insights_generated,
            total_insights_posted=total_insights_posted,
            high_risk_suppliers=high_risk_suppliers,
            insights_by_supplier=insights_by_supplier,
            errors=errors
        )

        logger.info(
            "ML insights supplier analysis complete",
            tenant_id=tenant_id,
            total_insights=total_insights_posted,
            high_risk_suppliers=high_risk_suppliers
        )

        return response

    except Exception as e:
        logger.error(
            "ML insights supplier analysis failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Supplier analysis failed: {str(e)}"
        )
|
||||
|
||||
|
||||
# ================================================================
|
||||
# API ENDPOINTS - PRICE FORECASTING
|
||||
# ================================================================
|
||||
|
||||
@router.post("/forecast-prices", response_model=PriceForecastResponse)
async def trigger_price_forecasting(
    tenant_id: str,
    request_data: PriceForecastRequest,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Trigger price forecasting for procurement ingredients.

    Fetches historical purchase prices for the requested ingredients, runs the
    PriceInsightsOrchestrator to forecast future prices and purchase timing,
    and posts the resulting insights to the AI Insights Service.

    Args:
        tenant_id: Tenant UUID.
        request_data: Forecasting parameters (ingredient filter, lookback, horizon).
        request: FastAPI request, used to reach the shared event publisher.
        db: Database session.

    Returns:
        PriceForecastResponse with per-ingredient results and any errors.

    Raises:
        HTTPException: 500 if forecasting fails as a whole.
    """
    logger.info(
        "ML insights price forecasting requested",
        tenant_id=tenant_id,
        ingredient_ids=request_data.ingredient_ids,
        lookback_days=request_data.lookback_days
    )

    try:
        # Imported lazily so the ML stack loads only when this endpoint is hit.
        from app.ml.price_insights_orchestrator import PriceInsightsOrchestrator
        from shared.clients.inventory_client import InventoryServiceClient
        from app.models.purchase_order import PurchaseOrderItem
        from app.core.config import settings
        from sqlalchemy import select

        # Event publisher is optional; the orchestrator accepts None.
        event_publisher = getattr(request.app.state, 'event_publisher', None)

        orchestrator = PriceInsightsOrchestrator(event_publisher=event_publisher)
        inventory_client = InventoryServiceClient(settings)

        # Aggregates accumulated across the per-ingredient loop.
        total_insights_generated = 0
        total_insights_posted = 0
        buy_now_recommendations = 0
        bulk_opportunities = 0
        insights_by_ingredient = {}
        errors = []

        try:
            # Resolve the ingredient list via the inventory service API.
            if request_data.ingredient_ids:
                ingredients = []
                for ingredient_id in request_data.ingredient_ids:
                    ingredient = await inventory_client.get_ingredient_by_id(
                        ingredient_id=ingredient_id,
                        tenant_id=tenant_id
                    )
                    if ingredient:
                        ingredients.append(ingredient)
            else:
                all_ingredients = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
                # Cap the batch to keep the request from timing out.
                ingredients = all_ingredients[:10] if all_ingredients else []

            if not ingredients:
                return PriceForecastResponse(
                    success=False,
                    message="No ingredients found for forecasting",
                    tenant_id=tenant_id,
                    ingredients_forecasted=0,
                    total_insights_generated=0,
                    total_insights_posted=0,
                    buy_now_recommendations=0,
                    bulk_opportunities=0,
                    insights_by_ingredient={},
                    errors=["No ingredients found"]
                )

            # NOTE(review): this window is computed but the item query below
            # does not filter by date — lookback_days only bounds the model via
            # min_history_days. Confirm whether the query should use it.
            end_date = datetime.utcnow()
            start_date = end_date - timedelta(days=request_data.lookback_days)

            for ingredient in ingredients:
                # Bind identifiers before the per-ingredient try so the error
                # message in the except branch can never hit an unbound name.
                ingredient_id = str(ingredient.get('id', 'unknown'))
                ingredient_name = ingredient.get('name', 'Unknown Ingredient')
                try:
                    logger.info(f"Forecasting prices for {ingredient_name} ({ingredient_id})")

                    # Price history comes from this tenant's purchase order items.
                    poi_query = select(PurchaseOrderItem).where(
                        PurchaseOrderItem.inventory_product_id == UUID(ingredient_id)
                    ).join(
                        PurchaseOrderItem.purchase_order
                    ).where(
                        PurchaseOrderItem.purchase_order.has(
                            tenant_id=UUID(tenant_id)
                        )
                    )

                    poi_result = await db.execute(poi_query)
                    purchase_items = poi_result.scalars().all()

                    # Hard floor of 30 observations for a meaningful forecast.
                    if len(purchase_items) < 30:
                        logger.warning(
                            f"Insufficient price history for ingredient {ingredient_id}: "
                            f"{len(purchase_items)} items"
                        )
                        continue

                    # Build the chronological price series the orchestrator expects;
                    # items without a price or quantity are skipped.
                    price_data = []
                    for item in purchase_items:
                        if item.unit_price and item.quantity:
                            price_data.append({
                                'date': item.purchase_order.order_date,
                                'price': float(item.unit_price),
                                'quantity': float(item.quantity),
                                'supplier_id': str(item.purchase_order.supplier_id)
                            })

                    price_history = pd.DataFrame(price_data)
                    price_history = price_history.sort_values('date')

                    # Run the forecast and post insights downstream.
                    results = await orchestrator.forecast_and_post_insights(
                        tenant_id=tenant_id,
                        ingredient_id=ingredient_id,
                        price_history=price_history,
                        forecast_horizon_days=request_data.forecast_horizon_days,
                        min_history_days=request_data.lookback_days
                    )

                    total_insights_generated += results['insights_generated']
                    total_insights_posted += results['insights_posted']

                    recommendation = results.get('recommendation', {})
                    if recommendation.get('action') == 'buy_now':
                        buy_now_recommendations += 1

                    bulk_opp = results.get('bulk_opportunity', {})
                    if bulk_opp.get('has_bulk_opportunity'):
                        bulk_opportunities += 1

                    insights_by_ingredient[ingredient_id] = {
                        'ingredient_name': ingredient_name,
                        'insights_posted': results['insights_posted'],
                        'recommendation': recommendation.get('action'),
                        'has_bulk_opportunity': bulk_opp.get('has_bulk_opportunity', False)
                    }

                    logger.info(
                        f"Ingredient {ingredient_id} forecasting complete",
                        insights_posted=results['insights_posted'],
                        recommendation=recommendation.get('action')
                    )

                except Exception as e:
                    # Per-ingredient failures are collected so one bad ingredient
                    # does not abort the whole batch.
                    error_msg = f"Error forecasting ingredient {ingredient_id}: {str(e)}"
                    logger.error(error_msg, exc_info=True)
                    errors.append(error_msg)
        finally:
            # Always release orchestrator resources — previously leaked on
            # exceptions and on the early "no ingredients" return.
            await orchestrator.close()

        response = PriceForecastResponse(
            success=total_insights_posted > 0,
            message=f"Successfully forecasted {len(insights_by_ingredient)} ingredients, generated {total_insights_posted} insights",
            tenant_id=tenant_id,
            ingredients_forecasted=len(insights_by_ingredient),
            total_insights_generated=total_insights_generated,
            total_insights_posted=total_insights_posted,
            buy_now_recommendations=buy_now_recommendations,
            bulk_opportunities=bulk_opportunities,
            insights_by_ingredient=insights_by_ingredient,
            errors=errors
        )

        logger.info(
            "ML insights price forecasting complete",
            tenant_id=tenant_id,
            total_insights=total_insights_posted,
            buy_now_recommendations=buy_now_recommendations,
            bulk_opportunities=bulk_opportunities
        )

        return response

    except Exception as e:
        logger.error(
            "ML insights price forecasting failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Price forecasting failed: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.get("/health")
async def ml_insights_health():
    """Health probe for the ML insights API; lists the endpoints it exposes."""
    exposed_endpoints = [
        "POST /ml/insights/analyze-suppliers",
        "POST /ml/insights/forecast-prices",
        "POST /internal/ml/generate-price-insights",
    ]
    return {
        "status": "healthy",
        "service": "procurement-ml-insights",
        "endpoints": exposed_endpoints,
    }
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL API ENDPOINT - Called by demo session service
|
||||
# ================================================================
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
|
||||
# Create a separate router for internal endpoints to avoid the tenant prefix
|
||||
internal_router = APIRouter(
|
||||
tags=["ML Insights - Internal"]
|
||||
)
|
||||
|
||||
|
||||
@internal_router.post("/api/v1/tenants/{tenant_id}/procurement/internal/ml/generate-price-insights")
async def generate_price_insights_internal(
    tenant_id: str,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Internal endpoint to trigger price insights generation for demo sessions.

    Called by the demo-session service after cloning data. Delegates to the
    public price-forecasting endpoint logic with optimized defaults.

    Security: protected by an x-internal-service header check.

    Args:
        tenant_id: The tenant UUID.
        request: FastAPI request object (header check + event publisher).
        db: Database session.

    Returns:
        Simplified summary dict: insights_posted, tenant_id, status, message,
        ingredients_analyzed, buy_now_recommendations.

    Raises:
        HTTPException: 403 when the internal-service header is missing/invalid,
            500 when generation fails.
    """
    # Verify the caller is a trusted internal service before doing any work.
    if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
        logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
        raise HTTPException(
            status_code=403,
            detail="This endpoint is for internal service use only"
        )

    logger.info("Internal price insights generation triggered", tenant_id=tenant_id)

    try:
        # Sensible defaults for demo data: all ingredients, 6 months back,
        # 30-day horizon.
        request_data = PriceForecastRequest(
            ingredient_ids=None,
            lookback_days=180,
            forecast_horizon_days=30
        )

        # Reuse the public endpoint's logic directly.
        result = await trigger_price_forecasting(
            tenant_id=tenant_id,
            request_data=request_data,
            request=request,
            db=db
        )

        # Return a simplified response for internal consumers.
        return {
            "insights_posted": result.total_insights_posted,
            "tenant_id": tenant_id,
            "status": "success" if result.success else "failed",
            "message": result.message,
            "ingredients_analyzed": result.ingredients_forecasted,
            "buy_now_recommendations": result.buy_now_recommendations
        }

    except HTTPException:
        # trigger_price_forecasting already raises a descriptive HTTPException;
        # don't re-wrap it in a second, nested 500.
        raise
    except Exception as e:
        logger.error(
            "Internal price insights generation failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Internal price insights generation failed: {str(e)}"
        )
|
||||
346
services/procurement/app/api/procurement_plans.py
Normal file
346
services/procurement/app/api/procurement_plans.py
Normal file
@@ -0,0 +1,346 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/api/procurement_plans.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Plans API - Endpoints for procurement planning
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import List, Optional
|
||||
from datetime import date
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.services.procurement_service import ProcurementService
|
||||
from app.schemas.procurement_schemas import (
|
||||
ProcurementPlanResponse,
|
||||
GeneratePlanRequest,
|
||||
GeneratePlanResponse,
|
||||
AutoGenerateProcurementRequest,
|
||||
AutoGenerateProcurementResponse,
|
||||
PaginatedProcurementPlans,
|
||||
)
|
||||
from shared.routing import RouteBuilder
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["procurement-plans"])
|
||||
|
||||
|
||||
def get_procurement_service(db: AsyncSession = Depends(get_db)) -> ProcurementService:
    """Build a ProcurementService bound to the request-scoped database session."""
    service = ProcurementService(db, settings)
    return service
|
||||
|
||||
|
||||
# ================================================================
|
||||
# ORCHESTRATOR ENTRY POINT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("auto-generate"),
    response_model=AutoGenerateProcurementResponse
)
async def auto_generate_procurement(
    request_data: AutoGenerateProcurementRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Auto-generate procurement plan from forecast data (called by Orchestrator).

    This is the main entry point for orchestrated procurement planning.
    The Orchestrator calls Forecasting Service first, then passes forecast data here.

    Flow:
    1. Receive forecast data from orchestrator
    2. Calculate procurement requirements
    3. Apply Recipe Explosion for locally-produced items
    4. Create procurement plan
    5. Optionally create and auto-approve purchase orders

    Returns:
        AutoGenerateProcurementResponse with plan details and created POs

    Raises:
        HTTPException: 400 for a malformed tenant id, 500 on service failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        # A malformed tenant id is a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid tenant ID format")

    try:
        logger.info("Auto-generate procurement endpoint called",
                   tenant_id=tenant_id,
                   has_forecast_data=bool(request_data.forecast_data))

        result = await service.auto_generate_procurement(
            tenant_id=tenant_uuid,
            request=request_data
        )

        return result

    except HTTPException:
        # Preserve deliberate HTTP errors raised by the service layer.
        raise
    except Exception as e:
        logger.error("Error in auto_generate_procurement endpoint", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# MANUAL PROCUREMENT PLAN GENERATION
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_base_route("plans"),
    response_model=GeneratePlanResponse
)
async def generate_procurement_plan(
    request_data: GeneratePlanRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service)
):
    """
    Generate a new procurement plan (manual/UI-driven).

    This endpoint is used for manual procurement planning from the UI.
    Unlike auto_generate_procurement, this generates its own forecasts.

    Args:
        tenant_id: Tenant UUID
        request_data: Plan generation parameters

    Returns:
        GeneratePlanResponse with the created plan

    Raises:
        HTTPException: 400 for a malformed tenant id, 500 on service failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        # A malformed tenant id is a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid tenant ID format")

    try:
        logger.info("Generate procurement plan endpoint called",
                   tenant_id=tenant_id,
                   plan_date=request_data.plan_date)

        result = await service.generate_procurement_plan(
            tenant_id=tenant_uuid,
            request=request_data
        )

        return result

    except HTTPException:
        # Preserve deliberate HTTP errors raised by the service layer.
        raise
    except Exception as e:
        logger.error("Error generating procurement plan", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PROCUREMENT PLAN CRUD
|
||||
# ================================================================
|
||||
|
||||
@router.get(
    route_builder.build_base_route("plans/current"),
    response_model=Optional[ProcurementPlanResponse]
)
async def get_current_plan(
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service)
):
    """
    Get the current day's procurement plan.

    The response model is Optional, so a missing plan is returned as null
    rather than an error.

    Raises:
        HTTPException: 400 for a malformed tenant id, 500 on service failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        # A malformed tenant id is a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid tenant ID format")

    try:
        plan = await service.get_current_plan(tenant_uuid)
        return plan

    except HTTPException:
        # Preserve deliberate HTTP errors raised by the service layer.
        raise
    except Exception as e:
        logger.error("Error getting current plan", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_detail_route("plans", "plan_id"),
    response_model=ProcurementPlanResponse
)
async def get_plan_by_id(
    plan_id: str,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service)
):
    """
    Get procurement plan by ID.

    Raises:
        HTTPException: 400 for malformed UUIDs, 404 when the plan does not
            exist, 500 on service failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
        plan_uuid = uuid.UUID(plan_id)
    except ValueError:
        # Malformed identifiers are a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid UUID format")

    try:
        plan = await service.get_plan_by_id(tenant_uuid, plan_uuid)

        if not plan:
            raise HTTPException(status_code=404, detail="Plan not found")

        return plan

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting plan by ID", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("plans/date/{plan_date}"),
    response_model=Optional[ProcurementPlanResponse]
)
async def get_plan_by_date(
    plan_date: date,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service)
):
    """
    Get procurement plan for a specific date.

    The response model is Optional, so a missing plan is returned as null
    rather than an error.

    Raises:
        HTTPException: 400 for a malformed tenant id, 500 on service failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        # A malformed tenant id is a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid tenant ID format")

    try:
        plan = await service.get_plan_by_date(tenant_uuid, plan_date)
        return plan

    except HTTPException:
        # Preserve deliberate HTTP errors raised by the service layer.
        raise
    except Exception as e:
        logger.error("Error getting plan by date", error=str(e), tenant_id=tenant_id, plan_date=plan_date)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("plans"),
    response_model=PaginatedProcurementPlans
)
async def list_procurement_plans(
    tenant_id: str = Path(..., description="Tenant ID"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=100),
    service: ProcurementService = Depends(get_procurement_service),
    db: AsyncSession = Depends(get_db)
):
    """
    List all procurement plans for tenant with pagination.

    Args:
        tenant_id: Tenant UUID.
        skip: Offset into the result set (>= 0).
        limit: Page size (1..100).

    Raises:
        HTTPException: 400 for a malformed tenant id, 500 on failure.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        # A malformed tenant id is a client error, not a server failure.
        raise HTTPException(status_code=400, detail="Invalid tenant ID format")

    try:
        from app.repositories.procurement_plan_repository import ProcurementPlanRepository

        repo = ProcurementPlanRepository(db)
        plans = await repo.list_plans(tenant_uuid, skip=skip, limit=limit)
        total = await repo.count_plans(tenant_uuid)

        plans_response = [ProcurementPlanResponse.model_validate(p) for p in plans]

        return PaginatedProcurementPlans(
            plans=plans_response,
            total=total,
            # limit >= 1 is guaranteed by the Query constraint, so no div-by-zero.
            page=skip // limit + 1,
            limit=limit,
            has_more=(skip + limit) < total
        )

    except HTTPException:
        # Preserve deliberate HTTP errors instead of converting them to 500s.
        raise
    except Exception as e:
        logger.error("Error listing procurement plans", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
    route_builder.build_resource_action_route("plans", "plan_id", "status")
)
async def update_plan_status(
    plan_id: str,
    status: str = Query(..., regex="^(draft|pending_approval|approved|in_execution|completed|cancelled)$"),
    tenant_id: str = Path(..., description="Tenant ID"),
    notes: Optional[str] = None,
    service: ProcurementService = Depends(get_procurement_service)
):
    """Update procurement plan status.

    Args:
        plan_id: Procurement plan UUID.
        status: New status (validated against the allowed set by the regex).
        tenant_id: Tenant UUID.
        notes: Optional approval notes passed through to the service.

    Returns:
        The updated plan, or 404 if the plan does not exist.

    Raises:
        HTTPException 400 on malformed UUIDs, 500 on unexpected errors.
    """
    try:
        updated_plan = await service.update_plan_status(
            tenant_id=uuid.UUID(tenant_id),
            plan_id=uuid.UUID(plan_id),
            status=status,
            approval_notes=notes
        )

        if not updated_plan:
            raise HTTPException(status_code=404, detail="Plan not found")

        return updated_plan

    except HTTPException:
        raise
    except ValueError as e:
        # Malformed tenant_id/plan_id previously produced a misleading 500.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error updating plan status", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_resource_action_route("plans", "plan_id", "create-purchase-orders")
)
async def create_purchase_orders_from_plan(
    plan_id: str,
    auto_approve: bool = Query(default=False, description="Auto-approve qualifying purchase orders"),
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service)
):
    """
    Create purchase orders from procurement plan requirements

    Groups requirements by supplier and creates POs automatically.
    Optionally evaluates auto-approval rules for qualifying POs.

    Args:
        tenant_id: Tenant UUID
        plan_id: Procurement plan UUID
        auto_approve: Whether to auto-approve qualifying POs

    Returns:
        Summary of created, approved, and failed purchase orders

    Raises:
        HTTPException 400 on malformed UUIDs or when the service reports
        failure, 500 on unexpected errors.
    """
    try:
        result = await service.create_purchase_orders_from_plan(
            tenant_id=uuid.UUID(tenant_id),
            plan_id=uuid.UUID(plan_id),
            auto_approve=auto_approve
        )

        # The service signals domain-level failure via the 'success' flag.
        if not result.get('success'):
            raise HTTPException(status_code=400, detail=result.get('error', 'Failed to create purchase orders'))

        return result

    except HTTPException:
        raise
    except ValueError as e:
        # Malformed tenant_id/plan_id previously produced a misleading 500.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error creating POs from plan", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# TESTING AND UTILITIES
|
||||
# ================================================================
|
||||
|
||||
@router.get(
    route_builder.build_resource_action_route("plans", "plan_id", "requirements")
)
async def get_plan_requirements(
    plan_id: str,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: ProcurementService = Depends(get_procurement_service),
    db: AsyncSession = Depends(get_db)
):
    """Get all requirements for a procurement plan.

    Returns a summary dict with the plan id, a count, and a flattened list
    of requirement records.

    Raises:
        HTTPException 400 on malformed UUIDs, 500 on unexpected errors.
    """
    try:
        from app.repositories.procurement_plan_repository import ProcurementRequirementRepository

        repo = ProcurementRequirementRepository(db)
        # NOTE(review): this lookup is keyed by plan_id only; confirm the
        # repository enforces tenant scoping, otherwise requirements of a
        # plan belonging to another tenant could be read.
        requirements = await repo.get_requirements_by_plan(uuid.UUID(plan_id))

        return {
            "plan_id": plan_id,
            "requirements_count": len(requirements),
            "requirements": [
                {
                    "id": str(req.id),
                    "requirement_number": req.requirement_number,
                    "product_name": req.product_name,
                    "net_requirement": float(req.net_requirement),
                    "unit_of_measure": req.unit_of_measure,
                    "priority": req.priority,
                    "status": req.status,
                    "is_locally_produced": req.is_locally_produced,
                    "bom_explosion_level": req.bom_explosion_level,
                    "supplier_name": req.supplier_name,
                    # Cost may be NULL in the DB; treat missing as 0.
                    "estimated_total_cost": float(req.estimated_total_cost or 0)
                }
                for req in requirements
            ]
        }

    except ValueError as e:
        # Malformed plan_id previously produced a misleading 500.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error getting plan requirements", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
602
services/procurement/app/api/purchase_orders.py
Normal file
602
services/procurement/app/api/purchase_orders.py
Normal file
@@ -0,0 +1,602 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/api/purchase_orders.py
|
||||
# ================================================================
|
||||
"""
|
||||
Purchase Orders API - Endpoints for purchase order management
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import List, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.services.purchase_order_service import PurchaseOrderService
|
||||
from app.services.overdue_po_detector import OverduePODetector
|
||||
from app.schemas.purchase_order_schemas import (
|
||||
PurchaseOrderCreate,
|
||||
PurchaseOrderUpdate,
|
||||
PurchaseOrderResponse,
|
||||
PurchaseOrderWithSupplierResponse,
|
||||
PurchaseOrderApproval,
|
||||
DeliveryCreate,
|
||||
DeliveryResponse,
|
||||
SupplierInvoiceCreate,
|
||||
SupplierInvoiceResponse,
|
||||
)
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.redis_utils import get_value, set_with_ttl
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["purchase-orders"])
|
||||
|
||||
|
||||
def get_po_service(db: AsyncSession = Depends(get_db)) -> PurchaseOrderService:
    """Build a PurchaseOrderService bound to the request-scoped DB session."""
    return PurchaseOrderService(db, settings)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER CRUD
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_base_route("purchase-orders"),
    response_model=PurchaseOrderResponse,
    status_code=201
)
async def create_purchase_order(
    po_data: PurchaseOrderCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Create a purchase order with its items.

    The service layer evaluates approval rules automatically and links the
    PO to a procurement plan when ``procurement_plan_id`` is supplied.

    Args:
        po_data: Purchase order creation payload.
        tenant_id: Tenant UUID.

    Returns:
        PurchaseOrderResponse describing the created PO.
    """
    try:
        logger.info("Create PO endpoint called", tenant_id=tenant_id)

        created = await service.create_purchase_order(
            tenant_id=uuid.UUID(tenant_id),
            po_data=po_data,
        )
        return PurchaseOrderResponse.model_validate(created)

    except ValueError as e:
        # Validation problems (bad UUID, domain rules) map to 400.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error creating purchase order", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_detail_route("purchase-orders", "po_id"),
    response_model=PurchaseOrderWithSupplierResponse
)
async def get_purchase_order(
    po_id: str,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Get purchase order by ID with items.

    Returns:
        PurchaseOrderWithSupplierResponse, or 404 if not found.

    Raises:
        HTTPException 400 on malformed UUIDs, 500 on unexpected errors.
    """
    try:
        po = await service.get_purchase_order(
            tenant_id=uuid.UUID(tenant_id),
            po_id=uuid.UUID(po_id)
        )

        if not po:
            raise HTTPException(status_code=404, detail="Purchase order not found")

        return PurchaseOrderWithSupplierResponse.model_validate(po)

    except HTTPException:
        raise
    except ValueError as e:
        # Malformed tenant_id/po_id previously produced a misleading 500;
        # sibling endpoints already map ValueError to 400.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error getting purchase order", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("purchase-orders"),
    response_model=List[PurchaseOrderResponse]
)
async def list_purchase_orders(
    tenant_id: str = Path(..., description="Tenant ID"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=100),
    supplier_id: Optional[str] = Query(default=None),
    status: Optional[str] = Query(default=None),
    enrich_supplier: bool = Query(default=True, description="Include supplier details (slower)"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """
    List purchase orders with filters and caching (short TTL)

    Args:
        tenant_id: Tenant UUID
        skip: Number of records to skip (pagination)
        limit: Maximum number of records to return
        supplier_id: Filter by supplier ID (optional)
        status: Filter by status (optional)
        enrich_supplier: Whether to enrich with supplier data (default: True)

    Returns:
        List of purchase orders
    """
    try:
        import json

        # PERFORMANCE OPTIMIZATION: Cache even with status filter for dashboard queries
        # Only skip cache for supplier_id filter and pagination (skip > 0)
        cache_key = None
        if skip == 0 and supplier_id is None:
            cache_key = f"purchase_orders:{tenant_id}:limit:{limit}:status:{status}:enrich:{enrich_supplier}"
            try:
                cached_result = await get_value(cache_key)
                if cached_result is not None:
                    # BUG FIX: values are stored below as a json.dumps string, so
                    # the cache may hand back a str/bytes payload. Iterating that
                    # directly would walk characters and always fail into the
                    # warning path — decode it back into a list of dicts first.
                    if isinstance(cached_result, (str, bytes)):
                        cached_result = json.loads(cached_result)
                    logger.debug("Cache hit for purchase orders", cache_key=cache_key, tenant_id=tenant_id, status=status)
                    return [PurchaseOrderResponse(**po) for po in cached_result]
            except Exception as e:
                # Cache failures are never fatal; fall through to the DB.
                logger.warning("Cache read failed, continuing without cache", cache_key=cache_key, error=str(e))

        # Cache miss - fetch from database
        pos = await service.list_purchase_orders(
            tenant_id=uuid.UUID(tenant_id),
            skip=skip,
            limit=limit,
            supplier_id=uuid.UUID(supplier_id) if supplier_id else None,
            status=status,
            enrich_supplier=enrich_supplier
        )

        result = [PurchaseOrderResponse.model_validate(po) for po in pos]

        # PERFORMANCE OPTIMIZATION: Cache the result (20s TTL for purchase orders)
        if cache_key:
            try:
                await set_with_ttl(cache_key, json.dumps([po.model_dump() for po in result]), ttl=20)
                logger.debug("Cached purchase orders", cache_key=cache_key, ttl=20, tenant_id=tenant_id, status=status)
            except Exception as e:
                logger.warning("Cache write failed, continuing without caching", cache_key=cache_key, error=str(e))

        return result

    except Exception as e:
        logger.error("Error listing purchase orders", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
    route_builder.build_resource_detail_route("purchase-orders", "po_id"),
    response_model=PurchaseOrderResponse
)
async def update_purchase_order(
    po_id: str,
    po_data: PurchaseOrderUpdate,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Update purchase order information.

    Only draft or pending_approval orders may be modified; changes to
    financial fields trigger automatic total recalculation in the service.

    Args:
        po_id: Purchase order UUID.
        po_data: Fields to update.
        tenant_id: Tenant UUID.

    Returns:
        The updated purchase order, or 404 if it does not exist.
    """
    try:
        updated = await service.update_purchase_order(
            tenant_id=uuid.UUID(tenant_id),
            po_id=uuid.UUID(po_id),
            po_data=po_data,
        )
        if not updated:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        return PurchaseOrderResponse.model_validate(updated)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating purchase order", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "status")
)
async def update_order_status(
    po_id: str,
    status: str = Query(..., description="New status"),
    tenant_id: str = Path(..., description="Tenant ID"),
    notes: Optional[str] = Query(default=None),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Update purchase order status.

    The service layer validates transitions; invalid ones surface as 400.

    Valid transitions:
        draft -> pending_approval | approved | cancelled
        pending_approval -> approved | rejected | cancelled
        approved -> sent_to_supplier | cancelled
        sent_to_supplier -> confirmed | cancelled
        confirmed -> in_production | cancelled
        in_production -> shipped | cancelled
        shipped -> delivered | cancelled
        delivered -> completed

    Args:
        po_id: Purchase order UUID.
        status: New status value.
        tenant_id: Tenant UUID.
        notes: Optional status change notes.

    Returns:
        The updated purchase order, or 404 if it does not exist.
    """
    try:
        changed = await service.update_order_status(
            tenant_id=uuid.UUID(tenant_id),
            po_id=uuid.UUID(po_id),
            status=status,
            notes=notes,
        )
        if not changed:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        return PurchaseOrderResponse.model_validate(changed)

    except ValueError as e:
        # Invalid transition or malformed UUID.
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating PO status", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# APPROVAL WORKFLOW
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "approve"),
    response_model=PurchaseOrderResponse
)
async def approve_purchase_order(
    po_id: str,
    approval_data: PurchaseOrderApproval,
    tenant_id: str = Path(..., description="Tenant ID"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Approve or reject a purchase order.

    Dispatches on ``approval_data.action`` ("approve" or "reject"); any
    other value is rejected with a 400.

    Args:
        po_id: Purchase order UUID.
        approval_data: Approval or rejection payload.
        tenant_id: Tenant UUID.

    Returns:
        The updated purchase order, or 404 if it does not exist.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
        po_uuid = uuid.UUID(po_id)
        action = approval_data.action

        if action == "approve":
            po = await service.approve_purchase_order(
                tenant_id=tenant_uuid,
                po_id=po_uuid,
                approved_by=approval_data.approved_by,
                approval_notes=approval_data.notes,
            )
        elif action == "reject":
            po = await service.reject_purchase_order(
                tenant_id=tenant_uuid,
                po_id=po_uuid,
                rejected_by=approval_data.approved_by,
                rejection_reason=approval_data.notes or "No reason provided",
            )
        else:
            raise ValueError("Invalid action. Must be 'approve' or 'reject'")

        if not po:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        return PurchaseOrderResponse.model_validate(po)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error in PO approval workflow", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "cancel"),
    response_model=PurchaseOrderResponse
)
async def cancel_purchase_order(
    po_id: str,
    reason: str = Query(..., description="Cancellation reason"),
    cancelled_by: Optional[str] = Query(default=None),
    tenant_id: str = Path(..., description="Tenant ID"),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Cancel a purchase order.

    Args:
        po_id: Purchase order UUID.
        reason: Cancellation reason (required).
        cancelled_by: Optional UUID of the user performing the cancellation.
        tenant_id: Tenant UUID.

    Returns:
        The cancelled purchase order, or 404 if it does not exist.
    """
    try:
        canceller = uuid.UUID(cancelled_by) if cancelled_by else None
        cancelled = await service.cancel_purchase_order(
            tenant_id=uuid.UUID(tenant_id),
            po_id=uuid.UUID(po_id),
            cancelled_by=canceller,
            cancellation_reason=reason,
        )
        if not cancelled:
            raise HTTPException(status_code=404, detail="Purchase order not found")
        return PurchaseOrderResponse.model_validate(cancelled)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error cancelling purchase order", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# DELIVERY MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_nested_resource_route("purchase-orders", "po_id", "deliveries"),
    response_model=DeliveryResponse,
    status_code=201
)
async def create_delivery(
    po_id: str,
    delivery_data: DeliveryCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Create a delivery record for a purchase order.

    Tracks delivery scheduling, items, quality inspection, and receipt.
    The payload's purchase_order_id must match the path's po_id.

    Args:
        po_id: Purchase order UUID (path).
        delivery_data: Delivery creation payload.
        tenant_id: Tenant UUID.

    Returns:
        DeliveryResponse describing the created delivery.
    """
    try:
        # Guard against a payload that targets a different PO than the URL.
        if po_id != str(delivery_data.purchase_order_id):
            raise ValueError("Purchase order ID mismatch")

        record = await service.create_delivery(
            tenant_id=uuid.UUID(tenant_id),
            delivery_data=delivery_data,
            created_by=uuid.UUID(current_user.get("user_id")),
        )
        return DeliveryResponse.model_validate(record)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error creating delivery", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
    route_builder.build_nested_resource_route("purchase-orders", "po_id", "deliveries") + "/{delivery_id}/status"
)
async def update_delivery_status(
    po_id: str,
    delivery_id: str,
    status: str = Query(..., description="New delivery status"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Update delivery status.

    Valid statuses: scheduled, in_transit, delivered, completed, cancelled.

    Args:
        po_id: Purchase order UUID (path; not used for the lookup here).
        delivery_id: Delivery UUID.
        status: New status value.
        tenant_id: Tenant UUID.

    Returns:
        The updated delivery, or 404 if it does not exist.
    """
    try:
        changed = await service.update_delivery_status(
            tenant_id=uuid.UUID(tenant_id),
            delivery_id=uuid.UUID(delivery_id),
            status=status,
            updated_by=uuid.UUID(current_user.get("user_id")),
        )
        if not changed:
            raise HTTPException(status_code=404, detail="Delivery not found")
        return DeliveryResponse.model_validate(changed)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating delivery status", error=str(e), delivery_id=delivery_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INVOICE MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
    route_builder.build_nested_resource_route("purchase-orders", "po_id", "invoices"),
    response_model=SupplierInvoiceResponse,
    status_code=201
)
async def create_invoice(
    po_id: str,
    invoice_data: SupplierInvoiceCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """Create a supplier invoice for a purchase order.

    The payload's purchase_order_id must match the path's po_id.

    Args:
        po_id: Purchase order UUID (path).
        invoice_data: Invoice creation payload.
        tenant_id: Tenant UUID.

    Returns:
        SupplierInvoiceResponse describing the created invoice.
    """
    try:
        # Guard against a payload that targets a different PO than the URL.
        if po_id != str(invoice_data.purchase_order_id):
            raise ValueError("Purchase order ID mismatch")

        created = await service.create_invoice(
            tenant_id=uuid.UUID(tenant_id),
            invoice_data=invoice_data,
            created_by=uuid.UUID(current_user.get("user_id")),
        )
        return SupplierInvoiceResponse.model_validate(created)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error creating invoice", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# OVERDUE PO DETECTION
|
||||
# ================================================================
|
||||
|
||||
@router.get(
    route_builder.build_base_route("purchase-orders/overdue"),
    response_model=List[dict]
)
async def get_overdue_purchase_orders(
    tenant_id: str = Path(..., description="Tenant ID"),
    limit: int = Query(10, ge=1, le=100, description="Max results")
):
    """Get overdue purchase orders for dashboard display.

    Lists POs past their estimated delivery date that are not yet
    marked as delivered.

    Args:
        tenant_id: Tenant UUID.
        limit: Maximum number of results (default: 10).

    Returns:
        List of overdue PO summaries with severity and days overdue.
    """
    try:
        return await OverduePODetector().get_overdue_pos_for_dashboard(
            tenant_id=uuid.UUID(tenant_id),
            limit=limit,
        )
    except Exception as e:
        logger.error("Error getting overdue POs", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "overdue-status"),
    response_model=dict
)
async def check_po_overdue_status(
    po_id: str,
    tenant_id: str = Path(..., description="Tenant ID")
):
    """Check whether a specific PO is overdue.

    Args:
        po_id: Purchase order UUID.
        tenant_id: Tenant UUID.

    Returns:
        Overdue status info, or {"overdue": False} when not overdue.
    """
    try:
        info = await OverduePODetector().check_single_po_overdue(
            po_id=uuid.UUID(po_id),
            tenant_id=uuid.UUID(tenant_id),
        )
        # Falsy result (no overdue record) maps to an explicit "not overdue".
        return info or {"overdue": False}
    except Exception as e:
        logger.error("Error checking PO overdue status", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
463
services/procurement/app/api/replenishment.py
Normal file
463
services/procurement/app/api/replenishment.py
Normal file
@@ -0,0 +1,463 @@
|
||||
"""
|
||||
Replenishment Planning API Routes
|
||||
|
||||
Provides endpoints for advanced replenishment planning including:
|
||||
- Generate replenishment plans
|
||||
- View inventory projections
|
||||
- Review supplier allocations
|
||||
- Get planning analytics
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
from datetime import date
|
||||
|
||||
from app.schemas.replenishment import (
|
||||
GenerateReplenishmentPlanRequest,
|
||||
GenerateReplenishmentPlanResponse,
|
||||
ReplenishmentPlanResponse,
|
||||
ReplenishmentPlanSummary,
|
||||
InventoryProjectionResponse,
|
||||
SupplierAllocationResponse,
|
||||
SupplierSelectionRequest,
|
||||
SupplierSelectionResult,
|
||||
SafetyStockRequest,
|
||||
SafetyStockResponse,
|
||||
ProjectInventoryRequest,
|
||||
ProjectInventoryResponse,
|
||||
ReplenishmentAnalytics,
|
||||
MOQAggregationRequest,
|
||||
MOQAggregationResponse
|
||||
)
|
||||
from app.services.procurement_service import ProcurementService
|
||||
from app.services.replenishment_planning_service import ReplenishmentPlanningService
|
||||
from app.services.safety_stock_calculator import SafetyStockCalculator
|
||||
from app.services.inventory_projector import InventoryProjector, DailyDemand, ScheduledReceipt
|
||||
from app.services.moq_aggregator import MOQAggregator
|
||||
from app.services.supplier_selector import SupplierSelector
|
||||
from app.core.dependencies import get_db, get_current_tenant_id
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from shared.routing import RouteBuilder
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["replenishment-planning"])
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Replenishment Plan Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("replenishment-plans/generate"),
    response_model=GenerateReplenishmentPlanResponse
)
async def generate_replenishment_plan(
    request: GenerateReplenishmentPlanRequest,
    tenant_id: UUID = Depends(get_current_tenant_id),
    db: AsyncSession = Depends(get_db)
):
    """Generate an advanced replenishment plan.

    Covers lead-time-aware order dates, dynamic safety stock, inventory
    projection, and shelf-life management.
    """
    try:
        logger.info("Generating replenishment plan", tenant_id=tenant_id)

        # Planner is configured from the request's horizon/service-level knobs.
        planner = ReplenishmentPlanningService(
            projection_horizon_days=request.projection_horizon_days,
            default_service_level=request.service_level,
            default_buffer_days=request.buffer_days,
        )

        plan = await planner.generate_replenishment_plan(
            tenant_id=str(tenant_id),
            requirements=request.requirements,
            forecast_id=request.forecast_id,
            production_schedule_id=request.production_schedule_id,
        )

        # Serialize the plan into the response schema's shape.
        payload = planner.export_plan_to_dict(plan)
        return GenerateReplenishmentPlanResponse(**payload)

    except Exception as e:
        logger.error("Failed to generate replenishment plan",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_operations_route("replenishment-plans"),
    response_model=List[ReplenishmentPlanSummary]
)
async def list_replenishment_plans(
    tenant_id: UUID = Depends(get_current_tenant_id),
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    status: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """List replenishment plans for the tenant, with pagination and an
    optional status filter."""
    try:
        from app.repositories.replenishment_repository import ReplenishmentPlanRepository

        plan_repo = ReplenishmentPlanRepository(db)
        return await plan_repo.list_plans(
            tenant_id=tenant_id,
            skip=skip,
            limit=limit,
            status=status,
        )

    except Exception as e:
        logger.error("Failed to list replenishment plans",
                     tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_detail_route("replenishment-plans", "plan_id"),
    response_model=ReplenishmentPlanResponse
)
async def get_replenishment_plan(
    plan_id: UUID = Path(...),
    tenant_id: UUID = Depends(get_current_tenant_id),
    db: AsyncSession = Depends(get_db)
):
    """Get a replenishment plan by ID, scoped to the current tenant."""
    try:
        from app.repositories.replenishment_repository import ReplenishmentPlanRepository

        plan = await ReplenishmentPlanRepository(db).get_plan_by_id(plan_id, tenant_id)
        if not plan:
            raise HTTPException(status_code=404, detail="Replenishment plan not found")
        return plan

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get replenishment plan",
                     tenant_id=tenant_id, plan_id=plan_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Inventory Projection Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("replenishment-plans/inventory-projections/project"),
    response_model=ProjectInventoryResponse
)
async def project_inventory(
    request: ProjectInventoryRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """Project inventory levels over the request horizon to surface
    future stockouts for a single ingredient."""
    try:
        logger.info("Projecting inventory", tenant_id=tenant_id,
                    ingredient_id=request.ingredient_id)

        engine = InventoryProjector(request.projection_horizon_days)

        # Translate raw request dicts into the projector's value objects.
        demand_series = [
            DailyDemand(
                ingredient_id=request.ingredient_id,
                date=entry['date'],
                quantity=entry['quantity'],
            )
            for entry in request.daily_demand
        ]

        receipt_series = [
            ScheduledReceipt(
                ingredient_id=request.ingredient_id,
                date=entry['date'],
                quantity=entry['quantity'],
                source=entry.get('source', 'purchase_order'),
                reference_id=entry.get('reference_id'),
            )
            for entry in request.scheduled_receipts
        ]

        projection = engine.project_inventory(
            ingredient_id=request.ingredient_id,
            ingredient_name=request.ingredient_name,
            current_stock=request.current_stock,
            unit_of_measure=request.unit_of_measure,
            daily_demand=demand_series,
            scheduled_receipts=receipt_series,
        )

        # Serialize into the response schema's shape.
        payload = engine.export_projection_to_dict(projection)
        return ProjectInventoryResponse(**payload)

    except Exception as e:
        logger.error("Failed to project inventory",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_operations_route("replenishment-plans/inventory-projections"),
    response_model=List[InventoryProjectionResponse]
)
async def list_inventory_projections(
    tenant_id: UUID = Depends(get_current_tenant_id),
    ingredient_id: Optional[UUID] = None,
    projection_date: Optional[date] = None,
    stockout_only: bool = False,
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    db: AsyncSession = Depends(get_db)
):
    """
    List stored inventory projections, optionally filtered by ingredient,
    projection date, or stockout status.
    """
    try:
        from app.repositories.replenishment_repository import InventoryProjectionRepository

        # Delegate all filtering/pagination to the repository layer.
        return await InventoryProjectionRepository(db).list_projections(
            tenant_id=tenant_id,
            ingredient_id=ingredient_id,
            projection_date=projection_date,
            stockout_only=stockout_only,
            skip=skip,
            limit=limit,
        )

    except Exception as e:
        logger.error("Failed to list inventory projections",
                    tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
# ============================================================
|
||||
# Safety Stock Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("replenishment-plans/safety-stock/calculate"),
    response_model=SafetyStockResponse
)
async def calculate_safety_stock(
    request: SafetyStockRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Calculate dynamic safety stock for an ingredient using statistical methods.
    """
    try:
        logger.info("Calculating safety stock", tenant_id=tenant_id,
                   ingredient_id=request.ingredient_id)

        calc = SafetyStockCalculator(request.service_level)
        outcome = calc.calculate_from_demand_history(
            daily_demands=request.daily_demands,
            lead_time_days=request.lead_time_days,
            service_level=request.service_level,
        )
        # Serialize the calculator's result into the response model.
        return SafetyStockResponse(**calc.export_to_dict(outcome))

    except Exception as e:
        logger.error("Failed to calculate safety stock",
                    tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
# ============================================================
|
||||
# Supplier Selection Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("replenishment-plans/supplier-selections/evaluate"),
    response_model=SupplierSelectionResult
)
async def evaluate_supplier_selection(
    request: SupplierSelectionRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Evaluate supplier options using multi-criteria decision analysis.
    """
    try:
        logger.info("Evaluating supplier selection", tenant_id=tenant_id,
                   ingredient_id=request.ingredient_id)

        from app.services.supplier_selector import SupplierOption

        # Convert the raw option payloads into selector domain objects.
        candidates = [SupplierOption(**raw) for raw in request.supplier_options]

        engine = SupplierSelector()
        decision = engine.select_suppliers(
            ingredient_id=request.ingredient_id,
            ingredient_name=request.ingredient_name,
            required_quantity=request.required_quantity,
            supplier_options=candidates,
        )
        return SupplierSelectionResult(**engine.export_result_to_dict(decision))

    except Exception as e:
        logger.error("Failed to evaluate supplier selection",
                    tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
@router.get(
    route_builder.build_operations_route("replenishment-plans/supplier-allocations"),
    response_model=List[SupplierAllocationResponse]
)
async def list_supplier_allocations(
    tenant_id: UUID = Depends(get_current_tenant_id),
    requirement_id: Optional[UUID] = None,
    supplier_id: Optional[UUID] = None,
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    db: AsyncSession = Depends(get_db)
):
    """
    List supplier allocations, optionally filtered by requirement or supplier.
    """
    try:
        from app.repositories.replenishment_repository import SupplierAllocationRepository

        # Repository handles filtering and pagination.
        return await SupplierAllocationRepository(db).list_allocations(
            tenant_id=tenant_id,
            requirement_id=requirement_id,
            supplier_id=supplier_id,
            skip=skip,
            limit=limit,
        )

    except Exception as e:
        logger.error("Failed to list supplier allocations",
                    tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
# ============================================================
|
||||
# MOQ Aggregation Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("replenishment-plans/moq-aggregation/aggregate"),
    response_model=MOQAggregationResponse
)
async def aggregate_for_moq(
    request: MOQAggregationRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Aggregate procurement requirements so that orders meet each supplier's
    Minimum Order Quantity (MOQ).
    """
    try:
        logger.info("Aggregating requirements for MOQ", tenant_id=tenant_id)

        from app.services.moq_aggregator import (
            ProcurementRequirement as MOQReq,
            SupplierConstraints
        )

        # Convert the raw request payloads into aggregator domain objects.
        reqs = [MOQReq(**payload) for payload in request.requirements]
        limits = {
            supplier_key: SupplierConstraints(**payload)
            for supplier_key, payload in request.supplier_constraints.items()
        }

        aggregator = MOQAggregator()
        aggregated_orders = aggregator.aggregate_requirements(
            requirements=reqs,
            supplier_constraints=limits,
        )

        # Efficiency metrics are computed over the full set of orders.
        efficiency = aggregator.calculate_order_efficiency(aggregated_orders)

        return MOQAggregationResponse(
            aggregated_orders=[aggregator.export_to_dict(order) for order in aggregated_orders],
            efficiency_metrics=efficiency
        )

    except Exception as e:
        logger.error("Failed to aggregate for MOQ",
                    tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
# ============================================================
|
||||
# Analytics Endpoints
|
||||
# ============================================================
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("replenishment-plans"),
    response_model=ReplenishmentAnalytics
)
async def get_replenishment_analytics(
    tenant_id: UUID = Depends(get_current_tenant_id),
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Get replenishment planning analytics for an optional date window.
    """
    try:
        from app.repositories.replenishment_repository import ReplenishmentAnalyticsRepository

        # The repository applies the (optional) date-range filter.
        return await ReplenishmentAnalyticsRepository(db).get_analytics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
        )

    except Exception as e:
        logger.error("Failed to get replenishment analytics",
                    tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
0
services/procurement/app/core/__init__.py
Normal file
0
services/procurement/app/core/__init__.py
Normal file
142
services/procurement/app/core/config.py
Normal file
142
services/procurement/app/core/config.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/core/config.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Service Configuration
|
||||
"""
|
||||
|
||||
import os
|
||||
from decimal import Decimal
|
||||
from pydantic import Field
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
|
||||
class ProcurementSettings(BaseServiceSettings):
    """Procurement service specific settings.

    All values are read from environment variables at class-definition time,
    with hard-coded fallbacks for local development. Replenishment-planning
    fields use pydantic ``Field`` defaults instead of ``os.getenv``.
    """

    # Service Identity
    APP_NAME: str = "Procurement Service"
    SERVICE_NAME: str = "procurement-service"
    VERSION: str = "1.0.0"
    DESCRIPTION: str = "Procurement planning, purchase order management, and supplier integration"

    # Database configuration (secure approach - build from components)
    @property
    def DATABASE_URL(self) -> str:
        """Build database URL from secure components.

        Resolution order: a complete PROCUREMENT_DATABASE_URL wins; otherwise
        the URL is assembled from the individual PROCUREMENT_DB_* variables.
        """
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("PROCUREMENT_DATABASE_URL")
        if complete_url:
            return complete_url

        # Build from components (secure approach)
        user = os.getenv("PROCUREMENT_DB_USER", "procurement_user")
        # NOTE(review): hard-coded fallback credential — acceptable only for
        # local development; production must set PROCUREMENT_DB_PASSWORD.
        password = os.getenv("PROCUREMENT_DB_PASSWORD", "procurement_pass123")
        host = os.getenv("PROCUREMENT_DB_HOST", "localhost")
        port = os.getenv("PROCUREMENT_DB_PORT", "5432")
        name = os.getenv("PROCUREMENT_DB_NAME", "procurement_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

    # Procurement Planning
    PROCUREMENT_PLANNING_ENABLED: bool = os.getenv("PROCUREMENT_PLANNING_ENABLED", "true").lower() == "true"
    PROCUREMENT_LEAD_TIME_DAYS: int = int(os.getenv("PROCUREMENT_LEAD_TIME_DAYS", "3"))
    DEMAND_FORECAST_DAYS: int = int(os.getenv("DEMAND_FORECAST_DAYS", "14"))
    SAFETY_STOCK_PERCENTAGE: float = float(os.getenv("SAFETY_STOCK_PERCENTAGE", "20.0"))

    # Purchase Order Settings
    AUTO_APPROVE_POS: bool = os.getenv("AUTO_APPROVE_POS", "false").lower() == "true"
    AUTO_APPROVAL_MAX_AMOUNT: float = float(os.getenv("AUTO_APPROVAL_MAX_AMOUNT", "1000.0"))
    MAX_PO_ITEMS: int = int(os.getenv("MAX_PO_ITEMS", "100"))
    PO_EXPIRY_DAYS: int = int(os.getenv("PO_EXPIRY_DAYS", "30"))

    # Local Production Settings
    SUPPORT_LOCAL_PRODUCTION: bool = os.getenv("SUPPORT_LOCAL_PRODUCTION", "true").lower() == "true"
    MAX_BOM_EXPLOSION_DEPTH: int = int(os.getenv("MAX_BOM_EXPLOSION_DEPTH", "5"))
    RECIPE_CACHE_TTL_SECONDS: int = int(os.getenv("RECIPE_CACHE_TTL_SECONDS", "3600"))

    # Supplier Integration
    SUPPLIER_VALIDATION_ENABLED: bool = os.getenv("SUPPLIER_VALIDATION_ENABLED", "true").lower() == "true"
    MIN_SUPPLIER_RATING: float = float(os.getenv("MIN_SUPPLIER_RATING", "3.0"))
    MULTI_SUPPLIER_ENABLED: bool = os.getenv("MULTI_SUPPLIER_ENABLED", "true").lower() == "true"

    # Plan Management
    STALE_PLAN_DAYS: int = int(os.getenv("STALE_PLAN_DAYS", "7"))
    ARCHIVE_PLAN_DAYS: int = int(os.getenv("ARCHIVE_PLAN_DAYS", "90"))
    MAX_CONCURRENT_PLANS: int = int(os.getenv("MAX_CONCURRENT_PLANS", "10"))

    # Integration Settings (base URLs of sibling services)
    INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8000")

    # ================================================================
    # REPLENISHMENT PLANNING SETTINGS
    # ================================================================

    # Projection Settings
    REPLENISHMENT_PROJECTION_HORIZON_DAYS: int = Field(
        default=7,
        description="Days to project ahead for inventory planning"
    )
    REPLENISHMENT_SERVICE_LEVEL: float = Field(
        default=0.95,
        description="Target service level for safety stock (0-1)"
    )
    REPLENISHMENT_BUFFER_DAYS: int = Field(
        default=1,
        description="Buffer days to add to lead time"
    )

    # Safety Stock Settings
    SAFETY_STOCK_SERVICE_LEVEL: float = Field(
        default=0.95,
        description="Default service level for safety stock calculation"
    )
    SAFETY_STOCK_METHOD: str = Field(
        default="statistical",
        description="Method for safety stock: 'statistical' or 'fixed_percentage'"
    )

    # MOQ Aggregation Settings
    MOQ_CONSOLIDATION_WINDOW_DAYS: int = Field(
        default=7,
        description="Days within which to consolidate orders for MOQ"
    )
    MOQ_ALLOW_EARLY_ORDERING: bool = Field(
        default=True,
        description="Allow ordering early to meet MOQ"
    )

    # Supplier Selection Settings
    # NOTE(review): the four weights below default to 0.40+0.20+0.20+0.20 = 1.0;
    # presumably the selector expects them to sum to 1 — confirm before retuning.
    SUPPLIER_PRICE_WEIGHT: float = Field(
        default=0.40,
        description="Weight for price in supplier selection (0-1)"
    )
    SUPPLIER_LEAD_TIME_WEIGHT: float = Field(
        default=0.20,
        description="Weight for lead time in supplier selection (0-1)"
    )
    SUPPLIER_QUALITY_WEIGHT: float = Field(
        default=0.20,
        description="Weight for quality in supplier selection (0-1)"
    )
    SUPPLIER_RELIABILITY_WEIGHT: float = Field(
        default=0.20,
        description="Weight for reliability in supplier selection (0-1)"
    )
    SUPPLIER_DIVERSIFICATION_THRESHOLD: Decimal = Field(
        default=Decimal('1000'),
        description="Quantity threshold for supplier diversification"
    )
    SUPPLIER_MAX_SINGLE_PERCENTAGE: float = Field(
        default=0.70,
        description="Maximum % of order to single supplier (0-1)"
    )

    # More sibling-service base URLs
    FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
    RECIPES_SERVICE_URL: str = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")
    NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
|
||||
|
||||
# Global settings instance — import ``settings`` from this module instead of
# constructing ProcurementSettings again (env is read once at import time).
settings = ProcurementSettings()
||||
47
services/procurement/app/core/database.py
Normal file
47
services/procurement/app/core/database.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/core/database.py
|
||||
# ================================================================
|
||||
"""
|
||||
Database connection and session management for Procurement Service
|
||||
"""
|
||||
|
||||
from shared.database.base import DatabaseManager
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
from .config import settings
|
||||
|
||||
# Initialize database manager.
# NOTE: settings.DATABASE_URL is resolved once here at import time; later
# changes to the environment are not picked up.
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    echo=settings.DEBUG  # echo SQL only when the service runs in debug mode
)

# Create async session factory bound to the manager's async engine.
# expire_on_commit=False keeps ORM objects readable after commit, which suits
# request-scoped sessions handed out by get_db().
AsyncSessionLocal = async_sessionmaker(
    database_manager.async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autocommit=False,
    autoflush=False,
)
||||
|
||||
|
||||
async def get_db() -> AsyncSession:
    """
    FastAPI dependency that yields a request-scoped database session.

    Used in endpoints via ``Depends(get_db)``. The ``async with`` block
    guarantees the session is closed when the request finishes, whether the
    endpoint returned normally or raised — the explicit ``session.close()``
    in the old ``try/finally`` duplicated what the context manager already
    does and has been removed.
    """
    async with AsyncSessionLocal() as session:
        yield session
|
||||
|
||||
async def init_db():
    """Initialize database (create tables if needed).

    Thin delegate to ``DatabaseManager.create_all()``; does nothing else.
    """
    await database_manager.create_all()
||||
|
||||
|
||||
async def close_db():
    """Close database connections.

    Thin delegate to ``DatabaseManager.close()``; call during shutdown.
    """
    await database_manager.close()
||||
47
services/procurement/app/core/dependencies.py
Normal file
47
services/procurement/app/core/dependencies.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
FastAPI Dependencies for Procurement Service
|
||||
Uses shared authentication infrastructure with UUID validation
|
||||
"""
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from uuid import UUID
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from .database import get_db
|
||||
from shared.auth.decorators import get_current_tenant_id_dep
|
||||
|
||||
|
||||
async def get_current_tenant_id(
    tenant_id: Optional[str] = Depends(get_current_tenant_id_dep)
) -> UUID:
    """
    Extract and validate tenant ID from request using shared infrastructure.
    Adds UUID validation to ensure tenant ID format is correct.

    Args:
        tenant_id: Tenant ID string from the shared dependency (taken from
            the x-tenant-id header)

    Returns:
        UUID: Validated tenant ID

    Raises:
        HTTPException: 400 if tenant ID is missing or not a valid UUID
    """
    if not tenant_id:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="x-tenant-id header is required"
        )

    try:
        return UUID(tenant_id)
    except (ValueError, AttributeError) as exc:
        # Chain the parsing error so logs/tracebacks show *why* the value
        # failed UUID validation instead of a bare HTTPException.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid tenant ID format: {tenant_id}"
        ) from exc
||||
|
||||
|
||||
# Re-export get_db for convenience so endpoints can import both the session
# and tenant dependencies from this single module.
__all__ = ["get_db", "get_current_tenant_id"]
||||
6
services/procurement/app/jobs/__init__.py
Normal file
6
services/procurement/app/jobs/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""
|
||||
Procurement background jobs and schedulers
|
||||
"""
|
||||
from .overdue_po_scheduler import OverduePOScheduler
|
||||
|
||||
__all__ = ["OverduePOScheduler"]
|
||||
216
services/procurement/app/jobs/overdue_po_scheduler.py
Normal file
216
services/procurement/app/jobs/overdue_po_scheduler.py
Normal file
@@ -0,0 +1,216 @@
|
||||
"""
|
||||
Overdue PO Scheduler
|
||||
|
||||
Background scheduler that periodically checks for overdue purchase orders
|
||||
and publishes alerts for them.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import Optional
|
||||
from datetime import datetime, timezone
|
||||
import structlog
|
||||
|
||||
from app.services.overdue_po_detector import OverduePODetector
|
||||
from shared.messaging import RabbitMQClient
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class OverduePOScheduler:
    """
    Overdue PO Scheduler.

    Background task that periodically runs OverduePODetector and publishes
    alert events for critical/high severity overdue purchase orders.
    Lifecycle is owned by start()/stop(), which manage a single asyncio task.
    """

    def __init__(
        self,
        rabbitmq_client: Optional[RabbitMQClient] = None,
        check_interval_seconds: int = 3600,  # 1 hour default
    ):
        """
        Initialize overdue PO scheduler.

        Args:
            rabbitmq_client: RabbitMQ client for publishing events. When None
                or disconnected, detection still runs but alerts are skipped.
            check_interval_seconds: Seconds between checks (default: 3600 = 1 hour)
        """
        self.detector = OverduePODetector()
        self.rabbitmq_client = rabbitmq_client
        self.check_interval_seconds = check_interval_seconds

        # Handle to the background loop task; None until start() is called.
        self._task: Optional[asyncio.Task] = None
        self._running = False

        logger.info(
            "Overdue PO Scheduler initialized",
            check_interval_seconds=check_interval_seconds
        )

    async def start(self):
        """Start the scheduler background task (no-op if already running)."""
        if self._running:
            logger.warning("Overdue PO Scheduler already running")
            return

        self._running = True
        self._task = asyncio.create_task(self._run_scheduler())

        logger.info("Overdue PO Scheduler started")

    async def stop(self):
        """Stop the scheduler background task and wait for it to finish."""
        if not self._running:
            return

        self._running = False

        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                # Expected: cancellation usually lands during the sleep.
                pass

        logger.info("Overdue PO Scheduler stopped")

    async def _run_scheduler(self):
        """Main scheduler loop: run a cycle, sleep, repeat until stopped."""
        logger.info("Overdue PO Scheduler loop started")

        while self._running:
            try:
                await self._process_cycle()
            except Exception as e:
                # A failed cycle must never kill the loop; log and continue.
                logger.error(
                    "Overdue PO scheduler cycle failed",
                    error=str(e),
                    exc_info=True
                )

            # Wait for next cycle; cancellation during sleep ends the loop.
            try:
                await asyncio.sleep(self.check_interval_seconds)
            except asyncio.CancelledError:
                break

        logger.info("Overdue PO Scheduler loop ended")

    async def _process_cycle(self):
        """Process one scheduler cycle - detect and alert on overdue POs."""
        logger.info("Starting overdue PO detection cycle")

        try:
            # Detect all overdue POs across all tenants
            overdue_pos = await self.detector.detect_overdue_pos()

            if not overdue_pos:
                logger.info("No overdue POs detected in this cycle")
                return

            # Group by severity
            by_severity = {
                'critical': [],
                'high': [],
                'medium': [],
                'low': []
            }

            for po in overdue_pos:
                severity = po.get('severity', 'medium')
                # FIX: an unrecognized severity label previously raised
                # KeyError and aborted the whole cycle; unknown values now
                # fall back to 'medium', matching the missing-key default.
                by_severity.get(severity, by_severity['medium']).append(po)

            # Log summary
            logger.warning(
                "Overdue POs detected",
                total=len(overdue_pos),
                critical=len(by_severity['critical']),
                high=len(by_severity['high']),
                medium=len(by_severity['medium']),
                low=len(by_severity['low'])
            )

            # Publish events for critical and high severity only.
            if self.rabbitmq_client and self.rabbitmq_client.connected:
                critical_and_high = by_severity['critical'] + by_severity['high']

                for po in critical_and_high:
                    await self._publish_overdue_alert(po)

                logger.info(
                    "Published overdue alerts",
                    count=len(critical_and_high)
                )
            else:
                logger.warning(
                    "RabbitMQ not available, skipping alert publishing",
                    overdue_count=len(by_severity['critical'] + by_severity['high'])
                )

        except Exception as e:
            # Best-effort cycle: swallow after logging (the loop also guards).
            logger.error(
                "Error in overdue PO detection cycle",
                error=str(e),
                exc_info=True
            )

    async def _publish_overdue_alert(self, po_summary: dict):
        """
        Publish an overdue PO alert event to the procurement.events exchange.

        Args:
            po_summary: Overdue PO summary from detector. Expected to carry
                po_id, tenant_id, po_number, supplier_id, status, total_amount,
                currency, estimated_delivery_date, days_overdue, severity and
                priority keys; a missing key is logged, not raised.
        """
        try:
            event_data = {
                'po_id': po_summary['po_id'],
                'tenant_id': po_summary['tenant_id'],
                'po_number': po_summary['po_number'],
                'supplier_id': po_summary['supplier_id'],
                'status': po_summary['status'],
                'total_amount': po_summary['total_amount'],
                'currency': po_summary['currency'],
                'estimated_delivery_date': po_summary['estimated_delivery_date'],
                'days_overdue': po_summary['days_overdue'],
                'severity': po_summary['severity'],
                'priority': po_summary['priority'],
                'detected_at': datetime.now(timezone.utc).isoformat()
            }

            # Create event data structure (envelope + payload).
            event_data_full = {
                'service_name': 'procurement',
                'event_type': 'po.overdue_detected',
                'timestamp': datetime.now(timezone.utc).isoformat(),
                **event_data  # Include the original event_data
            }

            # Publish to RabbitMQ
            success = await self.rabbitmq_client.publish_event(
                exchange_name='procurement.events',
                routing_key='po.overdue',
                event_data=event_data_full,
                persistent=True
            )

            if success:
                logger.info(
                    "Published overdue alert",
                    po_number=po_summary['po_number'],
                    days_overdue=po_summary['days_overdue'],
                    severity=po_summary['severity']
                )
            else:
                logger.error(
                    "Failed to publish overdue alert",
                    po_number=po_summary['po_number']
                )

        except Exception as e:
            # Publishing is best-effort; a single bad PO must not stop others.
            logger.error(
                "Error publishing overdue alert",
                error=str(e),
                po_number=po_summary.get('po_number'),
                exc_info=True
            )
233
services/procurement/app/main.py
Normal file
233
services/procurement/app/main.py
Normal file
@@ -0,0 +1,233 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/main.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Service - FastAPI Application
|
||||
Procurement planning, purchase order management, and supplier integration
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from sqlalchemy import text
|
||||
from app.core.config import settings
|
||||
from app.core.database import database_manager
|
||||
from shared.service_base import StandardFastAPIService
|
||||
from app.jobs.overdue_po_scheduler import OverduePOScheduler
|
||||
|
||||
|
||||
class ProcurementService(StandardFastAPIService):
|
||||
"""Procurement Service with standardized setup"""
|
||||
|
||||
expected_migration_version = "001_unified_initial_schema"
|
||||
|
||||
async def verify_migrations(self):
|
||||
"""Verify database schema matches the latest migrations"""
|
||||
try:
|
||||
async with self.database_manager.get_session() as session:
|
||||
result = await session.execute(text("SELECT version_num FROM alembic_version"))
|
||||
version = result.scalar()
|
||||
if version != self.expected_migration_version:
|
||||
self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
|
||||
raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
|
||||
self.logger.info(f"Migration verification successful: {version}")
|
||||
except Exception as e:
|
||||
self.logger.error(f"Migration verification failed: {e}")
|
||||
raise
|
||||
|
||||
    def __init__(self):
        # Define expected database tables for health checks
        procurement_expected_tables = [
            'procurement_plans',
            'procurement_requirements',
            'purchase_orders',
            'purchase_order_items',
            'deliveries',
            'delivery_items',
            'supplier_invoices',
            'replenishment_plans',
            'replenishment_plan_items',
            'inventory_projections',
            'supplier_allocations',
            'supplier_selection_history'
        ]

        # Initialize scheduler, delivery tracking, and rabbitmq client.
        # These stay None until on_startup / _setup_messaging populate them.
        # NOTE(review): they are deliberately set before super().__init__ —
        # presumably the base class may touch them during setup; keep order.
        self.overdue_po_scheduler = None
        self.delivery_tracking_service = None
        self.rabbitmq_client = None
        self.event_publisher = None

        super().__init__(
            service_name="procurement-service",
            app_name=settings.APP_NAME,
            description=settings.DESCRIPTION,
            version=settings.VERSION,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=procurement_expected_tables,
            enable_messaging=True  # Enable RabbitMQ for event publishing
        )
|
||||
async def _setup_messaging(self):
|
||||
"""Setup messaging for procurement service"""
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
|
||||
try:
|
||||
self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="procurement-service")
|
||||
await self.rabbitmq_client.connect()
|
||||
# Create unified event publisher
|
||||
self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "procurement-service")
|
||||
self.logger.info("Procurement service messaging setup completed")
|
||||
except Exception as e:
|
||||
self.logger.error("Failed to setup procurement messaging", error=str(e))
|
||||
raise
|
||||
|
||||
async def _cleanup_messaging(self):
|
||||
"""Cleanup messaging for procurement service"""
|
||||
try:
|
||||
if self.rabbitmq_client:
|
||||
await self.rabbitmq_client.disconnect()
|
||||
self.logger.info("Procurement service messaging cleanup completed")
|
||||
except Exception as e:
|
||||
self.logger.error("Error during procurement messaging cleanup", error=str(e))
|
||||
|
||||
    async def on_startup(self, app: FastAPI):
        """Custom startup logic for procurement service.

        Order matters: base startup first, then delivery tracking, then
        optional Redis, then (RabbitMQ permitting) the overdue PO scheduler.
        """
        await super().on_startup(app)

        self.logger.info("Procurement Service starting up...")

        # Start delivery tracking service (APScheduler with leader election)
        from app.services.delivery_tracking_service import DeliveryTrackingService
        self.delivery_tracking_service = DeliveryTrackingService(
            event_publisher=self.event_publisher,
            config=settings,
            database_manager=self.database_manager
        )
        await self.delivery_tracking_service.start()
        self.logger.info("Delivery tracking service started")

        # Initialize Redis for caching (optional - service can run without Redis)
        from shared.redis_utils import initialize_redis, get_redis_client
        try:
            redis_url = settings.REDIS_URL  # Use configured Redis URL with TLS and auth
            await initialize_redis(redis_url, db=settings.REDIS_DB, max_connections=settings.REDIS_MAX_CONNECTIONS)
            redis_client = await get_redis_client()
            # Log only the host part (after '@') to avoid leaking credentials.
            self.logger.info("Redis initialized successfully for procurement service",
                           redis_url=redis_url.split("@")[-1], db=settings.REDIS_DB)
        except Exception as e:
            # locals() guard: settings.REDIS_URL itself may have raised, in
            # which case redis_url was never bound.
            self.logger.warning("Failed to initialize Redis for caching, service will continue without caching",
                              error=str(e), redis_url=redis_url.split("@")[-1] if 'redis_url' in locals() else "unknown")
            redis_client = None

        # Store in app state for internal API access
        app.state.delivery_tracking_service = self.delivery_tracking_service
        app.state.event_publisher = self.event_publisher
        app.state.redis_client = redis_client

        # Start overdue PO scheduler only when RabbitMQ is actually connected,
        # since its alerts are published over RabbitMQ.
        if self.rabbitmq_client and self.rabbitmq_client.connected:
            self.overdue_po_scheduler = OverduePOScheduler(
                rabbitmq_client=self.rabbitmq_client,
                check_interval_seconds=3600  # Check every hour
            )
            await self.overdue_po_scheduler.start()
            self.logger.info("Overdue PO scheduler started")
        else:
            self.logger.warning("RabbitMQ not available, overdue PO scheduler not started")
|
||||
    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for procurement service.

        Closes Redis, stops the background services started in on_startup(),
        then delegates to the base-class shutdown. Each step is guarded so a
        failure in one does not block the others.
        """
        self.logger.info("Procurement Service shutting down...")

        # Close Redis connections (if initialized)
        try:
            from shared.redis_utils import close_redis
            await close_redis()
            self.logger.info("Redis connections closed")
        except Exception as e:
            # Debug level: a failure here usually just means Redis was never
            # initialized (it is optional at startup).
            self.logger.debug("Redis cleanup failed or Redis was not initialized", error=str(e))

        # Stop delivery tracking service
        if self.delivery_tracking_service:
            await self.delivery_tracking_service.stop()
            self.logger.info("Delivery tracking service stopped")

        # Stop overdue PO scheduler
        if self.overdue_po_scheduler:
            await self.overdue_po_scheduler.stop()
            self.logger.info("Overdue PO scheduler stopped")

        await super().on_shutdown(app)
|
||||
|
||||
def get_service_features(self):
|
||||
"""Return procurement-specific features"""
|
||||
return [
|
||||
"procurement_planning",
|
||||
"purchase_order_management",
|
||||
"delivery_tracking",
|
||||
"invoice_management",
|
||||
"supplier_integration",
|
||||
"local_production_support",
|
||||
"recipe_explosion"
|
||||
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Module-level wiring: instantiate the service, build the FastAPI app, and
# register every API router. Runs once at import time.
# ---------------------------------------------------------------------------

# Create service instance
service = ProcurementService()

# Create FastAPI app with standardized setup
app = service.create_app()

# Setup standard endpoints (health, readiness, metrics)
service.setup_standard_endpoints()

# Include routers
from app.api.procurement_plans import router as procurement_plans_router
from app.api.purchase_orders import router as purchase_orders_router
from app.api import internal_transfer  # Internal Transfer Routes
from app.api import replenishment  # Enhanced Replenishment Planning Routes
from app.api import analytics  # Procurement Analytics Routes
from app.api import internal_delivery  # Internal Delivery Tracking Routes
from app.api import ml_insights  # ML insights endpoint
from app.api import internal_demo  # Internal demo data cloning
from app.api.expected_deliveries import router as expected_deliveries_router  # Expected Deliveries Routes
from app.api.internal_delivery_tracking import router as internal_delivery_tracking_router  # NEW: Internal trigger endpoint

service.add_router(procurement_plans_router)
service.add_router(purchase_orders_router)
service.add_router(internal_transfer.router, tags=["internal-transfer"])  # Internal transfer routes
service.add_router(replenishment.router, tags=["replenishment"])  # RouteBuilder already includes full path
service.add_router(analytics.router, tags=["analytics"])  # RouteBuilder already includes full path
service.add_router(internal_demo.router, tags=["internal-demo"])  # Internal demo data cloning
service.add_router(internal_delivery.router, tags=["internal-delivery"])  # Internal delivery tracking
service.add_router(internal_delivery_tracking_router, tags=["internal-delivery-tracking"])  # NEW: Delivery alert trigger
service.add_router(ml_insights.router)  # ML insights endpoint
service.add_router(ml_insights.internal_router)  # Internal ML insights endpoint
service.add_router(expected_deliveries_router, tags=["expected-deliveries"])  # Expected deliveries endpoint
|
||||
|
||||
|
||||
@app.middleware("http")
async def logging_middleware(request: Request, call_next):
    """Log every HTTP request with method, URL, status code and latency.

    Fix: durations are now measured with time.perf_counter(), a monotonic
    clock, instead of time.time(). Wall-clock time can jump backwards or
    forwards under NTP adjustment, which made the old process_time value
    occasionally negative or wildly wrong.
    """
    import time

    start_time = time.perf_counter()
    response = await call_next(request)
    process_time = time.perf_counter() - start_time

    service.logger.info("HTTP request processed",
                        method=request.method,
                        url=str(request.url),
                        status_code=response.status_code,
                        process_time=round(process_time, 4))

    return response
|
||||
|
||||
|
||||
# Direct-execution entry point for local development; reload is tied to the
# DEBUG setting so production runs without the auto-reloader.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=8000,
        reload=settings.DEBUG
    )
|
||||
851
services/procurement/app/ml/price_forecaster.py
Normal file
851
services/procurement/app/ml/price_forecaster.py
Normal file
@@ -0,0 +1,851 @@
|
||||
"""
Price Forecaster
Predicts supplier price changes for opportunistic buying recommendations
Identifies optimal timing for bulk purchases and price negotiation opportunities
"""

import pandas as pd
import numpy as np
from typing import Dict, List, Any, Optional, Tuple
import structlog
from datetime import datetime, timedelta
from scipy import stats
from sklearn.linear_model import LinearRegression
# NOTE(review): RandomForestRegressor is imported but never used in this
# module — candidate for removal.
from sklearn.ensemble import RandomForestRegressor
import warnings
# NOTE(review): this suppresses ALL warnings process-wide at import time,
# hiding deprecation/runtime warnings for every caller of this module.
# Consider scoping with warnings.catch_warnings() around the noisy calls.
warnings.filterwarnings('ignore')

logger = structlog.get_logger()
|
||||
|
||||
|
||||
class PriceForecaster:
    """
    Forecasts ingredient and product prices for opportunistic procurement.

    Capabilities:
    1. Short-term price forecasting (1-4 weeks)
    2. Seasonal price pattern detection
    3. Price trend analysis
    4. Buy/wait recommendations
    5. Bulk purchase opportunity identification
    6. Price volatility assessment
    7. Supplier comparison for price optimization
    """

    def __init__(self):
        """Create a forecaster with empty per-ingredient caches.

        All three caches are keyed by ingredient_id and are populated
        lazily as forecast_price() runs.
        """
        self.seasonal_patterns = {}
        self.volatility_scores = {}
        self.price_models = {}
|
||||
|
||||
    async def forecast_price(
        self,
        tenant_id: str,
        ingredient_id: str,
        price_history: pd.DataFrame,
        forecast_horizon_days: int = 30,
        min_history_days: int = 180
    ) -> Dict[str, Any]:
        """
        Forecast future prices and generate procurement recommendations.

        Pipeline: validate history -> price statistics -> seasonal patterns
        -> trend fit -> day-by-day forecast -> volatility -> buy/wait
        recommendation -> bulk opportunities -> insight payloads.

        Args:
            tenant_id: Tenant identifier
            ingredient_id: Ingredient/product identifier
            price_history: Historical price data with columns:
                - date
                - price_per_unit
                - quantity_purchased (optional)
                - supplier_id (optional)
            forecast_horizon_days: Days to forecast ahead (default 30)
            min_history_days: Minimum days of history required (default 180)

        Returns:
            Dictionary with price forecast and insights; a stub payload with
            action 'insufficient_data' when history is too short.

        NOTE(review): the validation counts rows, not calendar days — if the
        history has gaps or multiple purchases per day this mis-measures
        coverage. Confirm upstream guarantees one row per day.
        """
        logger.info(
            "Forecasting prices",
            tenant_id=tenant_id,
            ingredient_id=ingredient_id,
            history_days=len(price_history),
            forecast_days=forecast_horizon_days
        )

        # Validate input
        if len(price_history) < min_history_days:
            logger.warning(
                "Insufficient price history",
                ingredient_id=ingredient_id,
                days=len(price_history),
                required=min_history_days
            )
            return self._insufficient_data_response(
                tenant_id, ingredient_id, price_history
            )

        # Prepare data (copy so the caller's frame is not mutated)
        price_history = price_history.copy()
        price_history['date'] = pd.to_datetime(price_history['date'])
        price_history = price_history.sort_values('date')

        # Calculate price statistics
        price_stats = self._calculate_price_statistics(price_history)

        # Detect seasonal patterns
        seasonal_analysis = self._detect_seasonal_patterns(price_history)

        # Detect trends
        trend_analysis = self._analyze_price_trends(price_history)

        # Forecast future prices
        forecast = self._generate_price_forecast(
            price_history,
            forecast_horizon_days,
            seasonal_analysis,
            trend_analysis
        )

        # Calculate volatility
        volatility = self._calculate_price_volatility(price_history)

        # Generate buy/wait recommendations
        recommendations = self._generate_procurement_recommendations(
            price_history,
            forecast,
            price_stats,
            volatility,
            trend_analysis
        )

        # Identify bulk purchase opportunities
        bulk_opportunities = self._identify_bulk_opportunities(
            forecast,
            price_stats,
            volatility
        )

        # Generate insights
        insights = self._generate_price_insights(
            tenant_id,
            ingredient_id,
            price_stats,
            forecast,
            recommendations,
            bulk_opportunities,
            trend_analysis,
            volatility
        )

        # Store models (cached per ingredient for the get_* accessors)
        self.seasonal_patterns[ingredient_id] = seasonal_analysis
        self.volatility_scores[ingredient_id] = volatility

        logger.info(
            "Price forecasting complete",
            ingredient_id=ingredient_id,
            avg_forecast_price=forecast['mean_forecast_price'],
            recommendation=recommendations['action'],
            insights_generated=len(insights)
        )

        return {
            'tenant_id': tenant_id,
            'ingredient_id': ingredient_id,
            # NOTE(review): datetime.utcnow() is naive and deprecated in
            # Python 3.12 — consider datetime.now(timezone.utc) service-wide.
            'forecasted_at': datetime.utcnow().isoformat(),
            'history_days': len(price_history),
            'forecast_horizon_days': forecast_horizon_days,
            'price_stats': price_stats,
            'seasonal_analysis': seasonal_analysis,
            'trend_analysis': trend_analysis,
            'forecast': forecast,
            'volatility': volatility,
            'recommendations': recommendations,
            'bulk_opportunities': bulk_opportunities,
            'insights': insights
        }
|
||||
|
||||
def _calculate_price_statistics(
|
||||
self,
|
||||
price_history: pd.DataFrame
|
||||
) -> Dict[str, float]:
|
||||
"""
|
||||
Calculate comprehensive price statistics.
|
||||
|
||||
Args:
|
||||
price_history: Historical price data
|
||||
|
||||
Returns:
|
||||
Dictionary of price statistics
|
||||
"""
|
||||
prices = price_history['price_per_unit'].values
|
||||
|
||||
# Basic statistics
|
||||
current_price = float(prices[-1])
|
||||
mean_price = float(prices.mean())
|
||||
std_price = float(prices.std())
|
||||
cv_price = (std_price / mean_price) if mean_price > 0 else 0
|
||||
|
||||
# Price range
|
||||
min_price = float(prices.min())
|
||||
max_price = float(prices.max())
|
||||
price_range_pct = ((max_price - min_price) / mean_price * 100) if mean_price > 0 else 0
|
||||
|
||||
# Recent vs historical
|
||||
if len(prices) >= 60:
|
||||
recent_30d_mean = float(prices[-30:].mean())
|
||||
historical_mean = float(prices[:-30].mean())
|
||||
price_change_pct = ((recent_30d_mean - historical_mean) / historical_mean * 100) if historical_mean > 0 else 0
|
||||
else:
|
||||
recent_30d_mean = current_price
|
||||
price_change_pct = 0
|
||||
|
||||
# Price momentum (last 7 days vs previous 7 days)
|
||||
if len(prices) >= 14:
|
||||
last_week = prices[-7:].mean()
|
||||
prev_week = prices[-14:-7].mean()
|
||||
momentum = ((last_week - prev_week) / prev_week * 100) if prev_week > 0 else 0
|
||||
else:
|
||||
momentum = 0
|
||||
|
||||
return {
|
||||
'current_price': current_price,
|
||||
'mean_price': mean_price,
|
||||
'std_price': std_price,
|
||||
'cv_price': cv_price,
|
||||
'min_price': min_price,
|
||||
'max_price': max_price,
|
||||
'price_range_pct': price_range_pct,
|
||||
'recent_30d_mean': recent_30d_mean,
|
||||
'price_change_30d_pct': price_change_pct,
|
||||
'momentum_7d_pct': momentum,
|
||||
'data_points': len(prices)
|
||||
}
|
||||
|
||||
def _detect_seasonal_patterns(
|
||||
self,
|
||||
price_history: pd.DataFrame
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Detect seasonal price patterns.
|
||||
|
||||
Args:
|
||||
price_history: Historical price data
|
||||
|
||||
Returns:
|
||||
Seasonal pattern analysis
|
||||
"""
|
||||
# Extract month from date
|
||||
price_history = price_history.copy()
|
||||
price_history['month'] = price_history['date'].dt.month
|
||||
|
||||
# Calculate average price per month
|
||||
monthly_avg = price_history.groupby('month')['price_per_unit'].agg(['mean', 'std', 'count'])
|
||||
|
||||
overall_mean = price_history['price_per_unit'].mean()
|
||||
|
||||
seasonal_patterns = {}
|
||||
has_seasonality = False
|
||||
|
||||
month_names = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
|
||||
|
||||
for month in range(1, 13):
|
||||
if month in monthly_avg.index and monthly_avg.loc[month, 'count'] >= 3:
|
||||
month_mean = monthly_avg.loc[month, 'mean']
|
||||
deviation_pct = ((month_mean - overall_mean) / overall_mean * 100) if overall_mean > 0 else 0
|
||||
|
||||
seasonal_patterns[month_names[month-1]] = {
|
||||
'month': month,
|
||||
'avg_price': round(float(month_mean), 2),
|
||||
'deviation_pct': round(float(deviation_pct), 2),
|
||||
'sample_size': int(monthly_avg.loc[month, 'count'])
|
||||
}
|
||||
|
||||
# Significant seasonality if >10% deviation
|
||||
if abs(deviation_pct) > 10:
|
||||
has_seasonality = True
|
||||
|
||||
return {
|
||||
'has_seasonality': has_seasonality,
|
||||
'monthly_patterns': seasonal_patterns,
|
||||
'overall_mean_price': round(float(overall_mean), 2)
|
||||
}
|
||||
|
||||
def _analyze_price_trends(
|
||||
self,
|
||||
price_history: pd.DataFrame
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Analyze price trends using linear regression.
|
||||
|
||||
Args:
|
||||
price_history: Historical price data
|
||||
|
||||
Returns:
|
||||
Trend analysis
|
||||
"""
|
||||
# Create time index (days from start)
|
||||
price_history = price_history.copy()
|
||||
price_history['days_from_start'] = (
|
||||
price_history['date'] - price_history['date'].min()
|
||||
).dt.days
|
||||
|
||||
X = price_history['days_from_start'].values.reshape(-1, 1)
|
||||
y = price_history['price_per_unit'].values
|
||||
|
||||
# Fit linear regression
|
||||
model = LinearRegression()
|
||||
model.fit(X, y)
|
||||
|
||||
# Calculate trend
|
||||
slope = float(model.coef_[0])
|
||||
intercept = float(model.intercept_)
|
||||
r_squared = float(model.score(X, y))
|
||||
|
||||
# Trend direction and magnitude
|
||||
avg_price = y.mean()
|
||||
trend_pct_per_month = (slope * 30 / avg_price * 100) if avg_price > 0 else 0
|
||||
|
||||
# Classify trend
|
||||
if abs(trend_pct_per_month) < 2:
|
||||
trend_direction = 'stable'
|
||||
elif trend_pct_per_month > 2:
|
||||
trend_direction = 'increasing'
|
||||
else:
|
||||
trend_direction = 'decreasing'
|
||||
|
||||
# Recent trend (last 90 days)
|
||||
if len(price_history) >= 90:
|
||||
recent_data = price_history.tail(90).copy()
|
||||
recent_X = recent_data['days_from_start'].values.reshape(-1, 1)
|
||||
recent_y = recent_data['price_per_unit'].values
|
||||
|
||||
recent_model = LinearRegression()
|
||||
recent_model.fit(recent_X, recent_y)
|
||||
|
||||
recent_slope = float(recent_model.coef_[0])
|
||||
recent_trend_pct = (recent_slope * 30 / recent_y.mean() * 100) if recent_y.mean() > 0 else 0
|
||||
else:
|
||||
recent_trend_pct = trend_pct_per_month
|
||||
|
||||
return {
|
||||
'trend_direction': trend_direction,
|
||||
'trend_pct_per_month': round(trend_pct_per_month, 2),
|
||||
'recent_trend_pct_per_month': round(recent_trend_pct, 2),
|
||||
'slope': round(slope, 4),
|
||||
'r_squared': round(r_squared, 3),
|
||||
'is_accelerating': abs(recent_trend_pct) > abs(trend_pct_per_month) * 1.5
|
||||
}
|
||||
|
||||
def _generate_price_forecast(
|
||||
self,
|
||||
price_history: pd.DataFrame,
|
||||
forecast_days: int,
|
||||
seasonal_analysis: Dict[str, Any],
|
||||
trend_analysis: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Generate price forecast for specified horizon.
|
||||
|
||||
Args:
|
||||
price_history: Historical price data
|
||||
forecast_days: Days to forecast
|
||||
seasonal_analysis: Seasonal patterns
|
||||
trend_analysis: Trend analysis
|
||||
|
||||
Returns:
|
||||
Price forecast
|
||||
"""
|
||||
current_price = price_history['price_per_unit'].iloc[-1]
|
||||
current_date = price_history['date'].iloc[-1]
|
||||
|
||||
# Simple forecast: current price + trend + seasonal adjustment
|
||||
trend_slope = trend_analysis['slope']
|
||||
|
||||
forecast_prices = []
|
||||
forecast_dates = []
|
||||
|
||||
for day in range(1, forecast_days + 1):
|
||||
forecast_date = current_date + timedelta(days=day)
|
||||
forecast_dates.append(forecast_date)
|
||||
|
||||
# Base forecast from trend
|
||||
base_forecast = current_price + (trend_slope * day)
|
||||
|
||||
# Seasonal adjustment
|
||||
if seasonal_analysis['has_seasonality']:
|
||||
month_name = forecast_date.strftime('%b')
|
||||
if month_name in seasonal_analysis['monthly_patterns']:
|
||||
month_deviation = seasonal_analysis['monthly_patterns'][month_name]['deviation_pct']
|
||||
seasonal_adjustment = base_forecast * (month_deviation / 100)
|
||||
base_forecast += seasonal_adjustment
|
||||
|
||||
forecast_prices.append(base_forecast)
|
||||
|
||||
forecast_prices = np.array(forecast_prices)
|
||||
|
||||
# Calculate confidence intervals (±2 std)
|
||||
historical_std = price_history['price_per_unit'].std()
|
||||
lower_bound = forecast_prices - 2 * historical_std
|
||||
upper_bound = forecast_prices + 2 * historical_std
|
||||
|
||||
return {
|
||||
'forecast_dates': [d.strftime('%Y-%m-%d') for d in forecast_dates],
|
||||
'forecast_prices': [round(float(p), 2) for p in forecast_prices],
|
||||
'lower_bound': [round(float(p), 2) for p in lower_bound],
|
||||
'upper_bound': [round(float(p), 2) for p in upper_bound],
|
||||
'mean_forecast_price': round(float(forecast_prices.mean()), 2),
|
||||
'min_forecast_price': round(float(forecast_prices.min()), 2),
|
||||
'max_forecast_price': round(float(forecast_prices.max()), 2),
|
||||
'confidence': self._calculate_forecast_confidence(price_history, trend_analysis)
|
||||
}
|
||||
|
||||
def _calculate_forecast_confidence(
|
||||
self,
|
||||
price_history: pd.DataFrame,
|
||||
trend_analysis: Dict[str, Any]
|
||||
) -> int:
|
||||
"""Calculate confidence in price forecast (0-100)."""
|
||||
confidence = 50 # Base confidence
|
||||
|
||||
# More data = higher confidence
|
||||
data_points = len(price_history)
|
||||
if data_points >= 365:
|
||||
confidence += 30
|
||||
elif data_points >= 180:
|
||||
confidence += 20
|
||||
else:
|
||||
confidence += 10
|
||||
|
||||
# Strong trend = higher confidence
|
||||
r_squared = trend_analysis['r_squared']
|
||||
if r_squared > 0.7:
|
||||
confidence += 20
|
||||
elif r_squared > 0.5:
|
||||
confidence += 10
|
||||
|
||||
# Low volatility = higher confidence
|
||||
cv = price_history['price_per_unit'].std() / price_history['price_per_unit'].mean()
|
||||
if cv < 0.1:
|
||||
confidence += 10
|
||||
elif cv < 0.2:
|
||||
confidence += 5
|
||||
|
||||
return min(100, confidence)
|
||||
|
||||
def _calculate_price_volatility(
|
||||
self,
|
||||
price_history: pd.DataFrame
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate price volatility metrics.
|
||||
|
||||
Args:
|
||||
price_history: Historical price data
|
||||
|
||||
Returns:
|
||||
Volatility analysis
|
||||
"""
|
||||
prices = price_history['price_per_unit'].values
|
||||
|
||||
# Coefficient of variation
|
||||
cv = float(prices.std() / prices.mean()) if prices.mean() > 0 else 0
|
||||
|
||||
# Price changes (day-to-day)
|
||||
price_changes = np.diff(prices)
|
||||
pct_changes = (price_changes / prices[:-1] * 100)
|
||||
|
||||
# Volatility classification
|
||||
if cv < 0.1:
|
||||
volatility_level = 'low'
|
||||
elif cv < 0.2:
|
||||
volatility_level = 'medium'
|
||||
else:
|
||||
volatility_level = 'high'
|
||||
|
||||
return {
|
||||
'coefficient_of_variation': round(cv, 3),
|
||||
'volatility_level': volatility_level,
|
||||
'avg_daily_change_pct': round(float(np.abs(pct_changes).mean()), 2),
|
||||
'max_daily_increase_pct': round(float(pct_changes.max()), 2),
|
||||
'max_daily_decrease_pct': round(float(pct_changes.min()), 2)
|
||||
}
|
||||
|
||||
    def _generate_procurement_recommendations(
        self,
        price_history: pd.DataFrame,
        forecast: Dict[str, Any],
        price_stats: Dict[str, float],
        volatility: Dict[str, Any],
        trend_analysis: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Generate buy/wait recommendations based on forecast.

        Decision order (first match wins):
        1. Forecast mean >5% below current price -> 'wait'
        2. Forecast mean >5% above current price -> 'buy_now' (high urgency)
        3. High volatility                       -> 'wait_for_dip'
        4. Current price >5% below historic mean -> 'buy_now' (medium urgency)
        5. Otherwise                             -> 'normal_purchase'

        Args:
            price_history: Historical data
            forecast: Price forecast
            price_stats: Price statistics
            volatility: Volatility analysis
            trend_analysis: Trend analysis

        Returns:
            Procurement recommendations, with i18n-friendly reasoning codes
            in 'reasoning_data' for downstream translation.
        """
        current_price = price_stats['current_price']
        forecast_mean = forecast['mean_forecast_price']
        forecast_min = forecast['min_forecast_price']

        # Calculate expected price change
        expected_change_pct = ((forecast_mean - current_price) / current_price * 100) if current_price > 0 else 0

        # Decision logic with i18n-friendly reasoning codes
        if expected_change_pct < -5:
            # Price expected to drop >5%
            action = 'wait'
            reasoning_data = {
                'type': 'decrease_expected',
                'parameters': {
                    'change_pct': round(abs(expected_change_pct), 1),
                    # NOTE(review): hard-coded 30 even though the caller may
                    # request a different forecast horizon — confirm intent.
                    'forecast_days': 30,
                    'current_price': round(current_price, 2),
                    'forecast_mean': round(forecast_mean, 2)
                }
            }
            urgency = 'low'

        elif expected_change_pct > 5:
            # Price expected to increase >5%
            action = 'buy_now'
            reasoning_data = {
                'type': 'increase_expected',
                'parameters': {
                    'change_pct': round(expected_change_pct, 1),
                    'forecast_days': 30,
                    'current_price': round(current_price, 2),
                    'forecast_mean': round(forecast_mean, 2)
                }
            }
            urgency = 'high'

        elif volatility['volatility_level'] == 'high':
            # High volatility - wait for dip
            action = 'wait_for_dip'
            reasoning_data = {
                'type': 'high_volatility',
                'parameters': {
                    'coefficient': round(volatility['coefficient_of_variation'], 2),
                    'volatility_level': volatility['volatility_level'],
                    'avg_daily_change_pct': round(volatility['avg_daily_change_pct'], 2)
                }
            }
            urgency = 'medium'

        elif current_price < price_stats['mean_price'] * 0.95:
            # Currently below average
            below_avg_pct = ((price_stats["mean_price"] - current_price) / price_stats["mean_price"] * 100)
            action = 'buy_now'
            reasoning_data = {
                'type': 'below_average',
                'parameters': {
                    'current_price': round(current_price, 2),
                    'mean_price': round(price_stats['mean_price'], 2),
                    'below_avg_pct': round(below_avg_pct, 1)
                }
            }
            urgency = 'medium'

        else:
            # Neutral
            action = 'normal_purchase'
            reasoning_data = {
                'type': 'stable',
                'parameters': {
                    'current_price': round(current_price, 2),
                    'forecast_mean': round(forecast_mean, 2),
                    'expected_change_pct': round(expected_change_pct, 2)
                }
            }
            urgency = 'low'

        # Optimal purchase timing: forecast_min is the rounded minimum of
        # the same rounded list, so .index() is guaranteed to find it.
        min_price_index = forecast['forecast_prices'].index(forecast_min)
        optimal_date = forecast['forecast_dates'][min_price_index]

        return {
            'action': action,
            'reasoning_data': reasoning_data,
            'urgency': urgency,
            'expected_price_change_pct': round(expected_change_pct, 2),
            'current_price': current_price,
            'forecast_mean_price': forecast_mean,
            'forecast_min_price': forecast_min,
            'optimal_purchase_date': optimal_date,
            'days_until_optimal': min_price_index + 1
        }
|
||||
|
||||
def _identify_bulk_opportunities(
|
||||
self,
|
||||
forecast: Dict[str, Any],
|
||||
price_stats: Dict[str, float],
|
||||
volatility: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Identify bulk purchase opportunities.
|
||||
|
||||
Args:
|
||||
forecast: Price forecast
|
||||
price_stats: Price statistics
|
||||
volatility: Volatility analysis
|
||||
|
||||
Returns:
|
||||
Bulk opportunity analysis
|
||||
"""
|
||||
current_price = price_stats['current_price']
|
||||
forecast_max = forecast['max_forecast_price']
|
||||
|
||||
# Potential savings from bulk buy at current price
|
||||
if forecast_max > current_price:
|
||||
potential_savings_pct = ((forecast_max - current_price) / current_price * 100)
|
||||
|
||||
if potential_savings_pct > 10:
|
||||
opportunity_level = 'high'
|
||||
elif potential_savings_pct > 5:
|
||||
opportunity_level = 'medium'
|
||||
else:
|
||||
opportunity_level = 'low'
|
||||
|
||||
has_opportunity = potential_savings_pct > 5
|
||||
|
||||
else:
|
||||
potential_savings_pct = 0
|
||||
opportunity_level = 'none'
|
||||
has_opportunity = False
|
||||
|
||||
return {
|
||||
'has_bulk_opportunity': has_opportunity,
|
||||
'opportunity_level': opportunity_level,
|
||||
'potential_savings_pct': round(potential_savings_pct, 2),
|
||||
'recommended_bulk_quantity_months': 2 if has_opportunity and volatility['volatility_level'] != 'high' else 1
|
||||
}
|
||||
|
||||
    def _generate_price_insights(
        self,
        tenant_id: str,
        ingredient_id: str,
        price_stats: Dict[str, float],
        forecast: Dict[str, Any],
        recommendations: Dict[str, Any],
        bulk_opportunities: Dict[str, Any],
        trend_analysis: Dict[str, Any],
        volatility: Dict[str, Any]
    ) -> List[Dict[str, Any]]:
        """
        Generate actionable pricing insights.

        Builds up to four insight payloads — buy-now OR wait recommendation,
        bulk-buy opportunity, high-volatility alert, and strong-trend notice
        — each shaped for the downstream insight pipeline (type/priority/
        category, impact fields, confidence, metrics_json,
        recommendation_actions, source tags).

        Note: tenant_id is accepted for interface symmetry but is not
        embedded in the payloads here — presumably added downstream; confirm.

        Returns:
            List of insights (possibly empty)
        """
        insights = []

        # Insight 1: Buy now recommendation
        if recommendations['action'] == 'buy_now':
            insights.append({
                'type': 'recommendation',
                'priority': recommendations['urgency'],
                'category': 'procurement',
                'title': f'Buy Now: Price Increasing {recommendations["expected_price_change_pct"]:.1f}%',
                'reasoning_data': recommendations['reasoning_data'],
                'impact_type': 'cost_avoidance',
                'impact_value': abs(recommendations['expected_price_change_pct']),
                'impact_unit': 'percentage',
                'confidence': forecast['confidence'],
                'metrics_json': {
                    'ingredient_id': ingredient_id,
                    'current_price': price_stats['current_price'],
                    'forecast_price': forecast['mean_forecast_price'],
                    'expected_change_pct': recommendations['expected_price_change_pct'],
                    'optimal_date': recommendations['optimal_purchase_date']
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Purchase Now',
                        'action': 'create_purchase_order',
                        'params': {
                            'ingredient_id': ingredient_id,
                            'priority': 'high'
                        }
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'price_forecaster'
            })

        # Insight 2: Wait recommendation (mutually exclusive with Insight 1)
        elif recommendations['action'] == 'wait':
            insights.append({
                'type': 'recommendation',
                'priority': 'medium',
                'category': 'procurement',
                'title': f'Wait to Buy: Price Decreasing {abs(recommendations["expected_price_change_pct"]):.1f}%',
                'reasoning_data': {
                    **recommendations['reasoning_data'],
                    'optimal_purchase_date': recommendations['optimal_purchase_date'],
                    'days_until_optimal': recommendations['days_until_optimal']
                },
                'impact_type': 'cost_savings',
                'impact_value': abs(recommendations['expected_price_change_pct']),
                'impact_unit': 'percentage',
                'confidence': forecast['confidence'],
                'metrics_json': {
                    'ingredient_id': ingredient_id,
                    'current_price': price_stats['current_price'],
                    'forecast_min_price': forecast['min_forecast_price'],
                    'optimal_date': recommendations['optimal_purchase_date'],
                    'days_until_optimal': recommendations['days_until_optimal']
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Delay Purchase',
                        'action': 'delay_purchase_order',
                        'params': {
                            'ingredient_id': ingredient_id,
                            'delay_days': recommendations['days_until_optimal']
                        }
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'price_forecaster'
            })

        # Insight 3: Bulk opportunity (independent of the action above)
        if bulk_opportunities['has_bulk_opportunity']:
            insights.append({
                'type': 'optimization',
                'priority': bulk_opportunities['opportunity_level'],
                'category': 'procurement',
                'title': f'Bulk Buy Opportunity: Save {bulk_opportunities["potential_savings_pct"]:.1f}%',
                'description': f'Current price is favorable. Purchasing {bulk_opportunities["recommended_bulk_quantity_months"]} months supply now could save {bulk_opportunities["potential_savings_pct"]:.1f}% vs future prices.',
                'impact_type': 'cost_savings',
                'impact_value': bulk_opportunities['potential_savings_pct'],
                'impact_unit': 'percentage',
                'confidence': forecast['confidence'],
                'metrics_json': {
                    'ingredient_id': ingredient_id,
                    'current_price': price_stats['current_price'],
                    'forecast_max_price': forecast['max_forecast_price'],
                    'savings_pct': bulk_opportunities['potential_savings_pct'],
                    'recommended_months_supply': bulk_opportunities['recommended_bulk_quantity_months']
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Create Bulk Order',
                        'action': 'create_bulk_purchase_order',
                        'params': {
                            'ingredient_id': ingredient_id,
                            'months_supply': bulk_opportunities['recommended_bulk_quantity_months']
                        }
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'price_forecaster'
            })

        # Insight 4: High volatility warning
        if volatility['volatility_level'] == 'high':
            insights.append({
                'type': 'alert',
                'priority': 'medium',
                'category': 'procurement',
                'title': f'High Price Volatility: CV={volatility["coefficient_of_variation"]:.2f}',
                'description': f'Ingredient {ingredient_id} shows high price volatility with {volatility["avg_daily_change_pct"]:.1f}% average daily change. Consider alternative suppliers or hedge strategies.',
                'impact_type': 'risk_warning',
                'impact_value': volatility['coefficient_of_variation'],
                'impact_unit': 'cv_score',
                # Fixed confidence: volatility is measured, not forecast
                'confidence': 90,
                'metrics_json': {
                    'ingredient_id': ingredient_id,
                    'volatility_level': volatility['volatility_level'],
                    'cv': volatility['coefficient_of_variation'],
                    'avg_daily_change_pct': volatility['avg_daily_change_pct']
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Find Alternative Suppliers',
                        'action': 'search_alternative_suppliers',
                        'params': {'ingredient_id': ingredient_id}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'price_forecaster'
            })

        # Insight 5: Strong price trend (informational, not actionable)
        if abs(trend_analysis['trend_pct_per_month']) > 5:
            direction = 'increasing' if trend_analysis['trend_pct_per_month'] > 0 else 'decreasing'
            insights.append({
                'type': 'insight',
                'priority': 'medium',
                'category': 'procurement',
                'title': f'Strong Price Trend: {direction.title()} {abs(trend_analysis["trend_pct_per_month"]):.1f}%/month',
                'description': f'Ingredient {ingredient_id} prices are {direction} at {abs(trend_analysis["trend_pct_per_month"]):.1f}% per month. Plan procurement strategy accordingly.',
                'impact_type': 'trend_warning',
                'impact_value': abs(trend_analysis['trend_pct_per_month']),
                'impact_unit': 'pct_per_month',
                # Confidence scales with the regression fit quality
                'confidence': int(trend_analysis['r_squared'] * 100),
                'metrics_json': {
                    'ingredient_id': ingredient_id,
                    'trend_direction': trend_analysis['trend_direction'],
                    'trend_pct_per_month': trend_analysis['trend_pct_per_month'],
                    'r_squared': trend_analysis['r_squared']
                },
                'actionable': False,
                'source_service': 'procurement',
                'source_model': 'price_forecaster'
            })

        return insights
|
||||
|
||||
def _insufficient_data_response(
|
||||
self,
|
||||
tenant_id: str,
|
||||
ingredient_id: str,
|
||||
price_history: pd.DataFrame
|
||||
) -> Dict[str, Any]:
|
||||
"""Return response when insufficient data available."""
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'ingredient_id': ingredient_id,
|
||||
'forecasted_at': datetime.utcnow().isoformat(),
|
||||
'history_days': len(price_history),
|
||||
'forecast_horizon_days': 0,
|
||||
'price_stats': {},
|
||||
'seasonal_analysis': {'has_seasonality': False},
|
||||
'trend_analysis': {},
|
||||
'forecast': {},
|
||||
'volatility': {},
|
||||
'recommendations': {
|
||||
'action': 'insufficient_data',
|
||||
'reasoning_data': {
|
||||
'type': 'insufficient_data',
|
||||
'parameters': {
|
||||
'history_days': len(price_history),
|
||||
'min_required_days': 180
|
||||
}
|
||||
},
|
||||
'urgency': 'low'
|
||||
},
|
||||
'bulk_opportunities': {'has_bulk_opportunity': False},
|
||||
'insights': []
|
||||
}
|
||||
|
||||
def get_seasonal_patterns(self, ingredient_id: str) -> Optional[Dict[str, Any]]:
    """Look up the cached seasonal-pattern analysis for *ingredient_id*.

    Returns None when the ingredient has not been forecasted yet.
    """
    cached = self.seasonal_patterns
    return cached[ingredient_id] if ingredient_id in cached else None
def get_volatility_score(self, ingredient_id: str) -> Optional[Dict[str, Any]]:
    """Look up the cached volatility assessment for *ingredient_id*.

    Returns None when the ingredient has not been assessed yet.
    """
    cached = self.volatility_scores
    return cached[ingredient_id] if ingredient_id in cached else None
449
services/procurement/app/ml/price_insights_orchestrator.py
Normal file
449
services/procurement/app/ml/price_insights_orchestrator.py
Normal file
@@ -0,0 +1,449 @@
|
||||
"""
|
||||
Price Insights Orchestrator
|
||||
Coordinates price forecasting and insight posting
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
from typing import Dict, List, Any, Optional
|
||||
import structlog
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.price_forecaster import PriceForecaster
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class PriceInsightsOrchestrator:
|
||||
"""
|
||||
Orchestrates price forecasting and insight generation workflow.
|
||||
|
||||
Workflow:
|
||||
1. Forecast prices from historical data
|
||||
2. Generate buy/wait/bulk recommendations
|
||||
3. Post insights to AI Insights Service
|
||||
4. Provide price forecasts for procurement planning
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    ai_insights_base_url: str = "http://ai-insights-service:8000",
    event_publisher: Optional[UnifiedEventPublisher] = None
):
    """Wire up the price forecaster, the AI Insights HTTP client, and the
    optional RabbitMQ event publisher.

    Args:
        ai_insights_base_url: Base URL of the AI Insights Service.
        event_publisher: Optional publisher; when None, insight events
            are not published (see _publish_insight_events).
    """
    # Assignments are independent; publisher may legitimately be None.
    self.event_publisher = event_publisher
    self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
    self.forecaster = PriceForecaster()
async def forecast_and_post_insights(
    self,
    tenant_id: str,
    ingredient_id: str,
    price_history: pd.DataFrame,
    forecast_horizon_days: int = 30,
    min_history_days: int = 180
) -> Dict[str, Any]:
    """
    Complete workflow: Forecast prices and post insights.

    Runs the forecaster, enriches the generated insights, posts them to
    the AI Insights Service, and publishes events for any insights the
    service reports as created.

    Args:
        tenant_id: Tenant identifier (must be a valid UUID string; it is
            passed through UUID() before posting)
        ingredient_id: Ingredient identifier
        price_history: Historical price data
        forecast_horizon_days: Days to forecast ahead
        min_history_days: Minimum days of history required

    Returns:
        Workflow results with forecast and posted insights

    Raises:
        ValueError: if tenant_id is not a parseable UUID (from UUID()).
    """
    logger.info(
        "Starting price forecasting workflow",
        tenant_id=tenant_id,
        ingredient_id=ingredient_id,
        history_days=len(price_history)
    )

    # Step 1: Forecast prices
    forecast_results = await self.forecaster.forecast_price(
        tenant_id=tenant_id,
        ingredient_id=ingredient_id,
        price_history=price_history,
        forecast_horizon_days=forecast_horizon_days,
        min_history_days=min_history_days
    )

    logger.info(
        "Price forecasting complete",
        ingredient_id=ingredient_id,
        recommendation=forecast_results.get('recommendations', {}).get('action'),
        insights_generated=len(forecast_results.get('insights', []))
    )

    # Step 2: Enrich insights with tenant_id and ingredient context
    enriched_insights = self._enrich_insights(
        forecast_results.get('insights', []),
        tenant_id,
        ingredient_id
    )

    # Step 3: Post insights to AI Insights Service (skipped when empty)
    if enriched_insights:
        post_results = await self.ai_insights_client.create_insights_bulk(
            tenant_id=UUID(tenant_id),
            insights=enriched_insights
        )

        logger.info(
            "Price insights posted to AI Insights Service",
            ingredient_id=ingredient_id,
            total=post_results['total'],
            successful=post_results['successful'],
            failed=post_results['failed']
        )
    else:
        # Synthetic zero result keeps the return shape uniform below.
        post_results = {'total': 0, 'successful': 0, 'failed': 0}
        logger.info("No insights to post for ingredient", ingredient_id=ingredient_id)

    # Step 4: Publish insight events to RabbitMQ.
    # NOTE(review): assumes the bulk-create response may carry a
    # 'created_insights' list — confirm against AIInsightsClient.
    created_insights = post_results.get('created_insights', [])
    if created_insights:
        ingredient_context = {'ingredient_id': ingredient_id}
        await self._publish_insight_events(
            tenant_id=tenant_id,
            insights=created_insights,
            ingredient_context=ingredient_context
        )

    # Step 5: Return comprehensive results
    return {
        'tenant_id': tenant_id,
        'ingredient_id': ingredient_id,
        'forecasted_at': forecast_results['forecasted_at'],
        'history_days': forecast_results['history_days'],
        'forecast': forecast_results.get('forecast', {}),
        'recommendation': forecast_results.get('recommendations', {}),
        'bulk_opportunity': forecast_results.get('bulk_opportunities', {}),
        'insights_generated': len(enriched_insights),
        'insights_posted': post_results['successful'],
        'insights_failed': post_results['failed'],
        'created_insights': post_results.get('created_insights', [])
    }
def _enrich_insights(
|
||||
self,
|
||||
insights: List[Dict[str, Any]],
|
||||
tenant_id: str,
|
||||
ingredient_id: str
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Enrich insights with required fields for AI Insights Service.
|
||||
|
||||
Args:
|
||||
insights: Raw insights from forecaster
|
||||
tenant_id: Tenant identifier
|
||||
ingredient_id: Ingredient identifier
|
||||
|
||||
Returns:
|
||||
Enriched insights ready for posting
|
||||
"""
|
||||
enriched = []
|
||||
|
||||
for insight in insights:
|
||||
# Add required tenant_id
|
||||
enriched_insight = insight.copy()
|
||||
enriched_insight['tenant_id'] = tenant_id
|
||||
|
||||
# Add ingredient context to metrics
|
||||
if 'metrics_json' not in enriched_insight:
|
||||
enriched_insight['metrics_json'] = {}
|
||||
|
||||
enriched_insight['metrics_json']['ingredient_id'] = ingredient_id
|
||||
|
||||
# Add source metadata
|
||||
enriched_insight['source_service'] = 'procurement'
|
||||
enriched_insight['source_model'] = 'price_forecaster'
|
||||
enriched_insight['detected_at'] = datetime.utcnow().isoformat()
|
||||
|
||||
enriched.append(enriched_insight)
|
||||
|
||||
return enriched
|
||||
|
||||
async def forecast_all_ingredients(
    self,
    tenant_id: str,
    ingredients_data: Dict[str, pd.DataFrame],
    forecast_horizon_days: int = 30,
    min_history_days: int = 180
) -> Dict[str, Any]:
    """
    Forecast prices for all ingredients for a tenant.

    Runs the single-ingredient workflow per ingredient (errors are logged
    and skipped, not propagated), tallies recommendation actions, and
    posts one extra portfolio-level summary insight when any buy-now or
    bulk opportunity was found.

    Args:
        tenant_id: Tenant identifier
        ingredients_data: Dict of {ingredient_id: price_history DataFrame}
        forecast_horizon_days: Days to forecast
        min_history_days: Minimum history required

    Returns:
        Comprehensive forecasting results
    """
    logger.info(
        "Forecasting prices for all ingredients",
        tenant_id=tenant_id,
        ingredients=len(ingredients_data)
    )

    all_results = []
    total_insights_posted = 0
    buy_now_count = 0
    wait_count = 0
    bulk_opportunity_count = 0

    # Forecast each ingredient; a failure for one ingredient must not
    # abort the rest of the portfolio.
    for ingredient_id, price_history in ingredients_data.items():
        try:
            results = await self.forecast_and_post_insights(
                tenant_id=tenant_id,
                ingredient_id=ingredient_id,
                price_history=price_history,
                forecast_horizon_days=forecast_horizon_days,
                min_history_days=min_history_days
            )

            all_results.append(results)
            total_insights_posted += results['insights_posted']

            # Count recommendations
            action = results['recommendation'].get('action')
            if action == 'buy_now':
                buy_now_count += 1
            elif action in ['wait', 'wait_for_dip']:
                wait_count += 1

            if results['bulk_opportunity'].get('has_bulk_opportunity'):
                bulk_opportunity_count += 1

        except Exception as e:
            # Best-effort batch: log and continue with the next ingredient.
            logger.error(
                "Error forecasting ingredient",
                ingredient_id=ingredient_id,
                error=str(e)
            )

    # Generate summary insight only when something is actionable.
    if buy_now_count > 0 or bulk_opportunity_count > 0:
        summary_insight = self._generate_portfolio_summary_insight(
            tenant_id, all_results, buy_now_count, wait_count, bulk_opportunity_count
        )

        if summary_insight:
            # 'all_ingredients' stands in for a single ingredient_id in
            # the summary's metrics_json.
            enriched_summary = self._enrich_insights(
                [summary_insight], tenant_id, 'all_ingredients'
            )

            post_results = await self.ai_insights_client.create_insights_bulk(
                tenant_id=UUID(tenant_id),
                insights=enriched_summary
            )

            total_insights_posted += post_results['successful']

    logger.info(
        "All ingredients forecasting complete",
        tenant_id=tenant_id,
        ingredients_forecasted=len(all_results),
        total_insights_posted=total_insights_posted,
        buy_now_recommendations=buy_now_count,
        bulk_opportunities=bulk_opportunity_count
    )

    return {
        'tenant_id': tenant_id,
        'forecasted_at': datetime.utcnow().isoformat(),
        'ingredients_forecasted': len(all_results),
        'ingredient_results': all_results,
        'total_insights_posted': total_insights_posted,
        'buy_now_count': buy_now_count,
        'wait_count': wait_count,
        'bulk_opportunity_count': bulk_opportunity_count
    }
async def _publish_insight_events(
    self,
    tenant_id: str,
    insights: List[Dict[str, Any]],
    ingredient_context: Optional[Dict[str, Any]] = None
) -> None:
    """
    Publish insight events to RabbitMQ for alert processing.

    No-op (with a warning) when no event publisher was configured.
    Publish failures are logged per insight and never propagated.

    Args:
        tenant_id: Tenant identifier
        insights: List of created insights
        ingredient_context: Additional context about the ingredient
    """
    if not self.event_publisher:
        logger.warning("No event publisher available for price insights")
        return

    for insight in insights:
        # Determine severity based on confidence and priority
        confidence = insight.get('confidence', 0)
        priority = insight.get('priority', 'medium')

        # Map priority to severity, with confidence as tiebreaker:
        # critical (or confident high) -> high; high (or confident
        # medium) -> medium; everything else -> low.
        if priority == 'critical' or (priority == 'high' and confidence >= 70):
            severity = 'high'
        elif priority == 'high' or (priority == 'medium' and confidence >= 80):
            severity = 'medium'
        else:
            severity = 'low'

        # Prepare the event data (flat dict; consumers see only this).
        event_data = {
            'insight_id': insight.get('id'),
            'type': insight.get('type'),
            'title': insight.get('title'),
            'description': insight.get('description'),
            'category': insight.get('category'),
            'priority': insight.get('priority'),
            'confidence': confidence,
            'recommendation': insight.get('recommendation_actions', []),
            'impact_type': insight.get('impact_type'),
            'impact_value': insight.get('impact_value'),
            'ingredient_id': ingredient_context.get('ingredient_id') if ingredient_context else None,
            'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
            'source_service': 'procurement',
            'source_model': 'price_forecaster'
        }

        try:
            await self.event_publisher.publish_recommendation(
                event_type='ai_price_forecast',
                tenant_id=tenant_id,
                severity=severity,
                data=event_data
            )
            logger.info(
                "Published price insight event",
                tenant_id=tenant_id,
                insight_id=insight.get('id'),
                severity=severity
            )
        except Exception as e:
            # Event publishing is best-effort; a broker outage must not
            # fail the forecasting workflow.
            logger.error(
                "Failed to publish price insight event",
                tenant_id=tenant_id,
                insight_id=insight.get('id'),
                error=str(e)
            )
def _generate_portfolio_summary_insight(
|
||||
self,
|
||||
tenant_id: str,
|
||||
all_results: List[Dict[str, Any]],
|
||||
buy_now_count: int,
|
||||
wait_count: int,
|
||||
bulk_opportunity_count: int
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Generate portfolio-level summary insight.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
all_results: All ingredient forecast results
|
||||
buy_now_count: Number of buy now recommendations
|
||||
wait_count: Number of wait recommendations
|
||||
bulk_opportunity_count: Number of bulk opportunities
|
||||
|
||||
Returns:
|
||||
Summary insight or None
|
||||
"""
|
||||
if buy_now_count == 0 and bulk_opportunity_count == 0:
|
||||
return None
|
||||
|
||||
# Calculate potential savings from bulk opportunities
|
||||
total_potential_savings = 0
|
||||
for result in all_results:
|
||||
bulk_opp = result.get('bulk_opportunity', {})
|
||||
if bulk_opp.get('has_bulk_opportunity'):
|
||||
# Estimate savings (simplified)
|
||||
savings_pct = bulk_opp.get('potential_savings_pct', 0)
|
||||
total_potential_savings += savings_pct
|
||||
|
||||
avg_potential_savings = total_potential_savings / max(1, bulk_opportunity_count)
|
||||
|
||||
description_parts = []
|
||||
if buy_now_count > 0:
|
||||
description_parts.append(f'{buy_now_count} ingredients show price increases - purchase soon')
|
||||
if bulk_opportunity_count > 0:
|
||||
description_parts.append(f'{bulk_opportunity_count} ingredients have bulk buying opportunities (avg {avg_potential_savings:.1f}% savings)')
|
||||
|
||||
return {
|
||||
'type': 'recommendation',
|
||||
'priority': 'high' if buy_now_count > 2 else 'medium',
|
||||
'category': 'procurement',
|
||||
'title': f'Procurement Timing Opportunities: {buy_now_count + bulk_opportunity_count} Items',
|
||||
'description': 'Price forecast analysis identified procurement timing opportunities. ' + '. '.join(description_parts) + '.',
|
||||
'impact_type': 'cost_optimization',
|
||||
'impact_value': avg_potential_savings if bulk_opportunity_count > 0 else buy_now_count,
|
||||
'impact_unit': 'percentage' if bulk_opportunity_count > 0 else 'items',
|
||||
'confidence': 75,
|
||||
'metrics_json': {
|
||||
'ingredients_analyzed': len(all_results),
|
||||
'buy_now_count': buy_now_count,
|
||||
'wait_count': wait_count,
|
||||
'bulk_opportunity_count': bulk_opportunity_count,
|
||||
'avg_potential_savings_pct': round(avg_potential_savings, 2)
|
||||
},
|
||||
'actionable': True,
|
||||
'recommendation_actions': [
|
||||
{
|
||||
'label': 'Review Price Forecasts',
|
||||
'action': 'review_price_forecasts',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
},
|
||||
{
|
||||
'label': 'Create Optimized Orders',
|
||||
'action': 'create_optimized_purchase_orders',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
}
|
||||
],
|
||||
'source_service': 'procurement',
|
||||
'source_model': 'price_forecaster'
|
||||
}
|
||||
|
||||
async def get_price_forecast(
    self,
    ingredient_id: str
) -> Optional[Dict[str, Any]]:
    """
    Return the forecaster's cached seasonal patterns for an ingredient.

    NOTE(review): despite the name, this delegates to the forecaster's
    seasonal-pattern cache rather than returning a full price forecast.

    Args:
        ingredient_id: Ingredient identifier

    Returns:
        Seasonal patterns or None if not forecasted
    """
    forecaster = self.forecaster
    return forecaster.get_seasonal_patterns(ingredient_id)
async def get_volatility_assessment(
    self,
    ingredient_id: str
) -> Optional[Dict[str, Any]]:
    """
    Return the forecaster's cached volatility assessment for an ingredient.

    Args:
        ingredient_id: Ingredient identifier

    Returns:
        Volatility assessment or None if not assessed
    """
    forecaster = self.forecaster
    return forecaster.get_volatility_score(ingredient_id)
async def close(self):
    """Close HTTP client connections.

    Call on service shutdown to release the AI Insights client's
    underlying HTTP resources.
    """
    await self.ai_insights_client.close()
|
||||
399
services/procurement/app/ml/supplier_insights_orchestrator.py
Normal file
399
services/procurement/app/ml/supplier_insights_orchestrator.py
Normal file
@@ -0,0 +1,399 @@
|
||||
"""
|
||||
Supplier Insights Orchestrator
|
||||
Coordinates supplier performance analysis and insight posting
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
from typing import Dict, List, Any, Optional
|
||||
import structlog
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.supplier_performance_predictor import SupplierPerformancePredictor
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SupplierInsightsOrchestrator:
|
||||
"""
|
||||
Orchestrates supplier performance analysis and insight generation workflow.
|
||||
|
||||
Workflow:
|
||||
1. Analyze supplier performance from historical orders
|
||||
2. Generate insights for procurement risk management
|
||||
3. Post insights to AI Insights Service
|
||||
4. Publish recommendation events to RabbitMQ
|
||||
5. Provide supplier comparison and recommendations
|
||||
6. Track supplier reliability scores
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    ai_insights_base_url: str = "http://ai-insights-service:8000",
    event_publisher: Optional[UnifiedEventPublisher] = None
):
    """Wire up the performance predictor, the AI Insights HTTP client, and
    the optional RabbitMQ event publisher.

    Args:
        ai_insights_base_url: Base URL of the AI Insights Service.
        event_publisher: Optional publisher; when None, insight events
            are not published (see _publish_insight_events).
    """
    # Assignments are independent; publisher may legitimately be None.
    self.event_publisher = event_publisher
    self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
    self.predictor = SupplierPerformancePredictor()
async def analyze_and_post_supplier_insights(
    self,
    tenant_id: str,
    supplier_id: str,
    order_history: pd.DataFrame,
    min_orders: int = 10
) -> Dict[str, Any]:
    """
    Complete workflow: Analyze supplier and post insights.

    Runs the performance predictor, enriches the generated insights,
    posts them to the AI Insights Service, and publishes events for any
    insights the service reports as created.

    Args:
        tenant_id: Tenant identifier (must be a valid UUID string; it is
            passed through UUID() before posting)
        supplier_id: Supplier identifier
        order_history: Historical order data
        min_orders: Minimum orders for analysis

    Returns:
        Workflow results with analysis and posted insights

    Raises:
        ValueError: if tenant_id is not a parseable UUID (from UUID()).
    """
    logger.info(
        "Starting supplier performance analysis workflow",
        tenant_id=tenant_id,
        supplier_id=supplier_id,
        orders=len(order_history)
    )

    # Step 1: Analyze supplier performance
    analysis_results = await self.predictor.analyze_supplier_performance(
        tenant_id=tenant_id,
        supplier_id=supplier_id,
        order_history=order_history,
        min_orders=min_orders
    )

    logger.info(
        "Supplier analysis complete",
        supplier_id=supplier_id,
        reliability_score=analysis_results.get('reliability_score'),
        insights_generated=len(analysis_results.get('insights', []))
    )

    # Step 2: Enrich insights with tenant_id and supplier context
    enriched_insights = self._enrich_insights(
        analysis_results.get('insights', []),
        tenant_id,
        supplier_id
    )

    # Step 3: Post insights to AI Insights Service (skipped when empty)
    if enriched_insights:
        post_results = await self.ai_insights_client.create_insights_bulk(
            tenant_id=UUID(tenant_id),
            insights=enriched_insights
        )

        logger.info(
            "Supplier insights posted to AI Insights Service",
            supplier_id=supplier_id,
            total=post_results['total'],
            successful=post_results['successful'],
            failed=post_results['failed']
        )
    else:
        # Synthetic zero result keeps the return shape uniform below.
        post_results = {'total': 0, 'successful': 0, 'failed': 0}
        logger.info("No insights to post for supplier", supplier_id=supplier_id)

    # Step 4: Publish insight events to RabbitMQ.
    # NOTE(review): assumes the bulk-create response may carry a
    # 'created_insights' list — confirm against AIInsightsClient.
    created_insights = post_results.get('created_insights', [])
    if created_insights:
        supplier_context = {'supplier_id': supplier_id}
        await self._publish_insight_events(
            tenant_id=tenant_id,
            insights=created_insights,
            supplier_context=supplier_context
        )

    # Step 5: Return comprehensive results
    return {
        'tenant_id': tenant_id,
        'supplier_id': supplier_id,
        'analyzed_at': analysis_results['analyzed_at'],
        'orders_analyzed': analysis_results['orders_analyzed'],
        'reliability_score': analysis_results.get('reliability_score'),
        'risk_assessment': analysis_results.get('risk_assessment', {}),
        'predictions': analysis_results.get('predictions', {}),
        'insights_generated': len(enriched_insights),
        'insights_posted': post_results['successful'],
        'insights_failed': post_results['failed'],
        'created_insights': post_results.get('created_insights', [])
    }
def _enrich_insights(
|
||||
self,
|
||||
insights: List[Dict[str, Any]],
|
||||
tenant_id: str,
|
||||
supplier_id: str
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Enrich insights with required fields for AI Insights Service.
|
||||
|
||||
Args:
|
||||
insights: Raw insights from predictor
|
||||
tenant_id: Tenant identifier
|
||||
supplier_id: Supplier identifier
|
||||
|
||||
Returns:
|
||||
Enriched insights ready for posting
|
||||
"""
|
||||
enriched = []
|
||||
|
||||
for insight in insights:
|
||||
# Add required tenant_id
|
||||
enriched_insight = insight.copy()
|
||||
enriched_insight['tenant_id'] = tenant_id
|
||||
|
||||
# Add supplier context to metrics
|
||||
if 'metrics_json' not in enriched_insight:
|
||||
enriched_insight['metrics_json'] = {}
|
||||
|
||||
enriched_insight['metrics_json']['supplier_id'] = supplier_id
|
||||
|
||||
# Add source metadata
|
||||
enriched_insight['source_service'] = 'procurement'
|
||||
enriched_insight['source_model'] = 'supplier_performance_predictor'
|
||||
enriched_insight['detected_at'] = datetime.utcnow().isoformat()
|
||||
|
||||
enriched.append(enriched_insight)
|
||||
|
||||
return enriched
|
||||
|
||||
async def _publish_insight_events(
    self,
    tenant_id: str,
    insights: List[Dict[str, Any]],
    supplier_context: Optional[Dict[str, Any]] = None
) -> None:
    """
    Publish insight events to RabbitMQ for alert processing.

    No-op (with a warning) when no event publisher was configured.
    Publish failures are logged per insight and never propagated.

    Args:
        tenant_id: Tenant identifier
        insights: List of created insights
        supplier_context: Additional context about the supplier
    """
    if not self.event_publisher:
        logger.warning("No event publisher available for supplier insights")
        return

    for insight in insights:
        # Determine severity based on confidence and priority
        confidence = insight.get('confidence', 0)
        priority = insight.get('priority', 'medium')

        # Map priority to severity, with confidence as tiebreaker:
        # critical (or confident high) -> high; high (or confident
        # medium) -> medium; everything else -> low.
        if priority == 'critical' or (priority == 'high' and confidence >= 70):
            severity = 'high'
        elif priority == 'high' or (priority == 'medium' and confidence >= 80):
            severity = 'medium'
        else:
            severity = 'low'

        # Prepare the event data (flat dict; consumers see only this).
        event_data = {
            'insight_id': insight.get('id'),
            'type': insight.get('type'),
            'title': insight.get('title'),
            'description': insight.get('description'),
            'category': insight.get('category'),
            'priority': insight.get('priority'),
            'confidence': confidence,
            'recommendation': insight.get('recommendation_actions', []),
            'impact_type': insight.get('impact_type'),
            'impact_value': insight.get('impact_value'),
            'supplier_id': supplier_context.get('supplier_id') if supplier_context else None,
            'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
            'source_service': 'procurement',
            'source_model': 'supplier_performance_predictor'
        }

        try:
            await self.event_publisher.publish_recommendation(
                event_type='ai_supplier_recommendation',
                tenant_id=tenant_id,
                severity=severity,
                data=event_data
            )
            logger.info(
                "Published supplier insight event",
                tenant_id=tenant_id,
                insight_id=insight.get('id'),
                severity=severity
            )
        except Exception as e:
            # Event publishing is best-effort; a broker outage must not
            # fail the analysis workflow.
            logger.error(
                "Failed to publish supplier insight event",
                tenant_id=tenant_id,
                insight_id=insight.get('id'),
                error=str(e)
            )
async def analyze_all_suppliers(
    self,
    tenant_id: str,
    suppliers_data: Dict[str, pd.DataFrame],
    min_orders: int = 10
) -> Dict[str, Any]:
    """
    Analyze all suppliers for a tenant and generate comparative insights.

    Runs the single-supplier workflow per supplier (errors are logged and
    skipped, not propagated), then compares the successfully scored
    suppliers and posts any comparison-driven insights.

    Args:
        tenant_id: Tenant identifier
        suppliers_data: Dict of {supplier_id: order_history DataFrame}
        min_orders: Minimum orders for analysis

    Returns:
        Comprehensive analysis with supplier comparison
    """
    logger.info(
        "Analyzing all suppliers for tenant",
        tenant_id=tenant_id,
        suppliers=len(suppliers_data)
    )

    all_results = []
    total_insights_posted = 0

    # Analyze each supplier; a failure for one supplier must not abort
    # the rest of the batch.
    for supplier_id, order_history in suppliers_data.items():
        try:
            results = await self.analyze_and_post_supplier_insights(
                tenant_id=tenant_id,
                supplier_id=supplier_id,
                order_history=order_history,
                min_orders=min_orders
            )

            all_results.append(results)
            total_insights_posted += results['insights_posted']

        except Exception as e:
            # Best-effort batch: log and continue with the next supplier.
            logger.error(
                "Error analyzing supplier",
                supplier_id=supplier_id,
                error=str(e)
            )

    # Compare suppliers — only those with a usable reliability score
    # (insufficient-data results are excluded).
    comparison = self.predictor.compare_suppliers(
        [r for r in all_results if r.get('reliability_score') is not None]
    )

    # Generate comparative insights if needed
    comparative_insights = self._generate_comparative_insights(
        tenant_id, comparison
    )

    if comparative_insights:
        # 'all_suppliers' stands in for a single supplier_id in the
        # comparative insights' metrics_json.
        enriched_comparative = self._enrich_insights(
            comparative_insights, tenant_id, 'all_suppliers'
        )

        post_results = await self.ai_insights_client.create_insights_bulk(
            tenant_id=UUID(tenant_id),
            insights=enriched_comparative
        )

        total_insights_posted += post_results['successful']

    logger.info(
        "All suppliers analysis complete",
        tenant_id=tenant_id,
        suppliers_analyzed=len(all_results),
        total_insights_posted=total_insights_posted
    )

    return {
        'tenant_id': tenant_id,
        'analyzed_at': datetime.utcnow().isoformat(),
        'suppliers_analyzed': len(all_results),
        'supplier_results': all_results,
        'comparison': comparison,
        'total_insights_posted': total_insights_posted
    }
def _generate_comparative_insights(
|
||||
self,
|
||||
tenant_id: str,
|
||||
comparison: Dict[str, Any]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Generate insights from supplier comparison.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
comparison: Supplier comparison results
|
||||
|
||||
Returns:
|
||||
List of comparative insights
|
||||
"""
|
||||
insights = []
|
||||
|
||||
if 'recommendations' in comparison and comparison['recommendations']:
|
||||
for rec in comparison['recommendations']:
|
||||
if 'URGENT' in rec['recommendation']:
|
||||
priority = 'critical'
|
||||
elif 'high-risk' in rec.get('reason', '').lower():
|
||||
priority = 'high'
|
||||
else:
|
||||
priority = 'medium'
|
||||
|
||||
insights.append({
|
||||
'type': 'recommendation',
|
||||
'priority': priority,
|
||||
'category': 'procurement',
|
||||
'title': 'Supplier Comparison: Action Required',
|
||||
'description': rec['recommendation'],
|
||||
'impact_type': 'cost_optimization',
|
||||
'impact_value': 0,
|
||||
'impact_unit': 'recommendation',
|
||||
'confidence': 85,
|
||||
'metrics_json': {
|
||||
'comparison_type': 'multi_supplier',
|
||||
'suppliers_compared': comparison['suppliers_compared'],
|
||||
'top_supplier': comparison.get('top_supplier'),
|
||||
'top_score': comparison.get('top_supplier_score'),
|
||||
'reason': rec.get('reason', '')
|
||||
},
|
||||
'actionable': True,
|
||||
'recommendation_actions': [
|
||||
{
|
||||
'label': 'Review Supplier Portfolio',
|
||||
'action': 'review_supplier_portfolio',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
}
|
||||
],
|
||||
'source_service': 'procurement',
|
||||
'source_model': 'supplier_performance_predictor'
|
||||
})
|
||||
|
||||
return insights
|
||||
|
||||
async def get_supplier_risk_score(
    self,
    supplier_id: str
) -> Optional[int]:
    """
    Return the predictor's cached reliability score for a supplier.

    Args:
        supplier_id: Supplier identifier

    Returns:
        Reliability score (0-100) or None if not analyzed
    """
    predictor = self.predictor
    return predictor.get_supplier_reliability_score(supplier_id)
async def close(self):
    """Close HTTP client connections.

    Call on service shutdown to release the AI Insights client's
    underlying HTTP resources.
    """
    await self.ai_insights_client.close()
|
||||
701
services/procurement/app/ml/supplier_performance_predictor.py
Normal file
701
services/procurement/app/ml/supplier_performance_predictor.py
Normal file
@@ -0,0 +1,701 @@
|
||||
"""
|
||||
Supplier Performance Predictor
|
||||
Predicts supplier reliability, delivery delays, and quality issues
|
||||
Generates insights for procurement risk management
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
import structlog
|
||||
from datetime import datetime, timedelta
|
||||
from collections import defaultdict
|
||||
from sklearn.ensemble import GradientBoostingClassifier, GradientBoostingRegressor
|
||||
from sklearn.preprocessing import StandardScaler
|
||||
import warnings
|
||||
warnings.filterwarnings('ignore')
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SupplierPerformancePredictor:
|
||||
"""
|
||||
Predicts supplier performance metrics for procurement risk management.
|
||||
|
||||
Capabilities:
|
||||
1. Delivery delay probability prediction
|
||||
2. Quality issue likelihood scoring
|
||||
3. Supplier reliability scoring (0-100)
|
||||
4. Alternative supplier recommendations
|
||||
5. Procurement risk assessment
|
||||
6. Insight generation for high-risk suppliers
|
||||
"""
|
||||
|
||||
def __init__(self):
    """Initialize an untrained predictor with empty model and score caches."""
    # Models are None until trained from sufficient order history.
    self.delay_model = None
    self.quality_model = None
    # Feature scaling for the gradient-boosting models.
    self.scaler = StandardScaler()
    # Cached reliability scores (0-100) keyed by supplier_id.
    self.reliability_scores = {}
    # Column order used when assembling model feature matrices.
    self.feature_columns = []
    async def analyze_supplier_performance(
        self,
        tenant_id: str,
        supplier_id: str,
        order_history: pd.DataFrame,
        min_orders: int = 10
    ) -> Dict[str, Any]:
        """
        Analyze historical supplier performance and generate insights.

        Orchestrates the full pipeline: performance metrics ->
        reliability score -> trend-based predictions -> risk assessment ->
        insight generation, then caches the reliability score for later
        lookups via get_supplier_reliability_score().

        Args:
            tenant_id: Tenant identifier
            supplier_id: Supplier identifier
            order_history: Historical orders with columns:
                - order_date
                - expected_delivery_date
                - actual_delivery_date
                - order_quantity
                - received_quantity
                - quality_issues (bool)
                - quality_score (0-100)
                - order_value
            min_orders: Minimum orders required for analysis

        Returns:
            Dictionary with performance metrics and insights
        """
        logger.info(
            "Analyzing supplier performance",
            tenant_id=tenant_id,
            supplier_id=supplier_id,
            orders=len(order_history)
        )

        # Guard: with too little history every downstream statistic is
        # noisy, so return a structured "unknown" payload instead.
        if len(order_history) < min_orders:
            logger.warning(
                "Insufficient order history",
                supplier_id=supplier_id,
                orders=len(order_history),
                required=min_orders
            )
            return self._insufficient_data_response(tenant_id, supplier_id)

        # Calculate performance metrics
        metrics = self._calculate_performance_metrics(order_history)

        # Calculate reliability score
        reliability_score = self._calculate_reliability_score(metrics)

        # Predict future performance
        predictions = self._predict_future_performance(order_history, metrics)

        # Assess procurement risk
        risk_assessment = self._assess_procurement_risk(
            metrics, reliability_score, predictions
        )

        # Generate insights
        insights = self._generate_supplier_insights(
            tenant_id, supplier_id, metrics, reliability_score,
            risk_assessment, predictions
        )

        # Cache reliability score for get_supplier_reliability_score().
        self.reliability_scores[supplier_id] = reliability_score

        logger.info(
            "Supplier performance analysis complete",
            supplier_id=supplier_id,
            reliability_score=reliability_score,
            insights_generated=len(insights)
        )

        return {
            'tenant_id': tenant_id,
            'supplier_id': supplier_id,
            'analyzed_at': datetime.utcnow().isoformat(),
            'orders_analyzed': len(order_history),
            'metrics': metrics,
            'reliability_score': reliability_score,
            'predictions': predictions,
            'risk_assessment': risk_assessment,
            'insights': insights
        }
def _calculate_performance_metrics(
|
||||
self,
|
||||
order_history: pd.DataFrame
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate comprehensive supplier performance metrics.
|
||||
|
||||
Args:
|
||||
order_history: Historical order data
|
||||
|
||||
Returns:
|
||||
Dictionary of performance metrics
|
||||
"""
|
||||
# Ensure datetime columns
|
||||
order_history['order_date'] = pd.to_datetime(order_history['order_date'])
|
||||
order_history['expected_delivery_date'] = pd.to_datetime(order_history['expected_delivery_date'])
|
||||
order_history['actual_delivery_date'] = pd.to_datetime(order_history['actual_delivery_date'])
|
||||
|
||||
# Calculate delivery delays
|
||||
order_history['delivery_delay_days'] = (
|
||||
order_history['actual_delivery_date'] - order_history['expected_delivery_date']
|
||||
).dt.days
|
||||
|
||||
order_history['is_delayed'] = order_history['delivery_delay_days'] > 0
|
||||
order_history['is_early'] = order_history['delivery_delay_days'] < 0
|
||||
|
||||
# Calculate quantity accuracy
|
||||
order_history['quantity_accuracy'] = (
|
||||
order_history['received_quantity'] / order_history['order_quantity']
|
||||
)
|
||||
|
||||
order_history['is_short_delivery'] = order_history['quantity_accuracy'] < 1.0
|
||||
order_history['is_over_delivery'] = order_history['quantity_accuracy'] > 1.0
|
||||
|
||||
metrics = {
|
||||
# Delivery metrics
|
||||
'total_orders': int(len(order_history)),
|
||||
'on_time_orders': int((~order_history['is_delayed']).sum()),
|
||||
'delayed_orders': int(order_history['is_delayed'].sum()),
|
||||
'on_time_rate': float((~order_history['is_delayed']).mean() * 100),
|
||||
'avg_delivery_delay_days': float(order_history[order_history['is_delayed']]['delivery_delay_days'].mean()) if order_history['is_delayed'].any() else 0.0,
|
||||
'max_delivery_delay_days': int(order_history['delivery_delay_days'].max()),
|
||||
'delivery_delay_std': float(order_history['delivery_delay_days'].std()),
|
||||
|
||||
# Quantity accuracy metrics
|
||||
'avg_quantity_accuracy': float(order_history['quantity_accuracy'].mean() * 100),
|
||||
'short_deliveries': int(order_history['is_short_delivery'].sum()),
|
||||
'short_delivery_rate': float(order_history['is_short_delivery'].mean() * 100),
|
||||
|
||||
# Quality metrics
|
||||
'quality_issues': int(order_history['quality_issues'].sum()) if 'quality_issues' in order_history.columns else 0,
|
||||
'quality_issue_rate': float(order_history['quality_issues'].mean() * 100) if 'quality_issues' in order_history.columns else 0.0,
|
||||
'avg_quality_score': float(order_history['quality_score'].mean()) if 'quality_score' in order_history.columns else 100.0,
|
||||
|
||||
# Consistency metrics
|
||||
'delivery_consistency': float(100 - order_history['delivery_delay_days'].std() * 10), # Lower variance = higher consistency
|
||||
'quantity_consistency': float(100 - (order_history['quantity_accuracy'].std() * 100)),
|
||||
|
||||
# Recent trend (last 30 days vs overall)
|
||||
'recent_on_time_rate': self._calculate_recent_trend(order_history, 'is_delayed', days=30),
|
||||
|
||||
# Cost metrics
|
||||
'total_order_value': float(order_history['order_value'].sum()) if 'order_value' in order_history.columns else 0.0,
|
||||
'avg_order_value': float(order_history['order_value'].mean()) if 'order_value' in order_history.columns else 0.0
|
||||
}
|
||||
|
||||
# Ensure all metrics are valid (no NaN)
|
||||
for key, value in metrics.items():
|
||||
if isinstance(value, float) and np.isnan(value):
|
||||
metrics[key] = 0.0
|
||||
|
||||
return metrics
|
||||
|
||||
def _calculate_recent_trend(
|
||||
self,
|
||||
order_history: pd.DataFrame,
|
||||
metric_column: str,
|
||||
days: int = 30
|
||||
) -> float:
|
||||
"""Calculate recent trend for a metric."""
|
||||
cutoff_date = datetime.utcnow() - timedelta(days=days)
|
||||
recent_orders = order_history[order_history['order_date'] >= cutoff_date]
|
||||
|
||||
if len(recent_orders) < 3:
|
||||
return 0.0 # Not enough recent data
|
||||
|
||||
if metric_column == 'is_delayed':
|
||||
return float((~recent_orders['is_delayed']).mean() * 100)
|
||||
else:
|
||||
return float(recent_orders[metric_column].mean() * 100)
|
||||
|
||||
def _calculate_reliability_score(
|
||||
self,
|
||||
metrics: Dict[str, Any]
|
||||
) -> int:
|
||||
"""
|
||||
Calculate overall supplier reliability score (0-100).
|
||||
|
||||
Factors:
|
||||
- On-time delivery rate (40%)
|
||||
- Quantity accuracy (20%)
|
||||
- Quality score (25%)
|
||||
- Consistency (15%)
|
||||
"""
|
||||
# On-time delivery score (40 points)
|
||||
on_time_score = metrics['on_time_rate'] * 0.40
|
||||
|
||||
# Quantity accuracy score (20 points)
|
||||
quantity_score = min(100, metrics['avg_quantity_accuracy']) * 0.20
|
||||
|
||||
# Quality score (25 points)
|
||||
quality_score = metrics['avg_quality_score'] * 0.25
|
||||
|
||||
# Consistency score (15 points)
|
||||
# Average of delivery and quantity consistency
|
||||
consistency_score = (
|
||||
(metrics['delivery_consistency'] + metrics['quantity_consistency']) / 2
|
||||
) * 0.15
|
||||
|
||||
total_score = on_time_score + quantity_score + quality_score + consistency_score
|
||||
|
||||
# Penalties
|
||||
# Severe penalty for high quality issue rate
|
||||
if metrics['quality_issue_rate'] > 10:
|
||||
total_score *= 0.8 # 20% penalty
|
||||
|
||||
# Penalty for high short delivery rate
|
||||
if metrics['short_delivery_rate'] > 15:
|
||||
total_score *= 0.9 # 10% penalty
|
||||
|
||||
return int(round(max(0, min(100, total_score))))
|
||||
|
||||
def _predict_future_performance(
|
||||
self,
|
||||
order_history: pd.DataFrame,
|
||||
metrics: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Predict future supplier performance based on trends.
|
||||
|
||||
Args:
|
||||
order_history: Historical order data
|
||||
metrics: Calculated performance metrics
|
||||
|
||||
Returns:
|
||||
Dictionary of predictions
|
||||
"""
|
||||
# Simple trend-based predictions
|
||||
# For production, could use ML models trained on multi-supplier data
|
||||
|
||||
predictions = {
|
||||
'next_order_delay_probability': 0.0,
|
||||
'next_order_quality_issue_probability': 0.0,
|
||||
'predicted_delivery_days': 0,
|
||||
'confidence': 0
|
||||
}
|
||||
|
||||
# Delay probability based on historical rate and recent trend
|
||||
historical_delay_rate = metrics['delayed_orders'] / max(1, metrics['total_orders'])
|
||||
recent_on_time_rate = metrics['recent_on_time_rate'] / 100
|
||||
|
||||
# Weight recent performance higher
|
||||
predicted_on_time_prob = (historical_delay_rate * 0.3) + ((1 - recent_on_time_rate) * 0.7)
|
||||
predictions['next_order_delay_probability'] = float(min(1.0, max(0.0, predicted_on_time_prob)))
|
||||
|
||||
# Quality issue probability
|
||||
if metrics['quality_issues'] > 0:
|
||||
quality_issue_prob = metrics['quality_issue_rate'] / 100
|
||||
predictions['next_order_quality_issue_probability'] = float(quality_issue_prob)
|
||||
|
||||
# Predicted delivery days (expected delay)
|
||||
if metrics['avg_delivery_delay_days'] > 0:
|
||||
predictions['predicted_delivery_days'] = int(round(metrics['avg_delivery_delay_days']))
|
||||
|
||||
# Confidence based on data quantity and recency
|
||||
if metrics['total_orders'] >= 50:
|
||||
predictions['confidence'] = 90
|
||||
elif metrics['total_orders'] >= 30:
|
||||
predictions['confidence'] = 80
|
||||
elif metrics['total_orders'] >= 20:
|
||||
predictions['confidence'] = 70
|
||||
else:
|
||||
predictions['confidence'] = 60
|
||||
|
||||
return predictions
|
||||
|
||||
def _assess_procurement_risk(
|
||||
self,
|
||||
metrics: Dict[str, Any],
|
||||
reliability_score: int,
|
||||
predictions: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Assess overall procurement risk for this supplier.
|
||||
|
||||
Risk levels: low, medium, high, critical
|
||||
"""
|
||||
risk_factors = []
|
||||
risk_score = 0 # 0-100, higher = more risky
|
||||
|
||||
# Low reliability
|
||||
if reliability_score < 60:
|
||||
risk_factors.append('Low reliability score')
|
||||
risk_score += 30
|
||||
elif reliability_score < 75:
|
||||
risk_factors.append('Medium reliability score')
|
||||
risk_score += 15
|
||||
|
||||
# High delay probability
|
||||
if predictions['next_order_delay_probability'] > 0.5:
|
||||
risk_factors.append('High delay probability')
|
||||
risk_score += 25
|
||||
elif predictions['next_order_delay_probability'] > 0.3:
|
||||
risk_factors.append('Moderate delay probability')
|
||||
risk_score += 15
|
||||
|
||||
# Quality issues
|
||||
if metrics['quality_issue_rate'] > 15:
|
||||
risk_factors.append('High quality issue rate')
|
||||
risk_score += 25
|
||||
elif metrics['quality_issue_rate'] > 5:
|
||||
risk_factors.append('Moderate quality issue rate')
|
||||
risk_score += 10
|
||||
|
||||
# Quantity accuracy issues
|
||||
if metrics['short_delivery_rate'] > 20:
|
||||
risk_factors.append('Frequent short deliveries')
|
||||
risk_score += 15
|
||||
elif metrics['short_delivery_rate'] > 10:
|
||||
risk_factors.append('Occasional short deliveries')
|
||||
risk_score += 8
|
||||
|
||||
# Low consistency
|
||||
if metrics['delivery_consistency'] < 60:
|
||||
risk_factors.append('Inconsistent delivery timing')
|
||||
risk_score += 10
|
||||
|
||||
# Determine risk level
|
||||
if risk_score >= 70:
|
||||
risk_level = 'critical'
|
||||
elif risk_score >= 50:
|
||||
risk_level = 'high'
|
||||
elif risk_score >= 30:
|
||||
risk_level = 'medium'
|
||||
else:
|
||||
risk_level = 'low'
|
||||
|
||||
return {
|
||||
'risk_level': risk_level,
|
||||
'risk_score': min(100, risk_score),
|
||||
'risk_factors': risk_factors,
|
||||
'recommendation': self._get_risk_recommendation(risk_level, risk_factors)
|
||||
}
|
||||
|
||||
def _get_risk_recommendation(
|
||||
self,
|
||||
risk_level: str,
|
||||
risk_factors: List[str]
|
||||
) -> str:
|
||||
"""Generate risk mitigation recommendation."""
|
||||
if risk_level == 'critical':
|
||||
return 'URGENT: Consider switching to alternative supplier. Current supplier poses significant operational risk.'
|
||||
elif risk_level == 'high':
|
||||
return 'HIGH PRIORITY: Increase safety stock and have backup supplier ready. Monitor closely.'
|
||||
elif risk_level == 'medium':
|
||||
return 'MONITOR: Keep standard safety stock. Review performance quarterly.'
|
||||
else:
|
||||
return 'LOW RISK: Supplier performing well. Maintain current relationship.'
|
||||
|
||||
    def _generate_supplier_insights(
        self,
        tenant_id: str,
        supplier_id: str,
        metrics: Dict[str, Any],
        reliability_score: int,
        risk_assessment: Dict[str, Any],
        predictions: Dict[str, Any]
    ) -> List[Dict[str, Any]]:
        """
        Generate actionable insights for procurement team.

        Each insight is a flat dict (type/priority/title/description plus
        metrics_json and recommendation_actions) presumably matching the
        AI-insights ingestion schema — verify against the consumer.

        Args:
            tenant_id: Tenant ID
            supplier_id: Supplier ID
            metrics: Performance metrics
            reliability_score: Overall reliability (0-100)
            risk_assessment: Risk assessment results
                NOTE(review): currently unused in this method — confirm
                whether an insight should be derived from it.
            predictions: Future performance predictions

        Returns:
            List of insight dictionaries (possibly empty)
        """
        insights = []

        # Insight 1: Low reliability alert
        if reliability_score < 60:
            insights.append({
                'type': 'alert',
                'priority': 'critical' if reliability_score < 50 else 'high',
                'category': 'procurement',
                'title': f'Low Supplier Reliability: {reliability_score}/100',
                'description': f'Supplier {supplier_id} has low reliability score of {reliability_score}. On-time rate: {metrics["on_time_rate"]:.1f}%, Quality: {metrics["avg_quality_score"]:.1f}. Consider alternative suppliers.',
                'impact_type': 'operational_risk',
                'impact_value': 100 - reliability_score,
                'impact_unit': 'risk_points',
                'confidence': 85,
                'metrics_json': {
                    'supplier_id': supplier_id,
                    'reliability_score': reliability_score,
                    'on_time_rate': round(metrics['on_time_rate'], 2),
                    'quality_score': round(metrics['avg_quality_score'], 2),
                    'quality_issue_rate': round(metrics['quality_issue_rate'], 2),
                    'delayed_orders': metrics['delayed_orders'],
                    'total_orders': metrics['total_orders']
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Find Alternative Supplier',
                        'action': 'search_alternative_suppliers',
                        'params': {'current_supplier_id': supplier_id}
                    },
                    {
                        'label': 'Increase Safety Stock',
                        'action': 'adjust_safety_stock',
                        'params': {'supplier_id': supplier_id, 'multiplier': 1.5}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'supplier_performance_predictor'
            })

        # Insight 2: High delay probability (prediction, not yet observed)
        if predictions['next_order_delay_probability'] > 0.4:
            delay_prob_pct = predictions['next_order_delay_probability'] * 100
            insights.append({
                'type': 'prediction',
                'priority': 'high' if delay_prob_pct > 60 else 'medium',
                'category': 'procurement',
                'title': f'High Delay Risk: {delay_prob_pct:.0f}% Probability',
                'description': f'Supplier {supplier_id} has {delay_prob_pct:.0f}% probability of delaying next order. Expected delay: {predictions["predicted_delivery_days"]} days. Plan accordingly.',
                'impact_type': 'operational_risk',
                'impact_value': delay_prob_pct,
                'impact_unit': 'probability_percent',
                'confidence': predictions['confidence'],
                'metrics_json': {
                    'supplier_id': supplier_id,
                    'delay_probability': round(delay_prob_pct, 2),
                    'predicted_delay_days': predictions['predicted_delivery_days'],
                    'historical_delay_rate': round(metrics['delayed_orders'] / max(1, metrics['total_orders']) * 100, 2),
                    'avg_delay_days': round(metrics['avg_delivery_delay_days'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Order Earlier',
                        'action': 'adjust_order_lead_time',
                        'params': {
                            'supplier_id': supplier_id,
                            # Predicted delay plus a 2-day buffer.
                            'additional_days': predictions['predicted_delivery_days'] + 2
                        }
                    },
                    {
                        'label': 'Increase Safety Stock',
                        'action': 'adjust_safety_stock',
                        'params': {'supplier_id': supplier_id, 'multiplier': 1.3}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'supplier_performance_predictor'
            })

        # Insight 3: Quality issues above 10% of orders
        if metrics['quality_issue_rate'] > 10:
            insights.append({
                'type': 'alert',
                'priority': 'high',
                'category': 'procurement',
                'title': f'Quality Issues: {metrics["quality_issue_rate"]:.1f}% of Orders',
                'description': f'Supplier {supplier_id} has quality issues in {metrics["quality_issue_rate"]:.1f}% of orders ({metrics["quality_issues"]} of {metrics["total_orders"]}). This impacts product quality and customer satisfaction.',
                'impact_type': 'quality_risk',
                'impact_value': metrics['quality_issue_rate'],
                'impact_unit': 'percentage',
                'confidence': 90,
                'metrics_json': {
                    'supplier_id': supplier_id,
                    'quality_issue_rate': round(metrics['quality_issue_rate'], 2),
                    'quality_issues': metrics['quality_issues'],
                    'avg_quality_score': round(metrics['avg_quality_score'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Review Supplier Quality',
                        'action': 'schedule_supplier_review',
                        'params': {'supplier_id': supplier_id, 'reason': 'quality_issues'}
                    },
                    {
                        'label': 'Increase Inspection',
                        'action': 'increase_quality_checks',
                        'params': {'supplier_id': supplier_id}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'supplier_performance_predictor'
            })

        # Insight 4: Excellent performance (positive insight)
        if reliability_score >= 90:
            insights.append({
                'type': 'insight',
                'priority': 'low',
                'category': 'procurement',
                'title': f'Excellent Supplier Performance: {reliability_score}/100',
                'description': f'Supplier {supplier_id} demonstrates excellent performance with {reliability_score} reliability score. On-time: {metrics["on_time_rate"]:.1f}%, Quality: {metrics["avg_quality_score"]:.1f}. Consider expanding partnership.',
                'impact_type': 'positive_performance',
                'impact_value': reliability_score,
                'impact_unit': 'score',
                'confidence': 90,
                'metrics_json': {
                    'supplier_id': supplier_id,
                    'reliability_score': reliability_score,
                    'on_time_rate': round(metrics['on_time_rate'], 2),
                    'quality_score': round(metrics['avg_quality_score'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Increase Order Volume',
                        'action': 'adjust_supplier_allocation',
                        'params': {'supplier_id': supplier_id, 'increase_pct': 20}
                    },
                    {
                        'label': 'Negotiate Better Terms',
                        'action': 'initiate_negotiation',
                        'params': {'supplier_id': supplier_id, 'reason': 'volume_increase'}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'supplier_performance_predictor'
            })

        # Insight 5: Performance decline — recent on-time rate more than
        # 15 percentage points below the historical average. The > 0 check
        # skips the "insufficient recent data" sentinel (0.0) from
        # _calculate_recent_trend.
        if metrics['recent_on_time_rate'] > 0 and metrics['recent_on_time_rate'] < metrics['on_time_rate'] - 15:
            insights.append({
                'type': 'alert',
                'priority': 'medium',
                'category': 'procurement',
                'title': 'Supplier Performance Decline Detected',
                'description': f'Supplier {supplier_id} recent performance ({metrics["recent_on_time_rate"]:.1f}% on-time) is significantly worse than historical average ({metrics["on_time_rate"]:.1f}%). Investigate potential issues.',
                'impact_type': 'performance_decline',
                'impact_value': metrics['on_time_rate'] - metrics['recent_on_time_rate'],
                'impact_unit': 'percentage_points',
                'confidence': 75,
                'metrics_json': {
                    'supplier_id': supplier_id,
                    'recent_on_time_rate': round(metrics['recent_on_time_rate'], 2),
                    'historical_on_time_rate': round(metrics['on_time_rate'], 2),
                    'decline': round(metrics['on_time_rate'] - metrics['recent_on_time_rate'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Contact Supplier',
                        'action': 'schedule_supplier_meeting',
                        'params': {'supplier_id': supplier_id, 'reason': 'performance_decline'}
                    },
                    {
                        'label': 'Monitor Closely',
                        'action': 'increase_monitoring_frequency',
                        'params': {'supplier_id': supplier_id}
                    }
                ],
                'source_service': 'procurement',
                'source_model': 'supplier_performance_predictor'
            })

        logger.info(
            "Generated supplier insights",
            supplier_id=supplier_id,
            insights=len(insights)
        )

        return insights
def _insufficient_data_response(
|
||||
self,
|
||||
tenant_id: str,
|
||||
supplier_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""Return response when insufficient data available."""
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'supplier_id': supplier_id,
|
||||
'analyzed_at': datetime.utcnow().isoformat(),
|
||||
'orders_analyzed': 0,
|
||||
'metrics': {},
|
||||
'reliability_score': None,
|
||||
'predictions': {},
|
||||
'risk_assessment': {
|
||||
'risk_level': 'unknown',
|
||||
'risk_score': None,
|
||||
'risk_factors': ['Insufficient historical data'],
|
||||
'recommendation': 'Collect more order history before assessing supplier performance.'
|
||||
},
|
||||
'insights': []
|
||||
}
|
||||
|
||||
def compare_suppliers(
|
||||
self,
|
||||
suppliers_analysis: List[Dict[str, Any]],
|
||||
product_category: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Compare multiple suppliers and provide recommendations.
|
||||
|
||||
Args:
|
||||
suppliers_analysis: List of supplier analysis results
|
||||
product_category: Optional product category filter
|
||||
|
||||
Returns:
|
||||
Comparison report with recommendations
|
||||
"""
|
||||
if not suppliers_analysis:
|
||||
return {'error': 'No suppliers to compare'}
|
||||
|
||||
# Sort by reliability score
|
||||
ranked_suppliers = sorted(
|
||||
suppliers_analysis,
|
||||
key=lambda x: x.get('reliability_score', 0),
|
||||
reverse=True
|
||||
)
|
||||
|
||||
comparison = {
|
||||
'analyzed_at': datetime.utcnow().isoformat(),
|
||||
'suppliers_compared': len(ranked_suppliers),
|
||||
'product_category': product_category,
|
||||
'top_supplier': ranked_suppliers[0]['supplier_id'],
|
||||
'top_supplier_score': ranked_suppliers[0]['reliability_score'],
|
||||
'bottom_supplier': ranked_suppliers[-1]['supplier_id'],
|
||||
'bottom_supplier_score': ranked_suppliers[-1]['reliability_score'],
|
||||
'ranked_suppliers': [
|
||||
{
|
||||
'supplier_id': s['supplier_id'],
|
||||
'reliability_score': s['reliability_score'],
|
||||
'risk_level': s['risk_assessment']['risk_level']
|
||||
}
|
||||
for s in ranked_suppliers
|
||||
],
|
||||
'recommendations': []
|
||||
}
|
||||
|
||||
# Generate comparison insights
|
||||
if len(ranked_suppliers) >= 2:
|
||||
score_gap = ranked_suppliers[0]['reliability_score'] - ranked_suppliers[-1]['reliability_score']
|
||||
|
||||
if score_gap > 30:
|
||||
comparison['recommendations'].append({
|
||||
'recommendation': f'Consider consolidating orders with top supplier {ranked_suppliers[0]["supplier_id"]} (score: {ranked_suppliers[0]["reliability_score"]})',
|
||||
'reason': f'Significant performance gap ({score_gap} points) from lowest performer'
|
||||
})
|
||||
|
||||
# Check for high-risk suppliers
|
||||
high_risk = [s for s in ranked_suppliers if s['risk_assessment']['risk_level'] in ['high', 'critical']]
|
||||
if high_risk:
|
||||
comparison['recommendations'].append({
|
||||
'recommendation': f'URGENT: Replace {len(high_risk)} high-risk supplier(s)',
|
||||
'reason': 'Significant operational risk from unreliable suppliers',
|
||||
'affected_suppliers': [s['supplier_id'] for s in high_risk]
|
||||
})
|
||||
|
||||
return comparison
|
||||
|
||||
def get_supplier_reliability_score(self, supplier_id: str) -> Optional[int]:
|
||||
"""Get cached reliability score for a supplier."""
|
||||
return self.reliability_scores.get(supplier_id)
|
||||
38
services/procurement/app/models/__init__.py
Normal file
38
services/procurement/app/models/__init__.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/models/__init__.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Service Models
|
||||
"""
|
||||
|
||||
from .procurement_plan import ProcurementPlan, ProcurementRequirement
|
||||
from .purchase_order import (
|
||||
PurchaseOrder,
|
||||
PurchaseOrderItem,
|
||||
PurchaseOrderStatus,
|
||||
Delivery,
|
||||
DeliveryItem,
|
||||
DeliveryStatus,
|
||||
SupplierInvoice,
|
||||
InvoiceStatus,
|
||||
QualityRating,
|
||||
)
|
||||
|
||||
# Public, stable API of the procurement models package; wildcard imports
# and re-export tooling rely on this list staying in sync with the
# imports above.
__all__ = [
    # Procurement Planning
    "ProcurementPlan",
    "ProcurementRequirement",
    # Purchase Orders
    "PurchaseOrder",
    "PurchaseOrderItem",
    "PurchaseOrderStatus",
    # Deliveries
    "Delivery",
    "DeliveryItem",
    "DeliveryStatus",
    # Invoices
    "SupplierInvoice",
    "InvoiceStatus",
    # Enums
    "QualityRating",
]
234
services/procurement/app/models/procurement_plan.py
Normal file
234
services/procurement/app/models/procurement_plan.py
Normal file
@@ -0,0 +1,234 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/models/procurement_plan.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Planning Models
|
||||
Migrated from Orders Service
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from sqlalchemy import Column, String, Boolean, DateTime, Date, Numeric, Text, Integer, ForeignKey
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class ProcurementPlan(Base):
    """Master procurement plan for coordinating supply needs across orders and production.

    One row per planning cycle: aggregates demand (orders + production),
    tracks the approval/execution lifecycle, and records post-hoc
    performance metrics. Individual line items live in
    ProcurementRequirement (``requirements`` relationship).
    """
    __tablename__ = "procurement_plans"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    plan_number = Column(String(50), nullable=False, unique=True, index=True)  # human-readable business key

    # Plan scope and timing
    plan_date = Column(Date, nullable=False, index=True)
    plan_period_start = Column(Date, nullable=False)
    plan_period_end = Column(Date, nullable=False)
    planning_horizon_days = Column(Integer, nullable=False, default=14)

    # Plan status and lifecycle
    status = Column(String(50), nullable=False, default="draft", index=True)
    # Status values: draft, pending_approval, approved, in_execution, completed, cancelled

    plan_type = Column(String(50), nullable=False, default="regular")  # regular, emergency, seasonal
    priority = Column(String(20), nullable=False, default="normal")  # high, normal, low

    # Business model context
    business_model = Column(String(50), nullable=True)  # individual_bakery, central_bakery
    procurement_strategy = Column(String(50), nullable=False, default="just_in_time")  # just_in_time, bulk, mixed

    # Plan totals and summary
    total_requirements = Column(Integer, nullable=False, default=0)
    total_estimated_cost = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    total_approved_cost = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    cost_variance = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))  # estimated vs approved delta

    # Demand analysis
    total_demand_orders = Column(Integer, nullable=False, default=0)
    total_demand_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    total_production_requirements = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    safety_stock_buffer = Column(Numeric(5, 2), nullable=False, default=Decimal("20.00"))  # Percentage

    # Supplier coordination
    primary_suppliers_count = Column(Integer, nullable=False, default=0)
    backup_suppliers_count = Column(Integer, nullable=False, default=0)
    supplier_diversification_score = Column(Numeric(3, 1), nullable=True)  # 1.0 to 10.0

    # Risk assessment
    supply_risk_level = Column(String(20), nullable=False, default="low")  # low, medium, high, critical
    demand_forecast_confidence = Column(Numeric(3, 1), nullable=True)  # 1.0 to 10.0
    seasonality_adjustment = Column(Numeric(5, 2), nullable=False, default=Decimal("0.00"))

    # Execution tracking (lifecycle timestamps; NULL until the step occurs)
    approved_at = Column(DateTime(timezone=True), nullable=True)
    approved_by = Column(UUID(as_uuid=True), nullable=True)
    execution_started_at = Column(DateTime(timezone=True), nullable=True)
    execution_completed_at = Column(DateTime(timezone=True), nullable=True)

    # Performance metrics (populated after execution)
    fulfillment_rate = Column(Numeric(5, 2), nullable=True)  # Percentage
    on_time_delivery_rate = Column(Numeric(5, 2), nullable=True)  # Percentage
    cost_accuracy = Column(Numeric(5, 2), nullable=True)  # Percentage
    quality_score = Column(Numeric(3, 1), nullable=True)  # 1.0 to 10.0

    # Integration data (denormalized snapshots from sibling services)
    source_orders = Column(JSONB, nullable=True)  # Orders that drove this plan
    production_schedules = Column(JSONB, nullable=True)  # Associated production schedules
    inventory_snapshots = Column(JSONB, nullable=True)  # Inventory levels at planning time
    forecast_data = Column(JSONB, nullable=True)  # Forecasting service data used for this plan

    # Communication and collaboration
    stakeholder_notifications = Column(JSONB, nullable=True)  # Who was notified and when
    approval_workflow = Column(JSONB, nullable=True)  # Approval chain and status

    # Special considerations
    special_requirements = Column(Text, nullable=True)
    seasonal_adjustments = Column(JSONB, nullable=True)
    emergency_provisions = Column(JSONB, nullable=True)

    # External references
    erp_reference = Column(String(100), nullable=True)
    supplier_portal_reference = Column(String(100), nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=True)
    updated_by = Column(UUID(as_uuid=True), nullable=True)

    # Additional metadata (free-form; "metadata" is reserved by SQLAlchemy)
    plan_metadata = Column(JSONB, nullable=True)

    # Relationships: line items are deleted together with the plan
    requirements = relationship("ProcurementRequirement", back_populates="plan", cascade="all, delete-orphan")
class ProcurementRequirement(Base):
    """Individual procurement requirement line within a ProcurementPlan.

    Each row represents one product/ingredient that must be procured,
    carrying the demand breakdown that produced it, the supplier and cost
    estimates used for planning, and the downstream purchase-order and
    delivery tracking state. Rows are deleted together with their parent
    plan (FK is ON DELETE CASCADE and the parent relationship uses
    delete-orphan cascade).

    NOTE(review): unlike the purchase-order models, this table has no
    tenant_id column — tenant scoping presumably flows through plan_id;
    confirm before querying this table directly.
    """
    __tablename__ = "procurement_requirements"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Parent plan; rows are removed when the plan is deleted (CASCADE).
    plan_id = Column(UUID(as_uuid=True), ForeignKey("procurement_plans.id", ondelete="CASCADE"), nullable=False)
    # Human-readable identifier; indexed but not unique at the DB level.
    requirement_number = Column(String(50), nullable=False, index=True)

    # Product/ingredient information (denormalized from the owning service)
    product_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to products/ingredients
    product_name = Column(String(200), nullable=False)
    product_sku = Column(String(100), nullable=True)
    product_category = Column(String(100), nullable=True)
    product_type = Column(String(50), nullable=False, default="ingredient")  # ingredient, packaging, supplies

    # Local production tracking
    is_locally_produced = Column(Boolean, nullable=False, default=False)  # If true, this is for a locally-produced item
    recipe_id = Column(UUID(as_uuid=True), nullable=True)  # Recipe used for BOM explosion
    parent_requirement_id = Column(UUID(as_uuid=True), nullable=True)  # If this is from BOM explosion
    bom_explosion_level = Column(Integer, nullable=False, default=0)  # Depth in BOM tree; 0 = top-level requirement

    # Requirement details
    required_quantity = Column(Numeric(12, 3), nullable=False)
    unit_of_measure = Column(String(50), nullable=False)
    safety_stock_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    total_quantity_needed = Column(Numeric(12, 3), nullable=False)

    # Current inventory situation (snapshot taken at planning time)
    current_stock_level = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    reserved_stock = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    available_stock = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    net_requirement = Column(Numeric(12, 3), nullable=False)

    # Demand breakdown (components that sum into the total requirement)
    order_demand = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    production_demand = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    forecast_demand = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    buffer_demand = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))

    # Supplier information (soft references; supplier data lives in the Suppliers Service)
    preferred_supplier_id = Column(UUID(as_uuid=True), nullable=True)
    backup_supplier_id = Column(UUID(as_uuid=True), nullable=True)
    supplier_name = Column(String(200), nullable=True)
    supplier_lead_time_days = Column(Integer, nullable=True)
    minimum_order_quantity = Column(Numeric(12, 3), nullable=True)

    # Pricing and cost
    estimated_unit_cost = Column(Numeric(10, 4), nullable=True)
    estimated_total_cost = Column(Numeric(12, 2), nullable=True)
    last_purchase_cost = Column(Numeric(10, 4), nullable=True)
    cost_variance = Column(Numeric(10, 2), nullable=False, default=Decimal("0.00"))

    # Timing requirements
    required_by_date = Column(Date, nullable=False)
    lead_time_buffer_days = Column(Integer, nullable=False, default=1)
    suggested_order_date = Column(Date, nullable=False)
    latest_order_date = Column(Date, nullable=False)

    # Quality and specifications
    quality_specifications = Column(JSONB, nullable=True)
    special_requirements = Column(Text, nullable=True)
    storage_requirements = Column(String(200), nullable=True)
    shelf_life_days = Column(Integer, nullable=True)

    # Requirement status
    status = Column(String(50), nullable=False, default="pending")
    # Status values: pending, approved, ordered, partially_received, received, cancelled

    priority = Column(String(20), nullable=False, default="normal")  # critical, high, normal, low
    risk_level = Column(String(20), nullable=False, default="low")  # low, medium, high, critical

    # Purchase order tracking (filled once the requirement is ordered)
    purchase_order_id = Column(UUID(as_uuid=True), nullable=True)
    purchase_order_number = Column(String(50), nullable=True)
    ordered_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    ordered_at = Column(DateTime(timezone=True), nullable=True)

    # Delivery tracking
    expected_delivery_date = Column(Date, nullable=True)
    actual_delivery_date = Column(Date, nullable=True)
    received_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    delivery_status = Column(String(50), nullable=False, default="pending")

    # Performance tracking
    fulfillment_rate = Column(Numeric(5, 2), nullable=True)  # Percentage
    on_time_delivery = Column(Boolean, nullable=True)
    quality_rating = Column(Numeric(3, 1), nullable=True)  # 1.0 to 10.0

    # Source traceability
    source_orders = Column(JSONB, nullable=True)  # Orders that contributed to this requirement
    source_production_batches = Column(JSONB, nullable=True)  # Production batches needing this
    demand_analysis = Column(JSONB, nullable=True)  # Detailed demand breakdown

    # Smart procurement calculation metadata
    calculation_method = Column(String(100), nullable=True)  # Method used: REORDER_POINT_TRIGGERED, FORECAST_DRIVEN_PROACTIVE, etc.
    ai_suggested_quantity = Column(Numeric(12, 3), nullable=True)  # Pure AI forecast quantity
    adjusted_quantity = Column(Numeric(12, 3), nullable=True)  # Final quantity after applying constraints
    adjustment_reason = Column(Text, nullable=True)  # Human-readable explanation of adjustments
    price_tier_applied = Column(JSONB, nullable=True)  # Price tier information if applicable
    supplier_minimum_applied = Column(Boolean, nullable=False, default=False)  # Whether supplier minimum was enforced
    storage_limit_applied = Column(Boolean, nullable=False, default=False)  # Whether storage limit was hit
    reorder_rule_applied = Column(Boolean, nullable=False, default=False)  # Whether reorder rules were used

    # Approval and authorization
    approved_quantity = Column(Numeric(12, 3), nullable=True)
    approved_cost = Column(Numeric(12, 2), nullable=True)
    approved_at = Column(DateTime(timezone=True), nullable=True)
    approved_by = Column(UUID(as_uuid=True), nullable=True)

    # Notes and communication
    procurement_notes = Column(Text, nullable=True)
    supplier_communication = Column(JSONB, nullable=True)

    # Audit fields (timestamps are set/updated by the database, not the app)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Additional metadata
    requirement_metadata = Column(JSONB, nullable=True)

    # Relationships
    plan = relationship("ProcurementPlan", back_populates="requirements")
|
||||
381
services/procurement/app/models/purchase_order.py
Normal file
381
services/procurement/app/models/purchase_order.py
Normal file
@@ -0,0 +1,381 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/models/purchase_order.py
|
||||
# ================================================================
|
||||
"""
|
||||
Purchase Order Models
|
||||
Migrated from Suppliers Service - Now owned by Procurement Service
|
||||
"""
|
||||
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship, deferred
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class PurchaseOrderStatus(enum.Enum):
    """States a purchase order can occupy over its lifecycle.

    Each member's value equals its name, so the string stored in the
    database is identical to the Python identifier.
    """

    draft = "draft"                            # order is still a draft
    pending_approval = "pending_approval"      # order is pending approval
    approved = "approved"                      # order has been approved
    sent_to_supplier = "sent_to_supplier"      # order was sent to the supplier
    confirmed = "confirmed"                    # order confirmed by supplier
    partially_received = "partially_received"  # order partially received
    completed = "completed"                    # order completed
    cancelled = "cancelled"                    # order cancelled
    disputed = "disputed"                      # order is in dispute
|
||||
|
||||
|
||||
class DeliveryStatus(enum.Enum):
    """States used to track a delivery from scheduling to completion.

    Member values mirror the member names so the stored strings match
    the Python identifiers exactly.
    """

    scheduled = "scheduled"                    # delivery has been scheduled
    in_transit = "in_transit"                  # goods are in transit
    out_for_delivery = "out_for_delivery"      # goods are out for delivery
    delivered = "delivered"                    # delivery completed in full
    partially_delivered = "partially_delivered"  # only part of the goods arrived
    failed_delivery = "failed_delivery"        # delivery attempt failed
    returned = "returned"                      # goods were returned
|
||||
|
||||
|
||||
class QualityRating(enum.Enum):
    """Five-point quality rating; higher integer value means better quality."""

    excellent = 5  # best possible rating
    good = 4
    average = 3
    poor = 2
    very_poor = 1  # worst possible rating
|
||||
|
||||
|
||||
class InvoiceStatus(enum.Enum):
    """Processing states of a supplier invoice.

    Member values mirror the member names so the stored strings match
    the Python identifiers exactly.
    """

    pending = "pending"      # invoice awaiting processing
    approved = "approved"    # invoice approved for payment
    paid = "paid"            # invoice has been paid
    overdue = "overdue"      # invoice is past its due date
    disputed = "disputed"    # invoice is under dispute
    cancelled = "cancelled"  # invoice was cancelled
|
||||
|
||||
|
||||
class PurchaseOrder(Base):
    """Purchase order sent to a supplier — the core procurement execution record.

    Owns its line items, deliveries, and invoices (all delete-orphan
    cascades). supplier_id is a soft reference into the Suppliers Service;
    product data on items references the Inventory Service.

    NOTE(review): po_number is unique=True globally, not per tenant —
    confirm that PO numbers are meant to be unique across all tenants.
    """
    __tablename__ = "purchase_orders"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to Suppliers Service

    # Order identification
    po_number = Column(String(50), nullable=False, unique=True, index=True)  # Human-readable PO number
    reference_number = Column(String(100), nullable=True)  # Internal reference

    # Link to procurement plan
    procurement_plan_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Link to ProcurementPlan

    # Order status and workflow
    status = Column(SQLEnum(PurchaseOrderStatus), nullable=False, default=PurchaseOrderStatus.draft, index=True)
    priority = Column(String(20), nullable=False, default="normal")  # urgent, high, normal, low

    # Order details — app-side default is a timezone-aware UTC timestamp
    order_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    required_delivery_date = Column(DateTime(timezone=True), nullable=True)  # Stored as DateTime for consistency
    estimated_delivery_date = Column(DateTime(timezone=True), nullable=True)
    expected_delivery_date = Column(DateTime(timezone=True), nullable=True)  # When delivery is actually expected (used for dashboard tracking)

    # Financial information
    subtotal = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    tax_amount = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    shipping_cost = Column(Numeric(10, 2), nullable=False, default=Decimal("0.00"))
    discount_amount = Column(Numeric(10, 2), nullable=False, default=Decimal("0.00"))
    total_amount = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    currency = Column(String(3), nullable=False, default="EUR")  # ISO 4217 code

    # Delivery information
    delivery_address = Column(Text, nullable=True)  # Override default address
    delivery_instructions = Column(Text, nullable=True)
    delivery_contact = Column(String(200), nullable=True)
    delivery_phone = Column(String(30), nullable=True)

    # Approval workflow
    requires_approval = Column(Boolean, nullable=False, default=False)
    approved_by = Column(UUID(as_uuid=True), nullable=True)
    approved_at = Column(DateTime(timezone=True), nullable=True)
    rejection_reason = Column(Text, nullable=True)

    # Auto-approval tracking
    auto_approved = Column(Boolean, nullable=False, default=False)  # Whether this was auto-approved
    auto_approval_rule_id = Column(UUID(as_uuid=True), nullable=True)  # Which rule approved it

    # Communication tracking
    sent_to_supplier_at = Column(DateTime(timezone=True), nullable=True)
    supplier_confirmation_date = Column(DateTime(timezone=True), nullable=True)
    supplier_reference = Column(String(100), nullable=True)  # Supplier's order reference

    # Additional information
    notes = Column(Text, nullable=True)
    internal_notes = Column(Text, nullable=True)  # Not shared with supplier
    terms_and_conditions = Column(Text, nullable=True)

    # JTBD Dashboard: Structured reasoning data for i18n support
    # Backend stores structured data, frontend translates using i18n
    reasoning_data = Column(JSONB, nullable=True)  # Structured reasoning data for multilingual support
    # reasoning_data structure (see shared/schemas/reasoning_types.py):
    # {
    #   "type": "low_stock_detection" | "forecast_demand" | "safety_stock_replenishment" | etc.,
    #   "parameters": {
    #     "supplier_name": "Harinas del Norte",
    #     "product_names": ["Flour Type 55", "Flour Type 45"],
    #     "days_until_stockout": 3,
    #     "current_stock": 45.5,
    #     "required_stock": 200
    #   },
    #   "consequence": {
    #     "type": "stockout_risk",
    #     "severity": "high",
    #     "impact_days": 3,
    #     "affected_products": ["Baguette", "Croissant"]
    #   },
    #   "metadata": {
    #     "trigger_source": "orchestrator_auto",
    #     "forecast_confidence": 0.85,
    #     "ai_assisted": true
    #   }
    # }

    # Internal transfer fields (for enterprise parent-child transfers)
    is_internal = Column(Boolean, default=False, nullable=False, index=True)  # Flag for internal transfers
    source_tenant_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Parent tenant for internal transfers
    destination_tenant_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Child tenant for internal transfers
    transfer_type = Column(String(50), nullable=True)  # finished_goods, raw_materials

    # Audit fields — timestamps are maintained by the database
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships — children are deleted with the order (delete-orphan)
    items = relationship("PurchaseOrderItem", back_populates="purchase_order", cascade="all, delete-orphan")
    deliveries = relationship("Delivery", back_populates="purchase_order", cascade="all, delete-orphan")
    invoices = relationship("SupplierInvoice", back_populates="purchase_order", cascade="all, delete-orphan")

    # Indexes — composite indexes lead with tenant_id for tenant-scoped queries
    __table_args__ = (
        Index('ix_purchase_orders_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_purchase_orders_tenant_status', 'tenant_id', 'status'),
        Index('ix_purchase_orders_tenant_plan', 'tenant_id', 'procurement_plan_id'),
        Index('ix_purchase_orders_order_date', 'order_date'),
        Index('ix_purchase_orders_delivery_date', 'required_delivery_date'),
    )
|
||||
|
||||
|
||||
class PurchaseOrderItem(Base):
    """Single line item of a purchase order.

    Links a PO to an inventory product and (optionally) back to the
    ProcurementRequirement that generated it. Rows are removed when the
    parent purchase order is deleted (FK ON DELETE CASCADE).
    """
    __tablename__ = "purchase_order_items"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    purchase_order_id = Column(UUID(as_uuid=True), ForeignKey('purchase_orders.id', ondelete='CASCADE'), nullable=False, index=True)

    # Link to procurement requirement
    procurement_requirement_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Link to ProcurementRequirement

    # Product identification (references Inventory Service)
    inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    product_code = Column(String(100), nullable=True)  # Supplier's product code
    product_name = Column(String(200), nullable=False)  # Denormalized for convenience

    # Supplier price list reference (from Suppliers Service)
    supplier_price_list_id = Column(UUID(as_uuid=True), nullable=True, index=True)

    # Order quantities
    ordered_quantity = Column(Numeric(12, 3), nullable=False)
    unit_of_measure = Column(String(20), nullable=False)
    unit_price = Column(Numeric(10, 4), nullable=False)
    line_total = Column(Numeric(12, 2), nullable=False)

    # Delivery tracking
    received_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))
    # NOTE(review): remaining_quantity defaults to 0 rather than
    # ordered_quantity — presumably application code initializes it;
    # confirm against the service layer.
    remaining_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))

    # Quality and notes
    quality_requirements = Column(Text, nullable=True)
    item_notes = Column(Text, nullable=True)

    # Audit fields — timestamps are maintained by the database
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Relationships
    purchase_order = relationship("PurchaseOrder", back_populates="items")
    delivery_items = relationship("DeliveryItem", back_populates="purchase_order_item", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('ix_po_items_tenant_po', 'tenant_id', 'purchase_order_id'),
        Index('ix_po_items_inventory_product', 'inventory_product_id'),
        Index('ix_po_items_requirement', 'procurement_requirement_id'),
    )
|
||||
|
||||
|
||||
class Delivery(Base):
    """Delivery (shipment) made against a purchase order.

    Tracks scheduling, carrier details, quality inspection, and receipt.
    Owns its DeliveryItem rows (delete-orphan cascade) and is removed
    when its purchase order is deleted (FK ON DELETE CASCADE).

    NOTE(review): delivery_number is unique=True globally, not per
    tenant — confirm this is intended.
    """
    __tablename__ = "deliveries"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    purchase_order_id = Column(UUID(as_uuid=True), ForeignKey('purchase_orders.id', ondelete='CASCADE'), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to Suppliers Service

    # Delivery identification
    delivery_number = Column(String(50), nullable=False, unique=True, index=True)
    supplier_delivery_note = Column(String(100), nullable=True)  # Supplier's delivery reference

    # Delivery status and tracking
    status = Column(SQLEnum(DeliveryStatus), nullable=False, default=DeliveryStatus.scheduled, index=True)

    # Scheduling and timing
    scheduled_date = Column(DateTime(timezone=True), nullable=True)
    estimated_arrival = Column(DateTime(timezone=True), nullable=True)
    actual_arrival = Column(DateTime(timezone=True), nullable=True)
    completed_at = Column(DateTime(timezone=True), nullable=True)

    # Delivery details
    delivery_address = Column(Text, nullable=True)
    delivery_contact = Column(String(200), nullable=True)
    delivery_phone = Column(String(30), nullable=True)
    carrier_name = Column(String(200), nullable=True)
    tracking_number = Column(String(100), nullable=True)

    # Quality inspection — inspection_passed stays NULL until inspected
    inspection_passed = Column(Boolean, nullable=True)
    inspection_notes = Column(Text, nullable=True)
    quality_issues = Column(JSONB, nullable=True)  # Documented quality problems

    # Received by information
    received_by = Column(UUID(as_uuid=True), nullable=True)  # User who received the delivery
    received_at = Column(DateTime(timezone=True), nullable=True)

    # Additional information
    notes = Column(Text, nullable=True)
    photos = Column(JSONB, nullable=True)  # Photo URLs for documentation

    # Audit fields — timestamps are maintained by the database
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    purchase_order = relationship("PurchaseOrder", back_populates="deliveries")
    items = relationship("DeliveryItem", back_populates="delivery", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('ix_deliveries_tenant_status', 'tenant_id', 'status'),
        Index('ix_deliveries_scheduled_date', 'scheduled_date'),
        Index('ix_deliveries_tenant_po', 'tenant_id', 'purchase_order_id'),
    )
|
||||
|
||||
|
||||
class DeliveryItem(Base):
    """Single line item within a delivery.

    Records how much of a purchase-order item arrived, was accepted, or
    was rejected, plus batch/expiry quality data. Cascades away with
    either its delivery or its purchase-order item (both FKs CASCADE).
    """
    __tablename__ = "delivery_items"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    delivery_id = Column(UUID(as_uuid=True), ForeignKey('deliveries.id', ondelete='CASCADE'), nullable=False, index=True)
    purchase_order_item_id = Column(UUID(as_uuid=True), ForeignKey('purchase_order_items.id', ondelete='CASCADE'), nullable=False, index=True)

    # Product identification
    inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Delivery quantities
    ordered_quantity = Column(Numeric(12, 3), nullable=False)
    delivered_quantity = Column(Numeric(12, 3), nullable=False)
    accepted_quantity = Column(Numeric(12, 3), nullable=False)
    rejected_quantity = Column(Numeric(12, 3), nullable=False, default=Decimal("0.000"))

    # Quality information
    batch_lot_number = Column(String(100), nullable=True)
    expiry_date = Column(DateTime(timezone=True), nullable=True)
    quality_grade = Column(String(20), nullable=True)

    # Issues and notes
    quality_issues = Column(Text, nullable=True)
    rejection_reason = Column(Text, nullable=True)
    item_notes = Column(Text, nullable=True)

    # Audit fields — timestamps are maintained by the database
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Relationships
    delivery = relationship("Delivery", back_populates="items")
    purchase_order_item = relationship("PurchaseOrderItem", back_populates="delivery_items")

    # Indexes
    __table_args__ = (
        Index('ix_delivery_items_tenant_delivery', 'tenant_id', 'delivery_id'),
        Index('ix_delivery_items_inventory_product', 'inventory_product_id'),
    )
|
||||
|
||||
|
||||
class SupplierInvoice(Base):
    """Invoice received from a supplier, optionally tied to a purchase order.

    The PO link uses ON DELETE SET NULL, so invoices outlive a deleted
    purchase order (unlike items/deliveries, which cascade).

    NOTE(review): invoice_number is unique=True globally, not per
    tenant — confirm this is intended.
    """
    __tablename__ = "supplier_invoices"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to Suppliers Service
    purchase_order_id = Column(UUID(as_uuid=True), ForeignKey('purchase_orders.id', ondelete='SET NULL'), nullable=True, index=True)

    # Invoice identification
    invoice_number = Column(String(50), nullable=False, unique=True, index=True)  # Internal invoice number
    supplier_invoice_number = Column(String(100), nullable=False)  # Supplier's own invoice number

    # Invoice status and dates — received_date defaults to "now" in UTC
    status = Column(SQLEnum(InvoiceStatus), nullable=False, default=InvoiceStatus.pending, index=True)
    invoice_date = Column(DateTime(timezone=True), nullable=False)
    due_date = Column(DateTime(timezone=True), nullable=False)
    received_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))

    # Financial information
    subtotal = Column(Numeric(12, 2), nullable=False)
    tax_amount = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    shipping_cost = Column(Numeric(10, 2), nullable=False, default=Decimal("0.00"))
    discount_amount = Column(Numeric(10, 2), nullable=False, default=Decimal("0.00"))
    total_amount = Column(Numeric(12, 2), nullable=False)
    currency = Column(String(3), nullable=False, default="EUR")  # ISO 4217 code

    # Payment tracking
    paid_amount = Column(Numeric(12, 2), nullable=False, default=Decimal("0.00"))
    payment_date = Column(DateTime(timezone=True), nullable=True)
    payment_reference = Column(String(100), nullable=True)

    # Invoice validation
    approved_by = Column(UUID(as_uuid=True), nullable=True)
    approved_at = Column(DateTime(timezone=True), nullable=True)
    rejection_reason = Column(Text, nullable=True)

    # Additional information
    notes = Column(Text, nullable=True)
    invoice_document_url = Column(String(500), nullable=True)  # PDF storage location

    # Audit fields — timestamps are maintained by the database
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    purchase_order = relationship("PurchaseOrder", back_populates="invoices")

    # Indexes
    __table_args__ = (
        Index('ix_invoices_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_invoices_tenant_status', 'tenant_id', 'status'),
        Index('ix_invoices_due_date', 'due_date'),
    )
|
||||
194
services/procurement/app/models/replenishment.py
Normal file
194
services/procurement/app/models/replenishment.py
Normal file
@@ -0,0 +1,194 @@
|
||||
"""
|
||||
Database models for replenishment planning.
|
||||
"""
|
||||
|
||||
import uuid
from datetime import datetime, timezone

from sqlalchemy import (
    JSON,
    TIMESTAMP,
    Boolean,
    Column,
    Date,
    ForeignKey,
    Integer,
    Numeric,
    String,
    Text,
    UniqueConstraint,
)
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import relationship

from shared.database import Base
|
||||
|
||||
|
||||
class ReplenishmentPlan(Base):
    """Replenishment plan master record.

    Aggregates summary statistics over its ReplenishmentPlanItem children
    (delete-orphan cascade) and soft-references the forecast and
    production schedule it was derived from.
    """
    __tablename__ = "replenishment_plans"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Planning metadata
    planning_date = Column(Date, nullable=False)
    projection_horizon_days = Column(Integer, nullable=False, default=7)

    # References (soft; owned by other services)
    forecast_id = Column(UUID(as_uuid=True), nullable=True)
    production_schedule_id = Column(UUID(as_uuid=True), nullable=True)

    # Summary statistics
    total_items = Column(Integer, nullable=False, default=0)
    urgent_items = Column(Integer, nullable=False, default=0)
    high_risk_items = Column(Integer, nullable=False, default=0)
    total_estimated_cost = Column(Numeric(12, 2), nullable=False, default=0)

    # Status
    status = Column(String(50), nullable=False, default='draft')  # draft, approved, executed

    # Timestamps — use timezone-aware UTC values, matching the
    # TIMESTAMP(timezone=True) column type (the previous
    # datetime.utcnow produced naive datetimes and is deprecated).
    created_at = Column(TIMESTAMP(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    updated_at = Column(TIMESTAMP(timezone=True), nullable=True, onupdate=lambda: datetime.now(timezone.utc))
    executed_at = Column(TIMESTAMP(timezone=True), nullable=True)

    # Relationships
    items = relationship("ReplenishmentPlanItem", back_populates="plan", cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class ReplenishmentPlanItem(Base):
    """Individual ingredient line within a replenishment plan.

    Carries the quantity pipeline (base -> safety stock -> shelf-life
    adjusted -> final order quantity), key planning dates, risk flags,
    and the JSONB calculation traces that produced the numbers.
    """
    __tablename__ = "replenishment_plan_items"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    replenishment_plan_id = Column(UUID(as_uuid=True), ForeignKey("replenishment_plans.id"), nullable=False, index=True)

    # Ingredient info (denormalized name for display)
    ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_name = Column(String(200), nullable=False)
    unit_of_measure = Column(String(20), nullable=False)

    # Quantities
    base_quantity = Column(Numeric(12, 3), nullable=False)
    safety_stock_quantity = Column(Numeric(12, 3), nullable=False, default=0)
    shelf_life_adjusted_quantity = Column(Numeric(12, 3), nullable=False)
    final_order_quantity = Column(Numeric(12, 3), nullable=False)

    # Dates
    order_date = Column(Date, nullable=False, index=True)
    delivery_date = Column(Date, nullable=False)
    required_by_date = Column(Date, nullable=False)

    # Planning metadata
    lead_time_days = Column(Integer, nullable=False)
    is_urgent = Column(Boolean, nullable=False, default=False, index=True)
    urgency_reason = Column(Text, nullable=True)
    waste_risk = Column(String(20), nullable=False, default='low')  # low, medium, high
    stockout_risk = Column(String(20), nullable=False, default='low')  # low, medium, high, critical

    # Supplier
    supplier_id = Column(UUID(as_uuid=True), nullable=True)

    # Calculation details (stored as JSONB)
    safety_stock_calculation = Column(JSONB, nullable=True)
    shelf_life_adjustment = Column(JSONB, nullable=True)
    inventory_projection = Column(JSONB, nullable=True)

    # Timestamps — timezone-aware UTC to match TIMESTAMP(timezone=True)
    # (the previous datetime.utcnow produced naive datetimes and is deprecated).
    created_at = Column(TIMESTAMP(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))

    # Relationships
    plan = relationship("ReplenishmentPlan", back_populates="items")
|
||||
|
||||
|
||||
class InventoryProjection(Base):
    """Daily projected inventory position for one ingredient.

    One row per (tenant, ingredient, date): starting stock plus scheduled
    receipts minus forecasted consumption yields the projected ending
    stock, with stockout flags for downstream alerting.
    """
    __tablename__ = "inventory_projections"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Ingredient (denormalized name for display)
    ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_name = Column(String(200), nullable=False)

    # Projection date
    projection_date = Column(Date, nullable=False, index=True)

    # Stock levels
    starting_stock = Column(Numeric(12, 3), nullable=False)
    forecasted_consumption = Column(Numeric(12, 3), nullable=False, default=0)
    scheduled_receipts = Column(Numeric(12, 3), nullable=False, default=0)
    projected_ending_stock = Column(Numeric(12, 3), nullable=False)

    # Flags
    is_stockout = Column(Boolean, nullable=False, default=False, index=True)
    coverage_gap = Column(Numeric(12, 3), nullable=False, default=0)  # Negative if stockout

    # Reference to replenishment plan
    replenishment_plan_id = Column(UUID(as_uuid=True), nullable=True)

    # Timestamps — timezone-aware UTC to match TIMESTAMP(timezone=True)
    # (the previous datetime.utcnow produced naive datetimes and is deprecated).
    created_at = Column(TIMESTAMP(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))

    __table_args__ = (
        # One projection per ingredient per date per tenant. The original
        # comment promised this constraint but never declared it.
        UniqueConstraint(
            'tenant_id', 'ingredient_id', 'projection_date',
            name='uq_inventory_projections_tenant_ingredient_date',
        ),
        {'schema': None}
    )
|
||||
|
||||
|
||||
class SupplierAllocation(Base):
    """Supplier allocation for a requirement.

    Records how much of a replenishment/procurement requirement is assigned
    to a given supplier, along with pricing, lead time, and the scoring that
    justified the allocation.  Note: this table has no tenant_id column;
    tenant scoping happens through the linked requirement/plan item.
    """
    __tablename__ = "supplier_allocations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)

    # References (requirement_id is a plain UUID pointer, not an FK)
    replenishment_plan_item_id = Column(UUID(as_uuid=True), ForeignKey("replenishment_plan_items.id"), nullable=True, index=True)
    requirement_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Reference to procurement_requirements

    # Supplier (name denormalized for display)
    supplier_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_name = Column(String(200), nullable=False)

    # Allocation
    allocation_type = Column(String(20), nullable=False)  # primary, backup, diversification
    allocated_quantity = Column(Numeric(12, 3), nullable=False)
    allocation_percentage = Column(Numeric(5, 4), nullable=False)  # 0.0000 - 1.0000

    # Pricing
    unit_price = Column(Numeric(12, 2), nullable=False)
    total_cost = Column(Numeric(12, 2), nullable=False)

    # Lead time
    lead_time_days = Column(Integer, nullable=False)

    # Scoring (breakdown is free-form JSON from the scoring engine)
    supplier_score = Column(Numeric(5, 2), nullable=False)
    score_breakdown = Column(JSONB, nullable=True)

    # Reasoning — human-readable explanation of why this supplier was chosen
    allocation_reason = Column(Text, nullable=True)

    # Timestamps
    created_at = Column(TIMESTAMP(timezone=True), nullable=False, default=datetime.utcnow)
||||
class SupplierSelectionHistory(Base):
    """Historical record of supplier selections for analytics.

    Append-only log of which supplier was selected for an ingredient order,
    with the price, lead time and performance metrics observed at selection
    time, so later analysis does not depend on mutable supplier records.
    """
    __tablename__ = "supplier_selection_history"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Selection details (names denormalized so history survives renames)
    ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_name = Column(String(200), nullable=False)
    selected_supplier_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    selected_supplier_name = Column(String(200), nullable=False)

    # Order details
    selection_date = Column(Date, nullable=False, index=True)
    quantity = Column(Numeric(12, 3), nullable=False)
    unit_price = Column(Numeric(12, 2), nullable=False)
    total_cost = Column(Numeric(12, 2), nullable=False)

    # Metrics (nullable — may not be known at selection time)
    lead_time_days = Column(Integer, nullable=False)
    quality_score = Column(Numeric(5, 2), nullable=True)
    delivery_performance = Column(Numeric(5, 2), nullable=True)

    # Selection strategy
    selection_strategy = Column(String(50), nullable=False)  # single_source, dual_source, multi_source
    was_primary_choice = Column(Boolean, nullable=False, default=True)

    # Timestamps
    created_at = Column(TIMESTAMP(timezone=True), nullable=False, default=datetime.utcnow)
0
services/procurement/app/repositories/__init__.py
Normal file
0
services/procurement/app/repositories/__init__.py
Normal file
62
services/procurement/app/repositories/base_repository.py
Normal file
62
services/procurement/app/repositories/base_repository.py
Normal file
@@ -0,0 +1,62 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/repositories/base_repository.py
|
||||
# ================================================================
|
||||
"""
|
||||
Base Repository Pattern for Procurement Service
|
||||
"""
|
||||
|
||||
from typing import Generic, TypeVar, Type, Optional, List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from shared.database.base import Base
|
||||
|
||||
ModelType = TypeVar("ModelType", bound=Base)
|
||||
|
||||
|
||||
class BaseRepository(Generic[ModelType]):
    """Base repository with common async CRUD operations.

    Subclasses bind a concrete SQLAlchemy model in ``__init__`` and inherit
    get/list/create/update/delete helpers.  All write operations flush (so
    server defaults and generated ids are populated) but never commit —
    transaction boundaries are the caller's responsibility.
    """

    def __init__(self, model: Type[ModelType]):
        # The declarative model class this repository operates on.
        self.model = model

    async def get_by_id(self, db: AsyncSession, id: Any) -> Optional[ModelType]:
        """Return the entity with the given primary key, or None if absent."""
        result = await db.execute(select(self.model).where(self.model.id == id))
        return result.scalar_one_or_none()

    async def get_all(self, db: AsyncSession, skip: int = 0, limit: int = 100) -> List[ModelType]:
        """Return a page of entities using offset/limit pagination."""
        result = await db.execute(select(self.model).offset(skip).limit(limit))
        # scalars().all() returns a Sequence in SQLAlchemy 2.x; materialize a
        # real list so the return value matches the declared annotation.
        return list(result.scalars().all())

    async def create(self, db: AsyncSession, **kwargs) -> ModelType:
        """Create and flush a new entity, returning the refreshed instance."""
        instance = self.model(**kwargs)
        db.add(instance)
        await db.flush()
        await db.refresh(instance)
        return instance

    async def update(self, db: AsyncSession, id: Any, **kwargs) -> Optional[ModelType]:
        """Apply attribute updates to the entity with the given id.

        Keys that do not correspond to an attribute on the model are silently
        ignored.  Returns the refreshed entity, or None if it does not exist.
        """
        instance = await self.get_by_id(db, id)
        if not instance:
            return None

        for key, value in kwargs.items():
            if hasattr(instance, key):
                setattr(instance, key, value)

        await db.flush()
        await db.refresh(instance)
        return instance

    async def delete(self, db: AsyncSession, id: Any) -> bool:
        """Delete the entity with the given id; return True if it existed."""
        instance = await self.get_by_id(db, id)
        if not instance:
            return False

        await db.delete(instance)
        await db.flush()
        return True
@@ -0,0 +1,254 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/repositories/procurement_plan_repository.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Plan Repository - Database operations for procurement plans and requirements
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, date
|
||||
from typing import List, Optional, Dict, Any
|
||||
from sqlalchemy import select, and_, desc, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
|
||||
from app.repositories.base_repository import BaseRepository
|
||||
|
||||
|
||||
class ProcurementPlanRepository(BaseRepository):
    """Repository for procurement plan operations.

    All read queries are tenant-scoped.  Write operations flush but do not
    commit; the caller owns the transaction.
    """

    def __init__(self, db: AsyncSession):
        super().__init__(ProcurementPlan)
        # Unlike the generic base class, this repository keeps the session.
        self.db = db

    async def create_plan(self, plan_data: Dict[str, Any]) -> ProcurementPlan:
        """Create a new procurement plan (flushed so the id is populated)."""
        plan = ProcurementPlan(**plan_data)
        self.db.add(plan)
        await self.db.flush()
        return plan

    async def get_plan_by_id(self, plan_id: uuid.UUID, tenant_id: uuid.UUID) -> Optional[ProcurementPlan]:
        """Get procurement plan by ID (tenant-scoped), requirements eager-loaded."""
        stmt = select(ProcurementPlan).where(
            and_(
                ProcurementPlan.id == plan_id,
                ProcurementPlan.tenant_id == tenant_id
            )
        ).options(selectinload(ProcurementPlan.requirements))

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_plan_by_date(self, plan_date: date, tenant_id: uuid.UUID) -> Optional[ProcurementPlan]:
        """Get the procurement plan for a specific date.

        Assumes at most one plan per (tenant, date); with duplicates
        scalar_one_or_none() would raise — TODO confirm a DB constraint exists.
        """
        stmt = select(ProcurementPlan).where(
            and_(
                ProcurementPlan.plan_date == plan_date,
                ProcurementPlan.tenant_id == tenant_id
            )
        ).options(selectinload(ProcurementPlan.requirements))

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_current_plan(self, tenant_id: uuid.UUID) -> Optional[ProcurementPlan]:
        """Get the current day's procurement plan (server-local date)."""
        today = date.today()
        return await self.get_plan_by_date(today, tenant_id)

    async def list_plans(
        self,
        tenant_id: uuid.UUID,
        status: Optional[str] = None,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[ProcurementPlan]:
        """List procurement plans, newest plan_date first, with optional filters.

        The date filters apply to plan_date (inclusive on both ends).
        """
        conditions = [ProcurementPlan.tenant_id == tenant_id]

        if status:
            conditions.append(ProcurementPlan.status == status)
        if start_date:
            conditions.append(ProcurementPlan.plan_date >= start_date)
        if end_date:
            conditions.append(ProcurementPlan.plan_date <= end_date)

        stmt = (
            select(ProcurementPlan)
            .where(and_(*conditions))
            .order_by(desc(ProcurementPlan.plan_date))
            .limit(limit)
            .offset(offset)
            .options(selectinload(ProcurementPlan.requirements))
        )

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_plans_by_tenant(
        self,
        tenant_id: uuid.UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[ProcurementPlan]:
        """Get all plans for a tenant, newest first.

        Unlike list_plans, the date filters here apply to created_at (not
        plan_date) and there is no pagination.
        """
        conditions = [ProcurementPlan.tenant_id == tenant_id]

        if start_date:
            conditions.append(ProcurementPlan.created_at >= start_date)
        if end_date:
            conditions.append(ProcurementPlan.created_at <= end_date)

        stmt = (
            select(ProcurementPlan)
            .where(and_(*conditions))
            .order_by(desc(ProcurementPlan.created_at))
            .options(selectinload(ProcurementPlan.requirements))
        )

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def update_plan(self, plan_id: uuid.UUID, tenant_id: uuid.UUID, updates: Dict[str, Any]) -> Optional[ProcurementPlan]:
        """Update a plan's attributes; unknown keys are silently ignored.

        Returns None when the plan does not exist for this tenant.
        """
        plan = await self.get_plan_by_id(plan_id, tenant_id)
        if not plan:
            return None

        for key, value in updates.items():
            if hasattr(plan, key):
                setattr(plan, key, value)

        # NOTE(review): naive utcnow() written to a timezone-aware column —
        # consistent with the models' defaults, but verify intended tz handling.
        plan.updated_at = datetime.utcnow()
        await self.db.flush()
        return plan

    async def delete_plan(self, plan_id: uuid.UUID, tenant_id: uuid.UUID) -> bool:
        """Delete a plan; returns True if it existed.

        NOTE(review): no flush here, unlike BaseRepository.delete — the delete
        is only emitted at the next flush/commit; confirm this is intentional.
        """
        plan = await self.get_plan_by_id(plan_id, tenant_id)
        if not plan:
            return False

        await self.db.delete(plan)
        return True

    async def generate_plan_number(self, tenant_id: uuid.UUID, plan_date: date) -> str:
        """Generate a plan number like PP-YYYYMMDD-001.

        NOTE(review): count-based sequencing can collide after deletions or
        under concurrent writers; a DB sequence/unique constraint would be safer.
        """
        date_str = plan_date.strftime("%Y%m%d")

        # Count existing plans for the same date
        stmt = select(func.count(ProcurementPlan.id)).where(
            and_(
                ProcurementPlan.tenant_id == tenant_id,
                ProcurementPlan.plan_date == plan_date
            )
        )
        result = await self.db.execute(stmt)
        count = result.scalar() or 0

        return f"PP-{date_str}-{count + 1:03d}"
|
||||
class ProcurementRequirementRepository(BaseRepository):
    """Repository for procurement requirement operations.

    Requirements belong to a ProcurementPlan; tenant scoping is enforced by
    joining through the parent plan (requirements carry no tenant_id of
    their own in these queries).
    """

    def __init__(self, db: AsyncSession):
        super().__init__(ProcurementRequirement)
        self.db = db

    async def create_requirement(self, requirement_data: Dict[str, Any]) -> ProcurementRequirement:
        """Create a single requirement (flushed so the id is populated)."""
        requirement = ProcurementRequirement(**requirement_data)
        self.db.add(requirement)
        await self.db.flush()
        return requirement

    async def create_requirements_batch(self, requirements_data: List[Dict[str, Any]]) -> List[ProcurementRequirement]:
        """Create multiple requirements in one flush."""
        requirements = [ProcurementRequirement(**data) for data in requirements_data]
        self.db.add_all(requirements)
        await self.db.flush()
        return requirements

    async def get_requirement_by_id(self, requirement_id: uuid.UUID, tenant_id: uuid.UUID) -> Optional[ProcurementRequirement]:
        """Get a requirement by ID, tenant-scoped via its parent plan."""
        stmt = select(ProcurementRequirement).join(ProcurementPlan).where(
            and_(
                ProcurementRequirement.id == requirement_id,
                ProcurementPlan.tenant_id == tenant_id
            )
        )

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_requirements_by_plan(self, plan_id: uuid.UUID) -> List[ProcurementRequirement]:
        """Get all requirements for a plan, highest priority first, then by due date.

        Not tenant-scoped — callers must have already validated plan ownership.
        """
        stmt = select(ProcurementRequirement).where(
            ProcurementRequirement.plan_id == plan_id
        ).order_by(ProcurementRequirement.priority.desc(), ProcurementRequirement.required_by_date)

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def update_requirement(
        self,
        requirement_id: uuid.UUID,
        updates: Dict[str, Any]
    ) -> Optional[ProcurementRequirement]:
        """Update a requirement; unknown keys are silently ignored.

        NOTE(review): no tenant check here, unlike get_requirement_by_id —
        confirm callers always pre-validate ownership.
        """
        stmt = select(ProcurementRequirement).where(
            ProcurementRequirement.id == requirement_id
        )
        result = await self.db.execute(stmt)
        requirement = result.scalar_one_or_none()

        if not requirement:
            return None

        for key, value in updates.items():
            if hasattr(requirement, key):
                setattr(requirement, key, value)

        requirement.updated_at = datetime.utcnow()
        await self.db.flush()
        return requirement

    async def generate_requirement_number(self, plan_id: uuid.UUID) -> str:
        """Generate a requirement number like REQ-00001, unique within a plan.

        NOTE(review): count-based — can collide after deletions or with
        concurrent inserts into the same plan.
        """
        stmt = select(func.count(ProcurementRequirement.id)).where(
            ProcurementRequirement.plan_id == plan_id
        )
        result = await self.db.execute(stmt)
        count = result.scalar() or 0

        return f"REQ-{count + 1:05d}"

    async def get_requirements_by_tenant(
        self,
        tenant_id: uuid.UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[ProcurementRequirement]:
        """Get all requirements for a tenant (via plan join), newest first.

        Date filters apply to the requirement's created_at, inclusive.
        """
        conditions = [ProcurementPlan.tenant_id == tenant_id]

        if start_date:
            conditions.append(ProcurementRequirement.created_at >= start_date)
        if end_date:
            conditions.append(ProcurementRequirement.created_at <= end_date)

        stmt = (
            select(ProcurementRequirement)
            .join(ProcurementPlan)
            .where(and_(*conditions))
            .order_by(desc(ProcurementRequirement.created_at))
        )

        result = await self.db.execute(stmt)
        return result.scalars().all()
||||
@@ -0,0 +1,318 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/repositories/purchase_order_repository.py
|
||||
# ================================================================
|
||||
"""
|
||||
Purchase Order Repository - Database operations for purchase orders
|
||||
Migrated from Suppliers Service
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, date
|
||||
from typing import List, Optional, Dict, Any
|
||||
from sqlalchemy import select, and_, or_, desc, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.purchase_order import (
|
||||
PurchaseOrder,
|
||||
PurchaseOrderItem,
|
||||
PurchaseOrderStatus,
|
||||
Delivery,
|
||||
DeliveryStatus,
|
||||
SupplierInvoice,
|
||||
)
|
||||
from app.repositories.base_repository import BaseRepository
|
||||
|
||||
|
||||
class PurchaseOrderRepository(BaseRepository):
    """Repository for purchase order operations.

    Read queries are tenant-scoped; writes flush but never commit.
    """

    def __init__(self, db: AsyncSession):
        super().__init__(PurchaseOrder)
        self.db = db

    async def create_po(self, po_data: Dict[str, Any]) -> PurchaseOrder:
        """Create a new purchase order (flushed so the id is populated)."""
        po = PurchaseOrder(**po_data)
        self.db.add(po)
        await self.db.flush()
        return po

    async def get_po_by_id(self, po_id: uuid.UUID, tenant_id: uuid.UUID) -> Optional[PurchaseOrder]:
        """Get a PO by ID with items, deliveries and invoices eager-loaded."""
        stmt = select(PurchaseOrder).where(
            and_(
                PurchaseOrder.id == po_id,
                PurchaseOrder.tenant_id == tenant_id
            )
        ).options(
            selectinload(PurchaseOrder.items),
            selectinload(PurchaseOrder.deliveries),
            selectinload(PurchaseOrder.invoices)
        )

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_po_by_number(self, po_number: str, tenant_id: uuid.UUID) -> Optional[PurchaseOrder]:
        """Get a PO by its human-readable PO number (items eager-loaded)."""
        stmt = select(PurchaseOrder).where(
            and_(
                PurchaseOrder.po_number == po_number,
                PurchaseOrder.tenant_id == tenant_id
            )
        ).options(selectinload(PurchaseOrder.items))

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def list_purchase_orders(
        self,
        tenant_id: uuid.UUID,
        status: Optional[PurchaseOrderStatus] = None,
        supplier_id: Optional[uuid.UUID] = None,
        priority: Optional[str] = None,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[PurchaseOrder]:
        """List POs newest order_date first, with optional filters.

        Date filters apply to order_date, inclusive on both ends.
        """
        conditions = [PurchaseOrder.tenant_id == tenant_id]

        if status:
            conditions.append(PurchaseOrder.status == status)
        if supplier_id:
            conditions.append(PurchaseOrder.supplier_id == supplier_id)
        if priority:
            conditions.append(PurchaseOrder.priority == priority)
        if start_date:
            conditions.append(PurchaseOrder.order_date >= start_date)
        if end_date:
            conditions.append(PurchaseOrder.order_date <= end_date)

        stmt = (
            select(PurchaseOrder)
            .where(and_(*conditions))
            .order_by(desc(PurchaseOrder.order_date))
            .limit(limit)
            .offset(offset)
            .options(selectinload(PurchaseOrder.items))
        )

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_pending_approval(self, tenant_id: uuid.UUID) -> List[PurchaseOrder]:
        """Get POs awaiting approval, largest total_amount first."""
        stmt = select(PurchaseOrder).where(
            and_(
                PurchaseOrder.tenant_id == tenant_id,
                PurchaseOrder.status == PurchaseOrderStatus.pending_approval
            )
        ).order_by(PurchaseOrder.total_amount.desc())

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def update_po(self, po_id: uuid.UUID, tenant_id: uuid.UUID, updates: Dict[str, Any]) -> Optional[PurchaseOrder]:
        """Update a PO; unknown keys are silently ignored.

        Returns None when the PO does not exist for this tenant.
        """
        po = await self.get_po_by_id(po_id, tenant_id)
        if not po:
            return None

        for key, value in updates.items():
            if hasattr(po, key):
                setattr(po, key, value)

        po.updated_at = datetime.utcnow()
        await self.db.flush()
        return po

    async def generate_po_number(self, tenant_id: uuid.UUID) -> str:
        """Generate a PO number like PO-YYYYMMDD-0001 for today.

        NOTE(review): count-based sequencing can collide after deletions or
        under concurrent writers; confirm a unique constraint backs po_number.
        """
        today = date.today()
        date_str = today.strftime("%Y%m%d")

        # Count existing POs for today
        stmt = select(func.count(PurchaseOrder.id)).where(
            and_(
                PurchaseOrder.tenant_id == tenant_id,
                func.date(PurchaseOrder.order_date) == today
            )
        )
        result = await self.db.execute(stmt)
        count = result.scalar() or 0

        return f"PO-{date_str}-{count + 1:04d}"
|
||||
|
||||
class PurchaseOrderItemRepository(BaseRepository):
    """Data-access layer for individual purchase order line items.

    Flushes after every write so generated ids are available; committing is
    left to the caller, matching the other repositories in this module.
    """

    def __init__(self, db: AsyncSession):
        # Bind the model to the generic base and keep the session for reuse.
        super().__init__(PurchaseOrderItem)
        self.db = db

    async def create_item(self, item_data: Dict[str, Any]) -> PurchaseOrderItem:
        """Persist a single purchase order item and return it."""
        new_item = PurchaseOrderItem(**item_data)
        self.db.add(new_item)
        await self.db.flush()
        return new_item

    async def create_items_batch(self, items_data: List[Dict[str, Any]]) -> List[PurchaseOrderItem]:
        """Persist several purchase order items in a single flush."""
        created = [PurchaseOrderItem(**row) for row in items_data]
        self.db.add_all(created)
        await self.db.flush()
        return created

    async def get_items_by_po(self, po_id: uuid.UUID) -> List[PurchaseOrderItem]:
        """Return every item belonging to the given purchase order."""
        query = select(PurchaseOrderItem).where(
            PurchaseOrderItem.purchase_order_id == po_id
        )

        rows = await self.db.execute(query)
        return rows.scalars().all()
|
||||
|
||||
class DeliveryRepository(BaseRepository):
    """Repository for delivery operations (deliveries against purchase orders)."""

    def __init__(self, db: AsyncSession):
        super().__init__(Delivery)
        self.db = db

    async def create_delivery(self, delivery_data: Dict[str, Any]) -> Delivery:
        """Create a new delivery (flushed so the id is populated)."""
        delivery = Delivery(**delivery_data)
        self.db.add(delivery)
        await self.db.flush()
        return delivery

    async def get_delivery_by_id(self, delivery_id: uuid.UUID, tenant_id: uuid.UUID) -> Optional[Delivery]:
        """Get a delivery by ID (tenant-scoped) with its items eager-loaded."""
        stmt = select(Delivery).where(
            and_(
                Delivery.id == delivery_id,
                Delivery.tenant_id == tenant_id
            )
        ).options(selectinload(Delivery.items))

        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_deliveries_by_po(self, po_id: uuid.UUID) -> List[Delivery]:
        """Get all deliveries for a purchase order (items eager-loaded).

        Not tenant-scoped — callers must have already validated PO ownership.
        """
        stmt = select(Delivery).where(
            Delivery.purchase_order_id == po_id
        ).options(selectinload(Delivery.items))

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def create_delivery_item(self, item_data: Dict[str, Any]):
        """Create a delivery item (flushed so the id is populated)."""
        # Local import — DeliveryItem is not part of this module's top imports.
        from app.models.purchase_order import DeliveryItem
        item = DeliveryItem(**item_data)
        self.db.add(item)
        await self.db.flush()
        return item

    async def update_delivery(
        self,
        delivery_id: uuid.UUID,
        tenant_id: uuid.UUID,
        updates: Dict[str, Any]
    ) -> Optional[Delivery]:
        """Update a delivery; unknown keys are silently ignored.

        Returns None when the delivery does not exist for this tenant.
        """
        delivery = await self.get_delivery_by_id(delivery_id, tenant_id)
        if not delivery:
            return None

        for key, value in updates.items():
            if hasattr(delivery, key):
                setattr(delivery, key, value)

        delivery.updated_at = datetime.utcnow()
        await self.db.flush()
        return delivery

    async def generate_delivery_number(self, tenant_id: uuid.UUID) -> str:
        """Generate a delivery number like DEL-YYYYMMDD-0001 for today.

        NOTE(review): count-based sequencing shares the same collision risk
        as the other generate_*_number helpers in this module.
        """
        today = date.today()
        date_str = today.strftime("%Y%m%d")

        stmt = select(func.count(Delivery.id)).where(
            and_(
                Delivery.tenant_id == tenant_id,
                func.date(Delivery.created_at) == today
            )
        )
        result = await self.db.execute(stmt)
        count = result.scalar() or 0

        return f"DEL-{date_str}-{count + 1:04d}"
|
||||
|
||||
class SupplierInvoiceRepository(BaseRepository):
    """Repository for supplier invoice operations."""

    def __init__(self, db: AsyncSession):
        super().__init__(SupplierInvoice)
        self.db = db

    async def create_invoice(self, invoice_data: Dict[str, Any]) -> SupplierInvoice:
        """Create a new supplier invoice (flushed so the id is populated)."""
        invoice = SupplierInvoice(**invoice_data)
        self.db.add(invoice)
        await self.db.flush()
        return invoice

    async def get_invoice_by_id(self, invoice_id: uuid.UUID, tenant_id: uuid.UUID) -> Optional[SupplierInvoice]:
        """Get an invoice by ID, tenant-scoped."""
        stmt = select(SupplierInvoice).where(
            and_(
                SupplierInvoice.id == invoice_id,
                SupplierInvoice.tenant_id == tenant_id
            )
        )
        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_invoices_by_po(self, po_id: uuid.UUID) -> List[SupplierInvoice]:
        """Get all invoices for a purchase order.

        Not tenant-scoped — callers must have already validated PO ownership.
        """
        stmt = select(SupplierInvoice).where(
            SupplierInvoice.purchase_order_id == po_id
        )
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_invoices_by_supplier(self, supplier_id: uuid.UUID, tenant_id: uuid.UUID) -> List[SupplierInvoice]:
        """Get all invoices for a supplier, newest invoice_date first."""
        stmt = select(SupplierInvoice).where(
            and_(
                SupplierInvoice.supplier_id == supplier_id,
                SupplierInvoice.tenant_id == tenant_id
            )
        ).order_by(SupplierInvoice.invoice_date.desc())
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def generate_invoice_number(self, tenant_id: uuid.UUID) -> str:
        """Generate an invoice number like INV-YYYYMMDD-0001 for today.

        NOTE(review): count-based sequencing shares the same collision risk
        as the other generate_*_number helpers in this module.
        """
        today = date.today()
        date_str = today.strftime("%Y%m%d")

        stmt = select(func.count(SupplierInvoice.id)).where(
            and_(
                SupplierInvoice.tenant_id == tenant_id,
                func.date(SupplierInvoice.created_at) == today
            )
        )
        result = await self.db.execute(stmt)
        count = result.scalar() or 0

        return f"INV-{date_str}-{count + 1:04d}"
||||
@@ -0,0 +1,315 @@
|
||||
"""
|
||||
Replenishment Plan Repository
|
||||
|
||||
Provides database operations for replenishment planning, inventory projections,
|
||||
and supplier allocations.
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date
|
||||
from uuid import UUID
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, func
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.replenishment import (
|
||||
ReplenishmentPlan,
|
||||
ReplenishmentPlanItem,
|
||||
InventoryProjection,
|
||||
SupplierAllocation
|
||||
)
|
||||
from app.repositories.base_repository import BaseRepository
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ReplenishmentPlanRepository(BaseRepository[ReplenishmentPlan]):
    """Repository for replenishment plan operations.

    Unlike the procurement/purchase-order repositories, this one is stateless
    (the session is passed to each method) and returns plain dicts rather
    than ORM objects, so results are directly JSON-serializable.
    """

    def __init__(self):
        super().__init__(ReplenishmentPlan)

    async def list_plans(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        skip: int = 0,
        limit: int = 100,
        status: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List replenishment plans for a tenant as dicts, newest first."""
        try:
            query = select(ReplenishmentPlan).where(
                ReplenishmentPlan.tenant_id == tenant_id
            )

            if status:
                query = query.where(ReplenishmentPlan.status == status)

            query = query.offset(skip).limit(limit).order_by(
                ReplenishmentPlan.created_at.desc()
            )

            result = await db.execute(query)
            plans = result.scalars().all()

            # Serialize to plain dicts; UUIDs become strings, Numeric -> float.
            # NOTE(review): float(plan.total_estimated_cost) raises if the
            # column is NULL — confirm the model guarantees non-null here.
            return [
                {
                    "id": str(plan.id),
                    "tenant_id": str(plan.tenant_id),
                    "planning_date": plan.planning_date,
                    "projection_horizon_days": plan.projection_horizon_days,
                    "total_items": plan.total_items,
                    "urgent_items": plan.urgent_items,
                    "high_risk_items": plan.high_risk_items,
                    "total_estimated_cost": float(plan.total_estimated_cost),
                    "status": plan.status,
                    "created_at": plan.created_at,
                    "updated_at": plan.updated_at
                }
                for plan in plans
            ]

        except Exception as e:
            # Log with context, then propagate to the caller unchanged.
            logger.error("Failed to list replenishment plans", error=str(e), tenant_id=tenant_id)
            raise

    async def get_plan_by_id(
        self,
        db: AsyncSession,
        plan_id: UUID,
        tenant_id: UUID
    ) -> Optional[Dict[str, Any]]:
        """Get one replenishment plan (tenant-scoped) as a dict, items included.

        Returns None when the plan does not exist for this tenant.
        """
        try:
            query = select(ReplenishmentPlan).where(
                and_(
                    ReplenishmentPlan.id == plan_id,
                    ReplenishmentPlan.tenant_id == tenant_id
                )
            ).options(selectinload(ReplenishmentPlan.items))

            result = await db.execute(query)
            plan = result.scalar_one_or_none()

            if not plan:
                return None

            return {
                "id": str(plan.id),
                "tenant_id": str(plan.tenant_id),
                "planning_date": plan.planning_date,
                "projection_horizon_days": plan.projection_horizon_days,
                "forecast_id": str(plan.forecast_id) if plan.forecast_id else None,
                "production_schedule_id": str(plan.production_schedule_id) if plan.production_schedule_id else None,
                "total_items": plan.total_items,
                "urgent_items": plan.urgent_items,
                "high_risk_items": plan.high_risk_items,
                "total_estimated_cost": float(plan.total_estimated_cost),
                "status": plan.status,
                "created_at": plan.created_at,
                "updated_at": plan.updated_at,
                "executed_at": plan.executed_at,
                "items": [
                    {
                        "id": str(item.id),
                        "ingredient_id": str(item.ingredient_id),
                        "ingredient_name": item.ingredient_name,
                        "unit_of_measure": item.unit_of_measure,
                        "base_quantity": float(item.base_quantity),
                        "safety_stock_quantity": float(item.safety_stock_quantity),
                        "final_order_quantity": float(item.final_order_quantity),
                        "order_date": item.order_date,
                        "delivery_date": item.delivery_date,
                        "required_by_date": item.required_by_date,
                        "lead_time_days": item.lead_time_days,
                        "is_urgent": item.is_urgent,
                        "urgency_reason": item.urgency_reason,
                        "waste_risk": item.waste_risk,
                        "stockout_risk": item.stockout_risk,
                        "supplier_id": str(item.supplier_id) if item.supplier_id else None
                    }
                    for item in plan.items
                ]
            }

        except Exception as e:
            logger.error("Failed to get replenishment plan", error=str(e), plan_id=plan_id)
            raise
|
||||
|
||||
class InventoryProjectionRepository(BaseRepository[InventoryProjection]):
    """Repository for inventory projection operations.

    Stateless like ReplenishmentPlanRepository: the session is passed per
    call, and rows are serialized to plain dicts.
    """

    def __init__(self):
        super().__init__(InventoryProjection)

    async def list_projections(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        ingredient_id: Optional[UUID] = None,
        projection_date: Optional[date] = None,
        stockout_only: bool = False,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """List inventory projections as dicts, earliest projection_date first.

        Optional filters: a single ingredient, a single date, and/or only
        days projected to stock out.
        """
        try:
            query = select(InventoryProjection).where(
                InventoryProjection.tenant_id == tenant_id
            )

            if ingredient_id:
                query = query.where(InventoryProjection.ingredient_id == ingredient_id)

            if projection_date:
                query = query.where(InventoryProjection.projection_date == projection_date)

            if stockout_only:
                # '== True' is the SQLAlchemy column-comparison idiom, not a
                # Python truthiness test — it renders as "is_stockout = true".
                query = query.where(InventoryProjection.is_stockout == True)

            query = query.offset(skip).limit(limit).order_by(
                InventoryProjection.projection_date.asc()
            )

            result = await db.execute(query)
            projections = result.scalars().all()

            # Serialize: UUIDs -> str, Numeric -> float for JSON friendliness.
            return [
                {
                    "id": str(proj.id),
                    "tenant_id": str(proj.tenant_id),
                    "ingredient_id": str(proj.ingredient_id),
                    "ingredient_name": proj.ingredient_name,
                    "projection_date": proj.projection_date,
                    "starting_stock": float(proj.starting_stock),
                    "forecasted_consumption": float(proj.forecasted_consumption),
                    "scheduled_receipts": float(proj.scheduled_receipts),
                    "projected_ending_stock": float(proj.projected_ending_stock),
                    "is_stockout": proj.is_stockout,
                    "coverage_gap": float(proj.coverage_gap),
                    "created_at": proj.created_at
                }
                for proj in projections
            ]

        except Exception as e:
            # Log with context, then propagate unchanged.
            logger.error("Failed to list inventory projections", error=str(e), tenant_id=tenant_id)
            raise
||||
|
||||
class SupplierAllocationRepository(BaseRepository[SupplierAllocation]):
    """Repository for supplier allocation operations."""

    def __init__(self):
        super().__init__(SupplierAllocation)

    async def list_allocations(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        requirement_id: Optional[UUID] = None,
        supplier_id: Optional[UUID] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """List supplier allocations as JSON-friendly dicts.

        Note: the SupplierAllocation model doesn't have tenant_id, so the
        ``tenant_id`` parameter is accepted for interface symmetry but is
        NOT applied to the query.

        WARNING(review): because no tenant scope is applied, a call without
        ``requirement_id``/``supplier_id`` returns allocations across all
        tenants — confirm callers always pass a scoping filter, or join
        through the requirement's tenant.

        Args:
            db: Async database session.
            tenant_id: Tenant context (currently unused in filtering).
            requirement_id: Optional filter on the parent requirement.
            supplier_id: Optional filter on the supplier.
            skip: Pagination offset.
            limit: Maximum number of rows to return.

        Raises:
            Exception: any database error is logged and re-raised.
        """
        try:
            # Build base query - no tenant_id filter since model doesn't have it
            query = select(SupplierAllocation)

            # Explicit None checks: clearer intent than truthiness for
            # Optional filter arguments.
            if requirement_id is not None:
                query = query.where(SupplierAllocation.requirement_id == requirement_id)

            if supplier_id is not None:
                query = query.where(SupplierAllocation.supplier_id == supplier_id)

            # Newest allocations first; ordering before pagination compiles
            # to the same SQL but reads in the natural order.
            query = query.order_by(
                SupplierAllocation.created_at.desc()
            ).offset(skip).limit(limit)

            result = await db.execute(query)
            allocations = result.scalars().all()

            return [
                {
                    "id": str(alloc.id),
                    "requirement_id": str(alloc.requirement_id) if alloc.requirement_id else None,
                    "replenishment_plan_item_id": str(alloc.replenishment_plan_item_id) if alloc.replenishment_plan_item_id else None,
                    "supplier_id": str(alloc.supplier_id),
                    "supplier_name": alloc.supplier_name,
                    "allocation_type": alloc.allocation_type,
                    "allocated_quantity": float(alloc.allocated_quantity),
                    "allocation_percentage": float(alloc.allocation_percentage),
                    "unit_price": float(alloc.unit_price),
                    "total_cost": float(alloc.total_cost),
                    "lead_time_days": alloc.lead_time_days,
                    "supplier_score": float(alloc.supplier_score),
                    "allocation_reason": alloc.allocation_reason,
                    "created_at": alloc.created_at
                }
                for alloc in allocations
            ]

        except Exception as e:
            logger.error("Failed to list supplier allocations", error=str(e))
            raise
|
||||
|
||||
|
||||
class ReplenishmentAnalyticsRepository:
    """Repository for replenishment analytics"""

    async def get_analytics(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None
    ) -> Dict[str, Any]:
        """Aggregate replenishment-plan metrics for a tenant.

        Fetches all plans in the optional [start_date, end_date] window and
        returns totals, a per-status breakdown, and derived ratios.

        Raises:
            Exception: any database error is logged and re-raised.
        """
        try:
            stmt = select(ReplenishmentPlan).where(
                ReplenishmentPlan.tenant_id == tenant_id
            )
            if start_date:
                stmt = stmt.where(ReplenishmentPlan.planning_date >= start_date)
            if end_date:
                stmt = stmt.where(ReplenishmentPlan.planning_date <= end_date)

            fetched = await db.execute(stmt)
            plans = fetched.scalars().all()

            # Accumulate every metric in a single pass over the plans.
            plan_count = len(plans)
            item_sum = 0
            urgent_sum = 0
            high_risk_sum = 0
            cost_sum = 0
            status_counts = {}
            for plan in plans:
                item_sum += plan.total_items
                urgent_sum += plan.urgent_items
                high_risk_sum += plan.high_risk_items
                cost_sum += plan.total_estimated_cost
                status_counts[plan.status] = status_counts.get(plan.status, 0) + 1

            # Guard divisions so empty windows yield 0 rather than raising.
            avg_items = item_sum / plan_count if plan_count > 0 else 0
            urgent_pct = (urgent_sum / item_sum * 100) if item_sum > 0 else 0

            return {
                "total_plans": plan_count,
                "total_items": item_sum,
                "total_urgent_items": urgent_sum,
                "total_high_risk_items": high_risk_sum,
                "total_estimated_cost": float(cost_sum),
                "status_breakdown": status_counts,
                "average_items_per_plan": avg_items,
                "urgent_item_percentage": urgent_pct
            }

        except Exception as e:
            logger.error("Failed to get replenishment analytics", error=str(e), tenant_id=tenant_id)
            raise
|
||||
79
services/procurement/app/schemas/__init__.py
Normal file
79
services/procurement/app/schemas/__init__.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/schemas/__init__.py
|
||||
# ================================================================
|
||||
"""
|
||||
Pydantic schemas for Procurement Service
|
||||
"""
|
||||
|
||||
from .procurement_schemas import (
|
||||
ProcurementRequirementBase,
|
||||
ProcurementRequirementCreate,
|
||||
ProcurementRequirementUpdate,
|
||||
ProcurementRequirementResponse,
|
||||
ProcurementPlanBase,
|
||||
ProcurementPlanCreate,
|
||||
ProcurementPlanUpdate,
|
||||
ProcurementPlanResponse,
|
||||
ProcurementSummary,
|
||||
DashboardData,
|
||||
GeneratePlanRequest,
|
||||
GeneratePlanResponse,
|
||||
AutoGenerateProcurementRequest,
|
||||
AutoGenerateProcurementResponse,
|
||||
PaginatedProcurementPlans,
|
||||
)
|
||||
|
||||
from .purchase_order_schemas import (
|
||||
PurchaseOrderCreate,
|
||||
PurchaseOrderUpdate,
|
||||
PurchaseOrderApproval,
|
||||
PurchaseOrderResponse,
|
||||
PurchaseOrderSummary,
|
||||
PurchaseOrderItemCreate,
|
||||
PurchaseOrderItemResponse,
|
||||
DeliveryCreate,
|
||||
DeliveryUpdate,
|
||||
DeliveryResponse,
|
||||
DeliveryItemCreate,
|
||||
DeliveryItemResponse,
|
||||
SupplierInvoiceCreate,
|
||||
SupplierInvoiceUpdate,
|
||||
SupplierInvoiceResponse,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Procurement Plan schemas
|
||||
"ProcurementRequirementBase",
|
||||
"ProcurementRequirementCreate",
|
||||
"ProcurementRequirementUpdate",
|
||||
"ProcurementRequirementResponse",
|
||||
"ProcurementPlanBase",
|
||||
"ProcurementPlanCreate",
|
||||
"ProcurementPlanUpdate",
|
||||
"ProcurementPlanResponse",
|
||||
"ProcurementSummary",
|
||||
"DashboardData",
|
||||
"GeneratePlanRequest",
|
||||
"GeneratePlanResponse",
|
||||
"AutoGenerateProcurementRequest",
|
||||
"AutoGenerateProcurementResponse",
|
||||
"PaginatedProcurementPlans",
|
||||
# Purchase Order schemas
|
||||
"PurchaseOrderCreate",
|
||||
"PurchaseOrderUpdate",
|
||||
"PurchaseOrderApproval",
|
||||
"PurchaseOrderResponse",
|
||||
"PurchaseOrderSummary",
|
||||
"PurchaseOrderItemCreate",
|
||||
"PurchaseOrderItemResponse",
|
||||
# Delivery schemas
|
||||
"DeliveryCreate",
|
||||
"DeliveryUpdate",
|
||||
"DeliveryResponse",
|
||||
"DeliveryItemCreate",
|
||||
"DeliveryItemResponse",
|
||||
# Invoice schemas
|
||||
"SupplierInvoiceCreate",
|
||||
"SupplierInvoiceUpdate",
|
||||
"SupplierInvoiceResponse",
|
||||
]
|
||||
368
services/procurement/app/schemas/procurement_schemas.py
Normal file
368
services/procurement/app/schemas/procurement_schemas.py
Normal file
@@ -0,0 +1,368 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/schemas/procurement_schemas.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Schemas - Request/response models for procurement plans
|
||||
Migrated from Orders Service with additions for local production support
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from typing import Optional, List, Dict, Any
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
|
||||
|
||||
# ================================================================
|
||||
# BASE SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class ProcurementBase(BaseModel):
    """Base schema for procurement entities.

    All procurement schemas inherit this config: attribute-based
    construction (ORM mode) and whitespace-stripping on string fields.
    """
    model_config = ConfigDict(from_attributes=True, str_strip_whitespace=True)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PROCUREMENT REQUIREMENT SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class ProcurementRequirementBase(ProcurementBase):
    """Base procurement requirement schema.

    Fields shared by the create/response variants of one line item in a
    procurement plan: product identity, quantities, stock position, demand
    breakdown, scheduling, priority/risk, supplier preference and costs.
    """
    # Product identity
    product_id: uuid.UUID
    product_name: str = Field(..., min_length=1, max_length=200)
    product_sku: Optional[str] = Field(None, max_length=100)
    product_category: Optional[str] = Field(None, max_length=100)
    product_type: str = Field(default="ingredient", max_length=50)

    # Quantities (total_quantity_needed presumably = required + safety stock
    # — TODO confirm against the plan generator)
    required_quantity: Decimal = Field(..., gt=0)
    unit_of_measure: str = Field(..., min_length=1, max_length=50)
    safety_stock_quantity: Decimal = Field(default=Decimal("0.000"), ge=0)
    total_quantity_needed: Decimal = Field(..., gt=0)

    # Current stock position
    current_stock_level: Decimal = Field(default=Decimal("0.000"), ge=0)
    reserved_stock: Decimal = Field(default=Decimal("0.000"), ge=0)
    available_stock: Decimal = Field(default=Decimal("0.000"), ge=0)
    net_requirement: Decimal = Field(..., ge=0)

    # Demand breakdown by source
    order_demand: Decimal = Field(default=Decimal("0.000"), ge=0)
    production_demand: Decimal = Field(default=Decimal("0.000"), ge=0)
    forecast_demand: Decimal = Field(default=Decimal("0.000"), ge=0)
    buffer_demand: Decimal = Field(default=Decimal("0.000"), ge=0)

    # Scheduling
    required_by_date: date
    lead_time_buffer_days: int = Field(default=1, ge=0)
    suggested_order_date: date
    latest_order_date: date

    # Priority / risk classification (closed value sets via regex)
    priority: str = Field(default="normal", pattern="^(critical|high|normal|low)$")
    risk_level: str = Field(default="low", pattern="^(low|medium|high|critical)$")

    # Supplier preferences
    preferred_supplier_id: Optional[uuid.UUID] = None
    backup_supplier_id: Optional[uuid.UUID] = None
    supplier_name: Optional[str] = Field(None, max_length=200)
    supplier_lead_time_days: Optional[int] = Field(None, ge=0)
    minimum_order_quantity: Optional[Decimal] = Field(None, ge=0)

    # Cost estimates
    estimated_unit_cost: Optional[Decimal] = Field(None, ge=0)
    estimated_total_cost: Optional[Decimal] = Field(None, ge=0)
    last_purchase_cost: Optional[Decimal] = Field(None, ge=0)
|
||||
|
||||
|
||||
class ProcurementRequirementCreate(ProcurementRequirementBase):
    """Schema for creating procurement requirements.

    Extends the base fields with free-text specifications, calculation
    metadata, and local-production (BOM) linkage.
    """
    # Free-text specifications
    special_requirements: Optional[str] = None
    storage_requirements: Optional[str] = Field(None, max_length=200)
    shelf_life_days: Optional[int] = Field(None, gt=0)
    quality_specifications: Optional[Dict[str, Any]] = None
    procurement_notes: Optional[str] = None

    # Smart procurement calculation metadata
    calculation_method: Optional[str] = Field(None, max_length=100)
    ai_suggested_quantity: Optional[Decimal] = Field(None, ge=0)
    adjusted_quantity: Optional[Decimal] = Field(None, ge=0)
    adjustment_reason: Optional[str] = None
    price_tier_applied: Optional[Dict[str, Any]] = None
    supplier_minimum_applied: bool = False
    storage_limit_applied: bool = False
    reorder_rule_applied: bool = False

    # NEW: Local production support fields
    is_locally_produced: bool = False
    recipe_id: Optional[uuid.UUID] = None
    parent_requirement_id: Optional[uuid.UUID] = None
    bom_explosion_level: int = Field(default=0, ge=0)
|
||||
|
||||
|
||||
class ProcurementRequirementUpdate(ProcurementBase):
    """Schema for updating procurement requirements.

    All fields optional — only supplied fields are applied (PATCH style).
    """
    status: Optional[str] = Field(None, pattern="^(pending|approved|ordered|partially_received|received|cancelled)$")
    priority: Optional[str] = Field(None, pattern="^(critical|high|normal|low)$")

    # Approval outcome
    approved_quantity: Optional[Decimal] = Field(None, ge=0)
    approved_cost: Optional[Decimal] = Field(None, ge=0)

    # Linked purchase order
    purchase_order_id: Optional[uuid.UUID] = None
    purchase_order_number: Optional[str] = Field(None, max_length=50)
    ordered_quantity: Optional[Decimal] = Field(None, ge=0)

    # Delivery tracking
    expected_delivery_date: Optional[date] = None
    actual_delivery_date: Optional[date] = None
    received_quantity: Optional[Decimal] = Field(None, ge=0)
    delivery_status: Optional[str] = Field(None, pattern="^(pending|in_transit|delivered|delayed|cancelled)$")

    procurement_notes: Optional[str] = None
|
||||
|
||||
|
||||
class ProcurementRequirementResponse(ProcurementRequirementBase):
    """Schema for procurement requirement responses.

    Server-populated view of a requirement: identifiers, lifecycle status,
    ordering/delivery progress, performance metrics, approval data, and the
    same metadata fields accepted at creation.
    """
    # Identity / audit
    id: uuid.UUID
    plan_id: uuid.UUID
    requirement_number: str

    status: str
    created_at: datetime
    updated_at: datetime

    # Linked purchase order
    purchase_order_id: Optional[uuid.UUID] = None
    purchase_order_number: Optional[str] = None
    ordered_quantity: Decimal
    ordered_at: Optional[datetime] = None

    # Delivery tracking
    expected_delivery_date: Optional[date] = None
    actual_delivery_date: Optional[date] = None
    received_quantity: Decimal
    delivery_status: str

    # Performance metrics
    fulfillment_rate: Optional[Decimal] = None
    on_time_delivery: Optional[bool] = None
    quality_rating: Optional[Decimal] = None

    # Approval outcome
    approved_quantity: Optional[Decimal] = None
    approved_cost: Optional[Decimal] = None
    approved_at: Optional[datetime] = None
    approved_by: Optional[uuid.UUID] = None

    # Free-text specifications
    special_requirements: Optional[str] = None
    storage_requirements: Optional[str] = None
    shelf_life_days: Optional[int] = None
    quality_specifications: Optional[Dict[str, Any]] = None
    procurement_notes: Optional[str] = None

    # Smart procurement calculation metadata
    calculation_method: Optional[str] = None
    ai_suggested_quantity: Optional[Decimal] = None
    adjusted_quantity: Optional[Decimal] = None
    adjustment_reason: Optional[str] = None
    price_tier_applied: Optional[Dict[str, Any]] = None
    supplier_minimum_applied: bool = False
    storage_limit_applied: bool = False
    reorder_rule_applied: bool = False

    # NEW: Local production support fields
    is_locally_produced: bool = False
    recipe_id: Optional[uuid.UUID] = None
    parent_requirement_id: Optional[uuid.UUID] = None
    bom_explosion_level: int = 0
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PROCUREMENT PLAN SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class ProcurementPlanBase(ProcurementBase):
    """Base procurement plan schema.

    Covers the planning window, plan classification, strategy, and risk
    parameters shared by create/response variants.
    """
    # Planning window
    plan_date: date
    plan_period_start: date
    plan_period_end: date
    planning_horizon_days: int = Field(default=14, gt=0)

    # Classification
    plan_type: str = Field(default="regular", pattern="^(regular|emergency|seasonal|urgent)$")
    priority: str = Field(default="normal", pattern="^(critical|high|normal|low)$")

    # Business context / strategy
    business_model: Optional[str] = Field(None, pattern="^(individual_bakery|central_bakery)$")
    procurement_strategy: str = Field(default="just_in_time", pattern="^(just_in_time|bulk|mixed|bulk_order)$")

    # Risk parameters (safety_stock_buffer is a percentage 0-100)
    safety_stock_buffer: Decimal = Field(default=Decimal("20.00"), ge=0, le=100)
    supply_risk_level: str = Field(default="low", pattern="^(low|medium|high|critical)$")
    demand_forecast_confidence: Optional[Decimal] = Field(None, ge=1, le=10)
    seasonality_adjustment: Decimal = Field(default=Decimal("0.00"))

    special_requirements: Optional[str] = None
|
||||
|
||||
|
||||
class ProcurementPlanCreate(ProcurementPlanBase):
    """Schema for creating procurement plans.

    Requirements may be supplied inline; pydantic copies the mutable []
    default per instance, so it is safe here.
    """
    tenant_id: uuid.UUID
    requirements: Optional[List[ProcurementRequirementCreate]] = []
|
||||
|
||||
|
||||
class ProcurementPlanUpdate(ProcurementBase):
    """Schema for updating procurement plans.

    All fields optional — only supplied fields are applied (PATCH style).
    """
    status: Optional[str] = Field(None, pattern="^(draft|pending_approval|approved|in_execution|completed|cancelled)$")
    priority: Optional[str] = Field(None, pattern="^(critical|high|normal|low)$")

    # Lifecycle timestamps / actor
    approved_at: Optional[datetime] = None
    approved_by: Optional[uuid.UUID] = None
    execution_started_at: Optional[datetime] = None
    execution_completed_at: Optional[datetime] = None

    special_requirements: Optional[str] = None
    seasonal_adjustments: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class ProcurementPlanResponse(ProcurementPlanBase):
    """Schema for procurement plan responses.

    Server-populated view of a plan: identity, aggregate totals, supplier
    stats, lifecycle timestamps, performance metrics, audit fields, and the
    nested requirement list.
    """
    # Identity
    id: uuid.UUID
    tenant_id: uuid.UUID
    plan_number: str
    status: str

    # Aggregate totals
    total_requirements: int
    total_estimated_cost: Decimal
    total_approved_cost: Decimal
    cost_variance: Decimal

    # Demand aggregates
    total_demand_orders: int
    total_demand_quantity: Decimal
    total_production_requirements: Decimal

    # Supplier statistics
    primary_suppliers_count: int
    backup_suppliers_count: int
    supplier_diversification_score: Optional[Decimal] = None

    # Lifecycle timestamps / actors
    approved_at: Optional[datetime] = None
    approved_by: Optional[uuid.UUID] = None
    execution_started_at: Optional[datetime] = None
    execution_completed_at: Optional[datetime] = None

    # Performance metrics
    fulfillment_rate: Optional[Decimal] = None
    on_time_delivery_rate: Optional[Decimal] = None
    cost_accuracy: Optional[Decimal] = None
    quality_score: Optional[Decimal] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: Optional[uuid.UUID] = None
    updated_by: Optional[uuid.UUID] = None

    # NEW: Track forecast and production schedule links
    forecast_id: Optional[uuid.UUID] = None
    production_schedule_id: Optional[uuid.UUID] = None

    # Nested line items (pydantic copies the [] default per instance)
    requirements: List[ProcurementRequirementResponse] = []
|
||||
|
||||
|
||||
# ================================================================
|
||||
# SUMMARY SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class ProcurementSummary(ProcurementBase):
    """Summary of procurement plans.

    Aggregated counts, costs and rates across plans, plus highlight lists.
    """
    # Plan / requirement counts
    total_plans: int
    active_plans: int
    total_requirements: int
    pending_requirements: int
    critical_requirements: int

    # Cost aggregates
    total_estimated_cost: Decimal
    total_approved_cost: Decimal
    cost_variance: Decimal

    # Average performance rates
    average_fulfillment_rate: Optional[Decimal] = None
    average_on_time_delivery: Optional[Decimal] = None

    # Highlight lists (shape defined by the producing service)
    top_suppliers: List[Dict[str, Any]] = []
    critical_items: List[Dict[str, Any]] = []
|
||||
|
||||
|
||||
class DashboardData(ProcurementBase):
    """Dashboard data for procurement overview.

    Bundles the current plan, a summary, and alert/metric collections
    consumed by the dashboard UI.
    """
    current_plan: Optional[ProcurementPlanResponse] = None
    summary: ProcurementSummary

    # Alert collections (shape defined by the producing service)
    upcoming_deliveries: List[Dict[str, Any]] = []
    overdue_requirements: List[Dict[str, Any]] = []
    low_stock_alerts: List[Dict[str, Any]] = []

    performance_metrics: Dict[str, Any] = {}
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class GeneratePlanRequest(ProcurementBase):
    """Request to generate a procurement plan.

    Defaults produce a 14-day plan with a 20% safety-stock buffer;
    force_regenerate replaces an existing plan for the same date.
    """
    plan_date: Optional[date] = None
    force_regenerate: bool = False
    planning_horizon_days: int = Field(default=14, gt=0, le=30)
    include_safety_stock: bool = True
    safety_stock_percentage: Decimal = Field(default=Decimal("20.00"), ge=0, le=100)
|
||||
|
||||
|
||||
class AutoGenerateProcurementRequest(ProcurementBase):
    """
    Request to auto-generate procurement plan (called by Orchestrator)

    This is the main entry point for orchestrated procurement planning.
    The Orchestrator calls Forecasting Service first, then passes forecast data here.

    NEW: Accepts cached data snapshots from Orchestrator to eliminate duplicate API calls.
    """
    forecast_data: Dict[str, Any] = Field(..., description="Forecast data from Forecasting Service")
    production_schedule_id: Optional[uuid.UUID] = Field(None, description="Production schedule ID if available")
    target_date: Optional[date] = Field(None, description="Target date for the plan")
    planning_horizon_days: int = Field(default=14, gt=0, le=30)
    safety_stock_percentage: Decimal = Field(default=Decimal("20.00"), ge=0, le=100)
    auto_create_pos: bool = Field(True, description="Automatically create purchase orders")
    auto_approve_pos: bool = Field(False, description="Auto-approve qualifying purchase orders")

    # NEW: Cached data from Orchestrator (optional — when absent, the
    # service presumably fetches from the owning services; verify in handler)
    inventory_data: Optional[Dict[str, Any]] = Field(None, description="Cached inventory snapshot from Orchestrator")
    suppliers_data: Optional[Dict[str, Any]] = Field(None, description="Cached suppliers snapshot from Orchestrator")
    recipes_data: Optional[Dict[str, Any]] = Field(None, description="Cached recipes snapshot from Orchestrator")
|
||||
|
||||
|
||||
class ForecastRequest(ProcurementBase):
    """Request parameters for demand forecasting.

    Horizon is capped at 7 days; product_ids=None means all products.
    """
    target_date: date
    horizon_days: int = Field(default=1, gt=0, le=7)
    include_confidence_intervals: bool = True
    product_ids: Optional[List[uuid.UUID]] = None
|
||||
|
||||
|
||||
# ================================================================
|
||||
# RESPONSE SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class GeneratePlanResponse(ProcurementBase):
    """Response from plan generation.

    ``plan`` is populated on success; warnings/errors carry any
    human-readable diagnostics produced during generation.
    """
    success: bool
    message: str
    plan: Optional[ProcurementPlanResponse] = None
    warnings: List[str] = []
    errors: List[str] = []
|
||||
|
||||
|
||||
class AutoGenerateProcurementResponse(ProcurementBase):
    """Response from auto-generate procurement (called by Orchestrator)"""
    success: bool
    message: str

    # Created plan identity (None when generation failed)
    plan_id: Optional[uuid.UUID] = None
    plan_number: Optional[str] = None

    # Creation counters
    requirements_created: int = 0
    purchase_orders_created: int = 0
    purchase_orders_auto_approved: int = 0
    total_estimated_cost: Decimal = Decimal("0")

    # Diagnostics and created-PO payloads for the Orchestrator
    warnings: List[str] = []
    errors: List[str] = []
    created_pos: List[Dict[str, Any]] = []
|
||||
|
||||
|
||||
class PaginatedProcurementPlans(ProcurementBase):
    """Paginated list of procurement plans.

    ``has_more`` indicates further pages beyond the current page/limit.
    """
    plans: List[ProcurementPlanResponse]
    total: int
    page: int
    limit: int
    has_more: bool
|
||||
395
services/procurement/app/schemas/purchase_order_schemas.py
Normal file
395
services/procurement/app/schemas/purchase_order_schemas.py
Normal file
@@ -0,0 +1,395 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/schemas/purchase_order_schemas.py
|
||||
# ================================================================
|
||||
"""
|
||||
Purchase Order Schemas - Request/response models for purchase orders
|
||||
Migrated from Suppliers Service with procurement-specific additions
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from typing import Optional, List, Dict, Any
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
|
||||
|
||||
# ================================================================
|
||||
# BASE SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class PurchaseOrderBase(BaseModel):
    """Base schema for purchase order entities.

    All purchase-order schemas inherit this config: attribute-based
    construction (ORM mode) and whitespace-stripping on string fields.
    """
    model_config = ConfigDict(from_attributes=True, str_strip_whitespace=True)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER ITEM SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class PurchaseOrderItemCreate(PurchaseOrderBase):
    """Schema for creating purchase order items."""
    inventory_product_id: uuid.UUID  # Changed from ingredient_id to match model
    ordered_quantity: Decimal = Field(..., gt=0)
    unit_price: Decimal = Field(..., gt=0)
    unit_of_measure: str = Field(..., max_length=50)
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderItemUpdate(PurchaseOrderBase):
    """Schema for updating purchase order items.

    All fields optional — only supplied fields are applied (PATCH style).
    """
    ordered_quantity: Optional[Decimal] = Field(None, gt=0)
    unit_price: Optional[Decimal] = Field(None, gt=0)
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderItemResponse(PurchaseOrderBase):
    """Schema for purchase order item responses.

    ``line_total`` is server-computed; ``received_quantity`` tracks
    delivery progress against ``ordered_quantity``.
    """
    id: uuid.UUID
    tenant_id: uuid.UUID
    purchase_order_id: uuid.UUID
    inventory_product_id: uuid.UUID  # Changed from ingredient_id to match model
    product_name: Optional[str] = None
    ordered_quantity: Decimal
    received_quantity: Decimal
    unit_price: Decimal
    unit_of_measure: str
    line_total: Decimal
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class PurchaseOrderCreate(PurchaseOrderBase):
    """Schema for creating purchase orders.

    At least one item is required; the line items are validated by
    PurchaseOrderItemCreate.
    """
    supplier_id: uuid.UUID
    required_delivery_date: datetime  # Use datetime with timezone
    priority: str = Field(default="normal", pattern="^(low|normal|high|critical)$")

    # Financial information (subtotal is caller-supplied, not derived here)
    tax_amount: Decimal = Field(default=Decimal("0"), ge=0)
    shipping_cost: Decimal = Field(default=Decimal("0"), ge=0)
    discount_amount: Decimal = Field(default=Decimal("0"), ge=0)
    subtotal: Decimal = Field(..., ge=0)

    # Additional information
    notes: Optional[str] = None

    # NEW: Procurement-specific fields
    procurement_plan_id: Optional[uuid.UUID] = None

    # Items
    items: List[PurchaseOrderItemCreate] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class PurchaseOrderUpdate(PurchaseOrderBase):
    """Schema for updating purchase orders.

    All fields optional — only supplied fields are applied (PATCH style).
    """
    required_delivery_date: Optional[datetime] = None  # Use datetime with timezone
    priority: Optional[str] = Field(None, pattern="^(low|normal|high|critical)$")

    # Financial information
    tax_amount: Optional[Decimal] = Field(None, ge=0)
    shipping_cost: Optional[Decimal] = Field(None, ge=0)
    discount_amount: Optional[Decimal] = Field(None, ge=0)

    # Additional information
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderApproval(PurchaseOrderBase):
    """Schema for purchase order approval/rejection.

    ``action`` is restricted to "approve" or "reject"; ``approved_by``
    identifies the acting user when not derived from the auth context.
    """
    action: str = Field(..., pattern="^(approve|reject)$")
    notes: Optional[str] = None
    approved_by: Optional[uuid.UUID] = None
|
||||
|
||||
|
||||
class SupplierSummary(PurchaseOrderBase):
    """Schema for supplier summary - matches the structure returned by suppliers service.

    Note: ``id`` is a plain str (not uuid.UUID) because it mirrors the
    remote service's JSON payload as-is.
    """
    id: str
    name: str
    supplier_code: Optional[str] = None

    # Contact details
    email: Optional[str] = None
    phone: Optional[str] = None
    contact_person: Optional[str] = None
    address_line1: Optional[str] = None
    city: Optional[str] = None
    country: Optional[str] = None
    supplier_type: Optional[str] = None
    status: Optional[str] = None
    mobile: Optional[str] = None
    website: Optional[str] = None

    # Commercial terms and performance
    payment_terms: Optional[str] = None
    standard_lead_time: Optional[int] = None
    quality_rating: Optional[float] = None
    delivery_rating: Optional[float] = None
    total_orders: Optional[int] = None
    total_amount: Optional[float] = None
|
||||
|
||||
|
||||
class PurchaseOrderResponse(PurchaseOrderBase):
    """Schema for purchase order responses.

    Full server-side view of a PO: identity, dates, financials, approval
    workflow state, procurement linkage, audit fields, and nested items.
    """
    # Identity
    id: uuid.UUID
    tenant_id: uuid.UUID
    supplier_id: uuid.UUID
    supplier_name: Optional[str] = None
    po_number: str
    status: str
    priority: str

    # Dates
    order_date: datetime
    required_delivery_date: Optional[datetime] = None  # Use datetime with timezone
    estimated_delivery_date: Optional[datetime] = None  # Use datetime with timezone
    actual_delivery_date: Optional[datetime] = None  # Use datetime with timezone

    # Financial information
    subtotal: Decimal
    tax_amount: Decimal
    shipping_cost: Decimal
    discount_amount: Decimal
    total_amount: Decimal
    currency: str

    # Approval workflow
    approved_by: Optional[uuid.UUID] = None
    approved_at: Optional[datetime] = None
    rejection_reason: Optional[str] = None

    # NEW: Procurement-specific fields
    procurement_plan_id: Optional[uuid.UUID] = None
    auto_approved: bool = False
    auto_approval_rule_id: Optional[uuid.UUID] = None

    # Additional information
    notes: Optional[str] = None

    # AI/ML reasoning for procurement decisions (JTBD dashboard support)
    reasoning_data: Optional[Dict[str, Any]] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: Optional[uuid.UUID] = None
    updated_by: Optional[uuid.UUID] = None

    # Related data (pydantic copies the [] default per instance)
    items: List[PurchaseOrderItemResponse] = []
|
||||
|
||||
|
||||
class PurchaseOrderWithSupplierResponse(PurchaseOrderResponse):
    """Schema for purchase order responses with supplier information.

    Extends PurchaseOrderResponse with the supplier summary fetched from
    the suppliers service (None when unavailable).
    """
    supplier: Optional[SupplierSummary] = None
|
||||
|
||||
|
||||
class PurchaseOrderSummary(PurchaseOrderBase):
    """Schema for purchase order summary (list view).

    Trimmed-down projection of PurchaseOrderResponse for list endpoints.
    """
    id: uuid.UUID
    po_number: str
    supplier_id: uuid.UUID
    supplier_name: Optional[str] = None
    status: str
    priority: str
    order_date: datetime
    required_delivery_date: datetime  # Use datetime with timezone
    total_amount: Decimal
    currency: str
    auto_approved: bool = False
    created_at: datetime
|
||||
|
||||
|
||||
# ================================================================
|
||||
# DELIVERY SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class DeliveryItemCreate(PurchaseOrderBase):
    """Schema for creating delivery items.

    Records per-line receipt quantities against a PO item
    (delivered = accepted + rejected is presumably enforced by the
    service layer — verify; no cross-field validator exists here).
    """
    purchase_order_item_id: uuid.UUID
    inventory_product_id: uuid.UUID  # Changed from ingredient_id to match model
    ordered_quantity: Decimal = Field(..., gt=0)
    delivered_quantity: Decimal = Field(..., ge=0)
    accepted_quantity: Decimal = Field(..., ge=0)
    rejected_quantity: Decimal = Field(default=Decimal("0"), ge=0)

    # Quality information
    batch_lot_number: Optional[str] = Field(None, max_length=100)
    expiry_date: Optional[datetime] = None  # Use datetime with timezone
    quality_grade: Optional[str] = Field(None, max_length=20)

    # Issues and notes
    quality_issues: Optional[str] = None
    rejection_reason: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class DeliveryItemResponse(PurchaseOrderBase):
    """Delivery line item as returned by the API (includes ids and audit fields)."""
    id: uuid.UUID
    tenant_id: uuid.UUID
    delivery_id: uuid.UUID
    purchase_order_item_id: uuid.UUID
    inventory_product_id: uuid.UUID  # Changed from ingredient_id to match model
    ingredient_name: Optional[str] = None  # denormalized for display
    ordered_quantity: Decimal
    delivered_quantity: Decimal
    accepted_quantity: Decimal
    rejected_quantity: Decimal
    batch_lot_number: Optional[str] = None
    expiry_date: Optional[datetime] = None  # Use datetime with timezone
    quality_grade: Optional[str] = None
    quality_issues: Optional[str] = None
    rejection_reason: Optional[str] = None
    item_notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class DeliveryCreate(PurchaseOrderBase):
    """Payload for creating a delivery against an existing purchase order.

    At least one line item is required (min_length=1 on `items`).
    """
    purchase_order_id: uuid.UUID
    supplier_id: uuid.UUID
    supplier_delivery_note: Optional[str] = Field(None, max_length=100)
    scheduled_date: Optional[datetime] = None  # Use datetime with timezone
    estimated_arrival: Optional[datetime] = None

    # Delivery details
    carrier_name: Optional[str] = Field(None, max_length=200)
    tracking_number: Optional[str] = Field(None, max_length=100)

    # Additional information
    notes: Optional[str] = None

    # Items
    items: List[DeliveryItemCreate] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class DeliveryUpdate(PurchaseOrderBase):
    """Partial update for a delivery; all fields optional (unset fields unchanged)."""
    supplier_delivery_note: Optional[str] = Field(None, max_length=100)
    scheduled_date: Optional[datetime] = None  # Use datetime with timezone
    estimated_arrival: Optional[datetime] = None
    actual_arrival: Optional[datetime] = None

    # Delivery details
    carrier_name: Optional[str] = Field(None, max_length=200)
    tracking_number: Optional[str] = Field(None, max_length=100)

    # Quality inspection
    inspection_passed: Optional[bool] = None
    inspection_notes: Optional[str] = None
    quality_issues: Optional[Dict[str, Any]] = None  # free-form structured issue report

    # Additional information
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class DeliveryResponse(PurchaseOrderBase):
    """Full delivery representation returned by the API, including items."""
    id: uuid.UUID
    tenant_id: uuid.UUID
    purchase_order_id: uuid.UUID
    supplier_id: uuid.UUID
    supplier_name: Optional[str] = None  # denormalized for display
    delivery_number: str
    supplier_delivery_note: Optional[str] = None
    status: str

    # Timing
    scheduled_date: Optional[datetime] = None  # Use datetime with timezone
    estimated_arrival: Optional[datetime] = None
    actual_arrival: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    # Delivery details
    carrier_name: Optional[str] = None
    tracking_number: Optional[str] = None

    # Quality inspection
    inspection_passed: Optional[bool] = None
    inspection_notes: Optional[str] = None
    quality_issues: Optional[Dict[str, Any]] = None

    # Receipt information
    received_by: Optional[uuid.UUID] = None
    received_at: Optional[datetime] = None

    # Additional information
    notes: Optional[str] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: uuid.UUID

    # Related data
    items: List[DeliveryItemResponse] = []
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INVOICE SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class SupplierInvoiceCreate(PurchaseOrderBase):
    """Payload for registering a supplier invoice against a purchase order.

    NOTE(review): total_amount is not part of the payload — presumably derived
    as subtotal + tax + shipping - discount server-side; confirm.
    """
    purchase_order_id: uuid.UUID
    supplier_id: uuid.UUID
    invoice_number: str = Field(..., max_length=100)
    invoice_date: datetime  # Use datetime with timezone
    due_date: datetime  # Use datetime with timezone

    # Financial information
    subtotal: Decimal = Field(..., ge=0)
    tax_amount: Decimal = Field(default=Decimal("0"), ge=0)
    shipping_cost: Decimal = Field(default=Decimal("0"), ge=0)
    discount_amount: Decimal = Field(default=Decimal("0"), ge=0)

    # Additional information
    notes: Optional[str] = None
    payment_reference: Optional[str] = Field(None, max_length=100)
|
||||
|
||||
|
||||
class SupplierInvoiceUpdate(PurchaseOrderBase):
    """Partial update for a supplier invoice (only mutable fields exposed)."""
    due_date: Optional[datetime] = None  # Use datetime with timezone
    payment_reference: Optional[str] = Field(None, max_length=100)
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class SupplierInvoiceResponse(PurchaseOrderBase):
    """Full supplier invoice representation returned by the API."""
    id: uuid.UUID
    tenant_id: uuid.UUID
    purchase_order_id: uuid.UUID
    supplier_id: uuid.UUID
    supplier_name: Optional[str] = None  # denormalized for display
    invoice_number: str
    status: str
    invoice_date: datetime  # Use datetime with timezone
    due_date: datetime  # Use datetime with timezone

    # Financial information
    subtotal: Decimal
    tax_amount: Decimal
    shipping_cost: Decimal
    discount_amount: Decimal
    total_amount: Decimal
    currency: str

    # Payment tracking
    paid_amount: Decimal
    remaining_amount: Decimal
    payment_date: Optional[datetime] = None  # Use datetime with timezone
    payment_reference: Optional[str] = None

    # Additional information
    notes: Optional[str] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: uuid.UUID
    updated_by: uuid.UUID
|
||||
440
services/procurement/app/schemas/replenishment.py
Normal file
440
services/procurement/app/schemas/replenishment.py
Normal file
@@ -0,0 +1,440 @@
|
||||
"""
|
||||
Pydantic schemas for replenishment planning.
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, Field, validator
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Replenishment Plan Schemas
|
||||
# ============================================================================
|
||||
|
||||
class ReplenishmentPlanItemBase(BaseModel):
    """One ingredient line in a replenishment plan.

    Quantities trace the calculation pipeline:
    base -> + safety stock -> shelf-life adjustment -> final order quantity.
    """
    ingredient_id: UUID
    ingredient_name: str
    unit_of_measure: str

    base_quantity: Decimal
    safety_stock_quantity: Decimal
    shelf_life_adjusted_quantity: Decimal
    final_order_quantity: Decimal  # quantity actually ordered

    order_date: date
    delivery_date: date
    required_by_date: date

    lead_time_days: int
    is_urgent: bool
    urgency_reason: Optional[str] = None
    waste_risk: str      # categorical risk label (e.g. low/medium/high) — confirm values
    stockout_risk: str

    supplier_id: Optional[UUID] = None

    # Detailed calculation breakdowns, kept as free-form dicts for audit/debugging.
    safety_stock_calculation: Optional[Dict[str, Any]] = None
    shelf_life_adjustment: Optional[Dict[str, Any]] = None
    inventory_projection: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class ReplenishmentPlanItemCreate(ReplenishmentPlanItemBase):
    """Creation payload: base fields plus the owning plan id."""
    replenishment_plan_id: UUID
|
||||
|
||||
|
||||
class ReplenishmentPlanItemResponse(ReplenishmentPlanItemBase):
    """API representation of a stored replenishment plan item."""
    id: UUID
    replenishment_plan_id: UUID
    created_at: datetime

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
class ReplenishmentPlanBase(BaseModel):
    """Shared fields of a replenishment plan (header-level aggregates)."""
    planning_date: date
    projection_horizon_days: int = 7  # how far ahead inventory is projected

    # Optional links to the inputs the plan was generated from.
    forecast_id: Optional[UUID] = None
    production_schedule_id: Optional[UUID] = None

    # Aggregate counters over the plan's items.
    total_items: int
    urgent_items: int
    high_risk_items: int
    total_estimated_cost: Decimal
|
||||
|
||||
|
||||
class ReplenishmentPlanCreate(ReplenishmentPlanBase):
    """Creation payload: header fields plus tenant and raw item dicts."""
    tenant_id: UUID
    items: List[Dict[str, Any]] = []  # raw item payloads; validated downstream
|
||||
|
||||
|
||||
class ReplenishmentPlanResponse(ReplenishmentPlanBase):
    """API representation of a stored replenishment plan with its items."""
    id: UUID
    tenant_id: UUID
    status: str
    created_at: datetime
    updated_at: Optional[datetime] = None
    executed_at: Optional[datetime] = None  # set when the plan is executed

    items: List[ReplenishmentPlanItemResponse] = []

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
class ReplenishmentPlanSummary(BaseModel):
    """Compact plan representation for list views (no item details)."""
    id: UUID
    tenant_id: UUID
    planning_date: date
    total_items: int
    urgent_items: int
    high_risk_items: int
    total_estimated_cost: Decimal
    status: str
    created_at: datetime

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Inventory Projection Schemas
|
||||
# ============================================================================
|
||||
|
||||
class InventoryProjectionBase(BaseModel):
    """Projected inventory position for one ingredient on one day.

    Balance identity: projected_ending_stock =
    starting_stock - forecasted_consumption + scheduled_receipts
    (not enforced here — computed upstream).
    """
    ingredient_id: UUID
    ingredient_name: str
    projection_date: date

    starting_stock: Decimal
    forecasted_consumption: Decimal
    scheduled_receipts: Decimal
    projected_ending_stock: Decimal

    is_stockout: bool      # True when projected stock falls to/below zero
    coverage_gap: Decimal  # shortfall quantity when stocked out
|
||||
|
||||
|
||||
class InventoryProjectionCreate(InventoryProjectionBase):
    """Creation payload: projection fields plus tenant and optional plan link."""
    tenant_id: UUID
    replenishment_plan_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class InventoryProjectionResponse(InventoryProjectionBase):
    """API representation of a stored inventory projection row."""
    id: UUID
    tenant_id: UUID
    replenishment_plan_id: Optional[UUID] = None
    created_at: datetime

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
class IngredientProjectionSummary(BaseModel):
    """Aggregated projection view for a single ingredient over the horizon."""
    ingredient_id: UUID
    ingredient_name: str
    current_stock: Decimal
    unit_of_measure: str
    projection_horizon_days: int
    total_consumption: Decimal
    total_receipts: Decimal
    stockout_days: int   # number of projected days with a stockout
    stockout_risk: str   # categorical risk label
    daily_projections: List[Dict[str, Any]]  # one entry per projected day
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Supplier Allocation Schemas
|
||||
# ============================================================================
|
||||
|
||||
class SupplierAllocationBase(BaseModel):
    """How much of a requirement is allocated to one supplier, and why."""
    supplier_id: UUID
    supplier_name: str

    allocation_type: str           # allocation strategy label — confirm valid values
    allocated_quantity: Decimal
    allocation_percentage: Decimal  # share of the total requirement

    unit_price: Decimal
    total_cost: Decimal
    lead_time_days: int

    supplier_score: Decimal  # composite score used for ranking suppliers
    score_breakdown: Optional[Dict[str, float]] = None  # per-criterion scores
    allocation_reason: Optional[str] = None  # human-readable explanation
|
||||
|
||||
|
||||
class SupplierAllocationCreate(SupplierAllocationBase):
    """Creation payload; allocation may hang off a plan item or a raw requirement."""
    replenishment_plan_item_id: Optional[UUID] = None
    requirement_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class SupplierAllocationResponse(SupplierAllocationBase):
    """API representation of a stored supplier allocation."""
    id: UUID
    replenishment_plan_item_id: Optional[UUID] = None
    requirement_id: Optional[UUID] = None
    created_at: datetime

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Supplier Selection Schemas
|
||||
# ============================================================================
|
||||
|
||||
class SupplierSelectionRequest(BaseModel):
    """Request to pick supplier(s) for one ingredient requirement."""
    ingredient_id: UUID
    ingredient_name: str
    required_quantity: Decimal
    supplier_options: List[Dict[str, Any]]  # candidate suppliers with pricing/lead-time data
|
||||
|
||||
|
||||
class SupplierSelectionResult(BaseModel):
    """Outcome of supplier selection for one ingredient."""
    ingredient_id: UUID
    ingredient_name: str
    required_quantity: Decimal
    allocations: List[Dict[str, Any]]  # per-supplier allocation dicts
    total_cost: Decimal
    weighted_lead_time: float  # lead time weighted by allocated share
    risk_score: float
    diversification_applied: bool  # True when the order was split across suppliers
    selection_strategy: str
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Replenishment Planning Request Schemas
|
||||
# ============================================================================
|
||||
|
||||
class IngredientRequirementInput(BaseModel):
    """One ingredient requirement fed into plan generation."""
    ingredient_id: UUID
    ingredient_name: str
    required_quantity: Decimal
    required_by_date: date

    supplier_id: Optional[UUID] = None  # preferred supplier, if any
    lead_time_days: int = 3
    shelf_life_days: Optional[int] = None
    is_perishable: bool = False
    category: str = 'dry'        # storage category — confirm valid values
    unit_of_measure: str = 'kg'

    # Current inventory and demand statistics for safety-stock math.
    current_stock: Decimal = Decimal('0')
    daily_consumption_rate: float = 0.0
    demand_std_dev: float = 0.0
|
||||
|
||||
|
||||
class GenerateReplenishmentPlanRequest(BaseModel):
    """Request to generate a replenishment plan for a tenant."""
    tenant_id: UUID
    requirements: List[IngredientRequirementInput]
    forecast_id: Optional[UUID] = None
    production_schedule_id: Optional[UUID] = None

    projection_horizon_days: int = 7
    service_level: float = 0.95  # target fill rate for safety-stock sizing
    buffer_days: int = 1         # extra days added to lead time as a buffer
|
||||
|
||||
|
||||
class GenerateReplenishmentPlanResponse(BaseModel):
    """Result of generating a replenishment plan (header aggregates + raw items)."""
    plan_id: UUID
    tenant_id: UUID
    planning_date: date
    projection_horizon_days: int

    total_items: int
    urgent_items: int
    high_risk_items: int
    total_estimated_cost: Decimal

    created_at: datetime

    items: List[Dict[str, Any]]  # raw item dicts; see ReplenishmentPlanItemResponse for shape
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MOQ Aggregation Schemas
|
||||
# ============================================================================
|
||||
|
||||
class MOQAggregationRequest(BaseModel):
    """Request to aggregate orders so supplier minimum-order-quantity constraints are met."""
    requirements: List[Dict[str, Any]]
    supplier_constraints: Dict[str, Dict[str, Any]]  # keyed by supplier — confirm key format
|
||||
|
||||
|
||||
class MOQAggregationResponse(BaseModel):
    """Result of MOQ aggregation: consolidated orders plus efficiency metrics."""
    aggregated_orders: List[Dict[str, Any]]
    efficiency_metrics: Dict[str, Any]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Safety Stock Calculation Schemas
|
||||
# ============================================================================
|
||||
|
||||
class SafetyStockRequest(BaseModel):
    """Request for a safety-stock calculation from historical daily demand."""
    ingredient_id: UUID
    daily_demands: List[float]  # historical daily demand observations
    lead_time_days: int
    service_level: float = 0.95  # target fill rate, 0..1
|
||||
|
||||
|
||||
class SafetyStockResponse(BaseModel):
    """Safety-stock calculation result with the inputs that produced it."""
    safety_stock_quantity: Decimal
    service_level: float
    z_score: float           # normal-distribution quantile for the service level
    demand_std_dev: float
    lead_time_days: int
    calculation_method: str
    confidence: str          # qualitative confidence label
    reasoning: str           # human-readable explanation of the result
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Inventory Projection Request Schemas
|
||||
# ============================================================================
|
||||
|
||||
class ProjectInventoryRequest(BaseModel):
    """Request to project one ingredient's inventory over a horizon."""
    ingredient_id: UUID
    ingredient_name: str
    current_stock: Decimal
    unit_of_measure: str
    daily_demand: List[Dict[str, Any]]          # per-day demand entries
    scheduled_receipts: List[Dict[str, Any]] = []  # inbound deliveries within the horizon
    projection_horizon_days: int = 7
|
||||
|
||||
|
||||
class ProjectInventoryResponse(BaseModel):
    """Projection result; mirrors IngredientProjectionSummary."""
    ingredient_id: UUID
    ingredient_name: str
    current_stock: Decimal
    unit_of_measure: str
    projection_horizon_days: int
    total_consumption: Decimal
    total_receipts: Decimal
    stockout_days: int
    stockout_risk: str
    daily_projections: List[Dict[str, Any]]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Supplier Selection History Schemas
|
||||
# ============================================================================
|
||||
|
||||
class SupplierSelectionHistoryBase(BaseModel):
    """Audit record of a past supplier selection for an ingredient."""
    ingredient_id: UUID
    ingredient_name: str
    selected_supplier_id: UUID
    selected_supplier_name: str

    selection_date: date
    quantity: Decimal
    unit_price: Decimal
    total_cost: Decimal

    lead_time_days: int
    quality_score: Optional[Decimal] = None
    delivery_performance: Optional[Decimal] = None

    selection_strategy: str
    was_primary_choice: bool = True  # False when a fallback supplier was used
|
||||
|
||||
|
||||
class SupplierSelectionHistoryCreate(SupplierSelectionHistoryBase):
    """Creation payload: history fields plus the owning tenant."""
    tenant_id: UUID
|
||||
|
||||
|
||||
class SupplierSelectionHistoryResponse(SupplierSelectionHistoryBase):
    """API representation of a stored supplier selection history record."""
    id: UUID
    tenant_id: UUID
    created_at: datetime

    class Config:
        # Allow construction from ORM objects (attribute access).
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Analytics Schemas
|
||||
# ============================================================================
|
||||
|
||||
class ReplenishmentAnalytics(BaseModel):
    """Aggregate KPIs across replenishment plans (percentages are 0..100 or 0..1 — confirm)."""
    total_plans: int
    total_items_planned: int
    total_estimated_value: Decimal

    urgent_items_percentage: float
    high_risk_items_percentage: float

    average_lead_time_days: float
    average_safety_stock_percentage: float

    stockout_prevention_rate: float
    moq_optimization_savings: Decimal

    supplier_diversification_rate: float
    average_suppliers_per_ingredient: float
|
||||
|
||||
|
||||
class InventoryProjectionAnalytics(BaseModel):
    """Aggregate KPIs across inventory projections for one horizon."""
    total_ingredients: int
    stockout_ingredients: int
    stockout_percentage: float

    risk_breakdown: Dict[str, int]  # count of ingredients per risk label

    total_stockout_days: int
    total_consumption: Decimal
    total_receipts: Decimal

    projection_horizon_days: int
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Validators
|
||||
# ============================================================================
|
||||
|
||||
# NOTE(review): these functions were previously decorated with @validator(...)
# at MODULE level. Pydantic only attaches validators declared inside a model
# class body, so the decorated versions were never invoked by any model -- dead
# code that merely looked like validation. They are now plain reusable
# functions; attach them inside a model with e.g.:
#
#     _qty_check = validator('required_quantity', allow_reuse=True)(validate_positive_quantity)
#
# The (cls, v) signature is kept so they can be reused as Pydantic validators.

def validate_positive_quantity(cls, v):
    """Reject negative quantities; return the value unchanged otherwise.

    Raises:
        ValueError: if v < 0.
    """
    if v < 0:
        raise ValueError('Quantity must be non-negative')
    return v


def validate_service_level(cls, v):
    """Require a service level in the closed interval [0, 1].

    Raises:
        ValueError: if v is outside [0, 1].
    """
    if not 0 <= v <= 1:
        raise ValueError('Service level must be between 0 and 1')
    return v
|
||||
18
services/procurement/app/services/__init__.py
Normal file
18
services/procurement/app/services/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# ================================================================
# services/procurement/app/services/__init__.py
# ================================================================
"""
Services for Procurement Service
"""

from .procurement_service import ProcurementService
from .purchase_order_service import PurchaseOrderService
from .recipe_explosion_service import RecipeExplosionService
from .smart_procurement_calculator import SmartProcurementCalculator

# Public API of the services package.
__all__ = [
    "ProcurementService",
    "PurchaseOrderService",
    "RecipeExplosionService",
    "SmartProcurementCalculator",
]
|
||||
560
services/procurement/app/services/delivery_tracking_service.py
Normal file
560
services/procurement/app/services/delivery_tracking_service.py
Normal file
@@ -0,0 +1,560 @@
|
||||
"""
|
||||
Delivery Tracking Service - With Leader Election
|
||||
|
||||
Tracks purchase order deliveries and generates appropriate alerts using EventPublisher:
|
||||
- DELIVERY_ARRIVING_SOON: 2 hours before delivery window
|
||||
- DELIVERY_OVERDUE: 30 minutes after expected delivery time
|
||||
- STOCK_RECEIPT_INCOMPLETE: If delivery not marked as received
|
||||
|
||||
Runs as internal scheduler with leader election for horizontal scaling.
|
||||
Domain ownership: Procurement service owns all PO and delivery tracking.
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Dict, Any, Optional, List
|
||||
from uuid import UUID, uuid4
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderStatus
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class DeliveryTrackingService:
|
||||
"""
|
||||
Monitors PO deliveries and generates time-based alerts using EventPublisher.
|
||||
|
||||
Uses APScheduler with leader election to run hourly checks.
|
||||
Only one pod executes checks - leader election ensures no duplicate alerts.
|
||||
"""
|
||||
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher, config, database_manager=None):
    """Wire up the tracking service.

    Args:
        event_publisher: publisher used to emit delivery alert events.
        config: service configuration object (Redis settings read via getattr).
        database_manager: async DB manager providing get_session(); required
            for tenant/delivery queries — confirm it is always supplied in prod.
    """
    self.publisher = event_publisher
    self.config = config
    self.database_manager = database_manager
    self.scheduler = AsyncIOScheduler()
    self._leader_election = None   # set in _setup_leader_election()
    self._redis_client = None      # Redis connection backing leader election
    self._scheduler_started = False
    self.instance_id = str(uuid4())[:8]  # Short instance ID for logging
|
||||
|
||||
async def start(self):
    """Start delivery tracking.

    Normally the scheduler is started only when this instance wins leader
    election (via the _on_become_leader callback). If leader election setup
    fails (e.g. Redis unreachable), we fall back to standalone mode and
    start the scheduler unconditionally — duplicate alerts are possible in
    that degraded mode if multiple pods hit the fallback.
    """
    try:
        # Initialize leader election
        await self._setup_leader_election()
    except Exception as e:
        logger.error("Failed to setup leader election, starting in standalone mode",
                    error=str(e))
        # Fallback: start scheduler without leader election
        await self._start_scheduler()
|
||||
|
||||
async def _setup_leader_election(self):
    """Connect to Redis and join leader election for horizontal scaling.

    Builds the Redis URL from config (REDIS_URL wins; otherwise assembled
    from host/port/db/password parts), verifies connectivity with a ping,
    and registers become/lose-leader callbacks that start/stop the local
    scheduler. Imports are function-local so the service can run without
    these dependencies when leader election is disabled.

    Raises:
        Exception: any Redis/connection error propagates to start()'s fallback.
    """
    from shared.leader_election import LeaderElectionService
    import redis.asyncio as redis

    # Build Redis URL from config
    redis_url = getattr(self.config, 'REDIS_URL', None)
    if not redis_url:
        redis_password = getattr(self.config, 'REDIS_PASSWORD', '')
        redis_host = getattr(self.config, 'REDIS_HOST', 'localhost')
        redis_port = getattr(self.config, 'REDIS_PORT', 6379)
        redis_db = getattr(self.config, 'REDIS_DB', 0)
        redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}"

    self._redis_client = redis.from_url(redis_url, decode_responses=False)
    # Fail fast if Redis is unreachable.
    await self._redis_client.ping()

    # Create leader election service
    self._leader_election = LeaderElectionService(
        self._redis_client,
        service_name="procurement-delivery-tracking"
    )

    # Start leader election with callbacks
    await self._leader_election.start(
        on_become_leader=self._on_become_leader,
        on_lose_leader=self._on_lose_leader
    )

    logger.info("Leader election initialized for delivery tracking",
               is_leader=self._leader_election.is_leader,
               instance_id=self.instance_id)
|
||||
|
||||
async def _on_become_leader(self):
    """Leader-election callback: this pod is now leader — start the scheduler."""
    logger.info("Became leader for delivery tracking - starting scheduler",
               instance_id=self.instance_id)
    await self._start_scheduler()
|
||||
|
||||
async def _on_lose_leader(self):
    """Leader-election callback: leadership lost — stop the scheduler so
    only the new leader emits alerts."""
    logger.warning("Lost leadership for delivery tracking - stopping scheduler",
                  instance_id=self.instance_id)
    await self._stop_scheduler()
|
||||
|
||||
async def _start_scheduler(self):
    """Register the hourly delivery-check job and start APScheduler.

    Idempotent: returns immediately if already started. max_instances=1 and
    coalesce=True prevent overlapping/bunched runs of the check job.
    """
    if self._scheduler_started:
        logger.debug("Scheduler already started", instance_id=self.instance_id)
        return

    if not self.scheduler.running:
        # Add hourly job to check deliveries
        self.scheduler.add_job(
            self._check_all_tenants,
            trigger=CronTrigger(minute=30),  # Run every hour at :30
            id='hourly_delivery_check',
            name='Hourly Delivery Tracking',
            replace_existing=True,
            max_instances=1,
            coalesce=True
        )

        self.scheduler.start()
        self._scheduler_started = True

        next_run = self.scheduler.get_job('hourly_delivery_check').next_run_time
        logger.info("Delivery tracking scheduler started",
                   instance_id=self.instance_id,
                   next_run=next_run.isoformat() if next_run else None)
|
||||
|
||||
async def _stop_scheduler(self):
    """Shut down APScheduler without waiting for running jobs; idempotent."""
    if not self._scheduler_started:
        return

    if self.scheduler.running:
        # wait=False: don't block shutdown on an in-flight delivery check.
        self.scheduler.shutdown(wait=False)
        self._scheduler_started = False
        logger.info("Delivery tracking scheduler stopped", instance_id=self.instance_id)
|
||||
|
||||
async def stop(self):
    """Graceful shutdown: leader election first (releases leadership),
    then the scheduler, then the Redis connection."""
    # Stop leader election first
    if self._leader_election:
        await self._leader_election.stop()
        logger.info("Leader election stopped", instance_id=self.instance_id)

    # Stop scheduler
    await self._stop_scheduler()

    # Close Redis
    if self._redis_client:
        await self._redis_client.close()
|
||||
|
||||
@property
def is_leader(self) -> bool:
    """True when this instance holds leadership.

    Defaults to True when leader election is not configured (standalone mode).
    """
    return self._leader_election.is_leader if self._leader_election else True
|
||||
|
||||
async def _check_all_tenants(self):
    """Run the delivery check for every active tenant.

    Only executed on the leader pod — APScheduler runs this job and leader
    election is handled at the scheduler level, not here. Per-tenant
    failures are logged and skipped so one bad tenant cannot block the rest.
    """
    logger.info("Starting delivery checks", instance_id=self.instance_id)

    try:
        # Get all active tenants from database
        tenants = await self._get_active_tenants()

        total_alerts = 0
        for tenant_id in tenants:
            try:
                result = await self.check_expected_deliveries(tenant_id)
                # FIX: check_expected_deliveries() returns a dict that already
                # contains a 'total_alerts' key equal to the sum of the other
                # counters, so the previous sum(result.values()) counted every
                # alert twice. Use the precomputed total instead (0 if the
                # per-tenant check failed before computing it).
                total_alerts += result.get('total_alerts', 0)
            except Exception as e:
                logger.error(
                    "Delivery check failed for tenant",
                    tenant_id=str(tenant_id),
                    error=str(e),
                    exc_info=True
                )

        logger.info(
            "Delivery checks completed",
            instance_id=self.instance_id,
            tenants_checked=len(tenants),
            total_alerts=total_alerts
        )

    except Exception as e:
        logger.error("Delivery checks failed", error=str(e), exc_info=True)
|
||||
|
||||
async def _get_active_tenants(self) -> List[UUID]:
    """Return the distinct tenant UUIDs that have at least one purchase order.

    Best-effort: on any DB error, logs and returns an empty list so the
    scheduled job degrades quietly instead of crashing the scheduler.
    """
    try:
        async with self.database_manager.get_session() as session:
            # Get distinct tenant_ids that have purchase orders
            query = select(PurchaseOrder.tenant_id).distinct()
            result = await session.execute(query)
            tenant_ids = [row[0] for row in result.all()]

            logger.debug("Active tenants retrieved", count=len(tenant_ids))
            return tenant_ids

    except Exception as e:
        logger.error("Failed to get active tenants", error=str(e))
        return []
|
||||
|
||||
async def check_expected_deliveries(self, tenant_id: UUID) -> Dict[str, int]:
    """
    Check all expected deliveries for a tenant and generate appropriate alerts.

    DIRECT DATABASE ACCESS - No API calls needed!

    Called by:
    - Scheduled job (hourly at :30)
    - Manual trigger endpoint (demo cloning)

    Returns:
        Dict with counts: {
            'arriving_soon': int,
            'overdue': int,
            'receipt_incomplete': int,
            'total_alerts': int
        }
        On error, 'total_alerts' may be absent (the exception path returns
        the partial counts dict).
    """
    logger.info("Checking expected deliveries", tenant_id=str(tenant_id))

    counts = {
        'arriving_soon': 0,
        'overdue': 0,
        'receipt_incomplete': 0
    }

    try:
        # Get expected deliveries directly from database
        deliveries = await self._get_expected_deliveries_from_db(tenant_id)

        now = datetime.now(timezone.utc)

        for delivery in deliveries:
            po_id = delivery.get('po_id')  # currently unused in this loop
            expected_date = delivery.get('expected_delivery_date')
            delivery_window_hours = delivery.get('delivery_window_hours', 4)
            status = delivery.get('status')

            # Skip POs without an expected delivery date.
            if not expected_date:
                continue

            # Parse expected date
            if isinstance(expected_date, str):
                expected_date = datetime.fromisoformat(expected_date)

            # Make timezone-aware (naive timestamps are assumed UTC).
            if expected_date.tzinfo is None:
                expected_date = expected_date.replace(tzinfo=timezone.utc)

            # Calculate delivery window
            window_start = expected_date
            window_end = expected_date + timedelta(hours=delivery_window_hours)

            # NOTE(review): the DB query also fetches POs in 'confirmed'
            # status, but every alert below only fires for 'approved' /
            # 'sent_to_supplier' — confirm whether 'confirmed' POs should
            # alert too or be dropped from the query.

            # Check if arriving soon (2 hours before window)
            arriving_soon_time = window_start - timedelta(hours=2)
            if arriving_soon_time <= now < window_start and status in ['approved', 'sent_to_supplier']:
                if await self._send_arriving_soon_alert(tenant_id, delivery):
                    counts['arriving_soon'] += 1

            # Check if overdue (30 min after window end)
            overdue_time = window_end + timedelta(minutes=30)
            if now >= overdue_time and status in ['approved', 'sent_to_supplier']:
                if await self._send_overdue_alert(tenant_id, delivery):
                    counts['overdue'] += 1

            # Check if receipt incomplete (delivery window passed, not marked received)
            # NOTE(review): once now >= window_end + 30min this condition also
            # holds, so a PO can trigger BOTH overdue and receipt-incomplete
            # alerts on the same run — confirm this double alert is intended
            # (dedup may happen inside the _send_* helpers).
            if now > window_end and status in ['approved', 'sent_to_supplier']:
                if await self._send_receipt_incomplete_alert(tenant_id, delivery):
                    counts['receipt_incomplete'] += 1

        counts['total_alerts'] = sum([counts['arriving_soon'], counts['overdue'], counts['receipt_incomplete']])

        logger.info(
            "Delivery check completed",
            tenant_id=str(tenant_id),
            **counts
        )

    except Exception as e:
        logger.error(
            "Error checking deliveries",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )

    return counts
|
||||
|
||||
    async def _get_expected_deliveries_from_db(
        self,
        tenant_id: UUID,
        days_ahead: int = 1,
        include_overdue: bool = True
    ) -> List[Dict[str, Any]]:
        """
        Query expected deliveries DIRECTLY from database (no HTTP call).

        This replaces the HTTP call to /api/internal/expected-deliveries.

        Args:
            tenant_id: Tenant whose purchase orders are inspected.
            days_ahead: How many days into the future to include.
            include_overdue: When True, POs whose expected delivery date is
                already in the past are also returned.

        Returns:
            List of delivery dicts with same structure as API endpoint.
            Returns an empty list on any error (best-effort; the failure is
            logged with full traceback).
        """
        try:
            async with self.database_manager.get_session() as session:
                # Calculate date range
                now = datetime.now(timezone.utc)
                end_date = now + timedelta(days=days_ahead)

                # Build query for purchase orders with expected delivery dates.
                # Only statuses that can still receive goods are considered;
                # items are eagerly loaded to avoid N+1 queries below.
                query = select(PurchaseOrder).options(
                    selectinload(PurchaseOrder.items)
                ).where(
                    PurchaseOrder.tenant_id == tenant_id,
                    PurchaseOrder.expected_delivery_date.isnot(None),
                    PurchaseOrder.status.in_([
                        PurchaseOrderStatus.approved,
                        PurchaseOrderStatus.sent_to_supplier,
                        PurchaseOrderStatus.confirmed
                    ])
                )

                # Add date filters
                if include_overdue:
                    query = query.where(PurchaseOrder.expected_delivery_date <= end_date)
                else:
                    query = query.where(
                        PurchaseOrder.expected_delivery_date >= now,
                        PurchaseOrder.expected_delivery_date <= end_date
                    )

                # Order by delivery date
                query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

                # Execute query
                result = await session.execute(query)
                purchase_orders = result.scalars().all()

                logger.info(
                    "Expected deliveries query executed",
                    tenant_id=str(tenant_id),
                    po_count=len(purchase_orders),
                    days_ahead=days_ahead,
                    include_overdue=include_overdue,
                    now=now.isoformat(),
                    end_date=end_date.isoformat()
                )

                # Format deliveries (same structure as API endpoint)
                deliveries = []

                for po in purchase_orders:
                    # Simple supplier name extraction
                    supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
                    supplier_phone = None

                    # Extract from notes if available.
                    # HACK: supplier name/phone are pattern-matched from the
                    # free-text notes for a fixed set of suppliers - replace
                    # with a real supplier lookup when one is available.
                    if po.notes:
                        if "Molinos San José" in po.notes:
                            supplier_name = "Molinos San José S.L."
                            supplier_phone = "+34 915 234 567"
                        elif "Lácteos del Valle" in po.notes:
                            supplier_name = "Lácteos del Valle S.A."
                            supplier_phone = "+34 913 456 789"
                        elif "Chocolates Valor" in po.notes:
                            supplier_name = "Chocolates Valor"
                            supplier_phone = "+34 965 510 062"

                    # Format line items.
                    # NOTE(review): only the first 5 items are included -
                    # confirm downstream consumers do not need the full list.
                    line_items = []
                    for item in po.items[:5]:
                        line_items.append({
                            "product_name": item.product_name,
                            "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                            "unit": item.unit_of_measure or "unit"
                        })

                    delivery_dict = {
                        "po_id": str(po.id),
                        "po_number": po.po_number,
                        "supplier_id": str(po.supplier_id),
                        "supplier_name": supplier_name,
                        "supplier_phone": supplier_phone,
                        "expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
                        "delivery_window_hours": 4,  # Default; no per-PO window stored yet
                        "status": po.status.value,
                        "line_items": line_items,
                        "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                        "currency": po.currency
                    }

                    deliveries.append(delivery_dict)

                return deliveries

        except Exception as e:
            logger.error(
                "Error fetching expected deliveries from database",
                tenant_id=str(tenant_id),
                error=str(e),
                exc_info=True
            )
            return []
|
||||
|
||||
async def _send_arriving_soon_alert(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
delivery: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Send DELIVERY_ARRIVING_SOON alert (2h before delivery window).
|
||||
|
||||
This appears in the action queue with "Mark as Received" action.
|
||||
"""
|
||||
po_number = delivery.get('po_number', 'N/A')
|
||||
supplier_name = delivery.get('supplier_name', 'Supplier')
|
||||
expected_date = delivery.get('expected_delivery_date')
|
||||
line_items = delivery.get('line_items', [])
|
||||
|
||||
# Format product list
|
||||
products = [item['product_name'] for item in line_items[:3]]
|
||||
product_list = ", ".join(products)
|
||||
if len(line_items) > 3:
|
||||
product_list += f" (+{len(line_items) - 3} more)"
|
||||
|
||||
# Calculate time until arrival
|
||||
if isinstance(expected_date, str):
|
||||
expected_date = datetime.fromisoformat(expected_date)
|
||||
if expected_date.tzinfo is None:
|
||||
expected_date = expected_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
hours_until = (expected_date - datetime.now(timezone.utc)).total_seconds() / 3600
|
||||
|
||||
metadata = {
|
||||
"po_id": delivery['po_id'],
|
||||
"po_number": po_number,
|
||||
"supplier_id": delivery.get('supplier_id'),
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_phone": delivery.get('supplier_phone'),
|
||||
"expected_delivery_date": expected_date.isoformat(),
|
||||
"line_items": line_items,
|
||||
"hours_until_arrival": hours_until,
|
||||
}
|
||||
|
||||
# Send alert using UnifiedEventPublisher
|
||||
success = await self.publisher.publish_alert(
|
||||
event_type="supply_chain.delivery_arriving_soon",
|
||||
tenant_id=tenant_id,
|
||||
severity="medium",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
"Sent arriving soon alert",
|
||||
po_number=po_number,
|
||||
supplier=supplier_name
|
||||
)
|
||||
|
||||
return success
|
||||
|
||||
async def _send_overdue_alert(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
delivery: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Send DELIVERY_OVERDUE alert (30min after expected window).
|
||||
|
||||
Critical priority - needs immediate action (call supplier).
|
||||
"""
|
||||
po_number = delivery.get('po_number', 'N/A')
|
||||
supplier_name = delivery.get('supplier_name', 'Supplier')
|
||||
expected_date = delivery.get('expected_delivery_date')
|
||||
|
||||
# Calculate how late
|
||||
if isinstance(expected_date, str):
|
||||
expected_date = datetime.fromisoformat(expected_date)
|
||||
if expected_date.tzinfo is None:
|
||||
expected_date = expected_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
hours_late = (datetime.now(timezone.utc) - expected_date).total_seconds() / 3600
|
||||
|
||||
metadata = {
|
||||
"po_id": delivery['po_id'],
|
||||
"po_number": po_number,
|
||||
"supplier_id": delivery.get('supplier_id'),
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_phone": delivery.get('supplier_phone'),
|
||||
"expected_delivery_date": expected_date.isoformat(),
|
||||
"hours_late": hours_late,
|
||||
"financial_impact": delivery.get('total_amount', 0),
|
||||
"affected_orders": len(delivery.get('affected_production_batches', [])),
|
||||
}
|
||||
|
||||
# Send alert with high severity
|
||||
success = await self.publisher.publish_alert(
|
||||
event_type="supply_chain.delivery_overdue",
|
||||
tenant_id=tenant_id,
|
||||
severity="high",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.warning(
|
||||
"Sent overdue delivery alert",
|
||||
po_number=po_number,
|
||||
supplier=supplier_name,
|
||||
hours_late=hours_late
|
||||
)
|
||||
|
||||
return success
|
||||
|
||||
async def _send_receipt_incomplete_alert(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
delivery: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Send STOCK_RECEIPT_INCOMPLETE alert.
|
||||
|
||||
Delivery window has passed but stock not marked as received.
|
||||
"""
|
||||
po_number = delivery.get('po_number', 'N/A')
|
||||
supplier_name = delivery.get('supplier_name', 'Supplier')
|
||||
|
||||
metadata = {
|
||||
"po_id": delivery['po_id'],
|
||||
"po_number": po_number,
|
||||
"supplier_id": delivery.get('supplier_id'),
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": delivery.get('expected_delivery_date'),
|
||||
}
|
||||
|
||||
# Send alert using UnifiedEventPublisher
|
||||
success = await self.publisher.publish_alert(
|
||||
event_type="supply_chain.stock_receipt_incomplete",
|
||||
tenant_id=tenant_id,
|
||||
severity="medium",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
"Sent receipt incomplete alert",
|
||||
po_number=po_number
|
||||
)
|
||||
|
||||
return success
|
||||
409
services/procurement/app/services/internal_transfer_service.py
Normal file
409
services/procurement/app/services/internal_transfer_service.py
Normal file
@@ -0,0 +1,409 @@
|
||||
"""
|
||||
Internal Transfer Service for managing internal purchase orders between parent and child tenants
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime, date
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
|
||||
from app.repositories.purchase_order_repository import PurchaseOrderRepository
|
||||
from shared.clients.recipes_client import RecipesServiceClient
|
||||
from shared.clients.production_client import ProductionServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InternalTransferService:
|
||||
"""
|
||||
Service for managing internal transfer workflow between parent and child tenants
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
purchase_order_repository: PurchaseOrderRepository,
|
||||
recipe_client: RecipesServiceClient,
|
||||
production_client: ProductionServiceClient,
|
||||
inventory_client: InventoryServiceClient
|
||||
):
|
||||
self.purchase_order_repository = purchase_order_repository
|
||||
self.recipe_client = recipe_client
|
||||
self.production_client = production_client
|
||||
self.inventory_client = inventory_client
|
||||
|
||||
async def create_internal_purchase_order(
|
||||
self,
|
||||
child_tenant_id: str,
|
||||
parent_tenant_id: str,
|
||||
items: List[Dict[str, Any]],
|
||||
delivery_date: date,
|
||||
requested_by_user_id: str,
|
||||
notes: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create an internal purchase order from child tenant to parent tenant
|
||||
|
||||
Args:
|
||||
child_tenant_id: Child tenant ID (requesting/destination)
|
||||
parent_tenant_id: Parent tenant ID (fulfilling/supplier)
|
||||
items: List of items with product_id, quantity, unit_of_measure
|
||||
delivery_date: When child needs delivery
|
||||
requested_by_user_id: User ID creating the request
|
||||
notes: Optional notes for the transfer
|
||||
|
||||
Returns:
|
||||
Dict with created purchase order details
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Creating internal PO from child {child_tenant_id} to parent {parent_tenant_id}")
|
||||
|
||||
# Calculate transfer pricing for each item
|
||||
priced_items = []
|
||||
subtotal = Decimal("0.00")
|
||||
|
||||
for item in items:
|
||||
product_id = item['product_id']
|
||||
quantity = item['quantity']
|
||||
unit_of_measure = item.get('unit_of_measure', 'units')
|
||||
|
||||
# Calculate transfer price using cost-based pricing
|
||||
unit_cost = await self._calculate_transfer_pricing(
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
product_id=product_id
|
||||
)
|
||||
|
||||
line_total = unit_cost * Decimal(str(quantity))
|
||||
|
||||
priced_items.append({
|
||||
'product_id': product_id,
|
||||
'product_name': item.get('product_name', f'Product {product_id}'), # Would fetch from inventory
|
||||
'quantity': quantity,
|
||||
'unit_of_measure': unit_of_measure,
|
||||
'unit_price': unit_cost,
|
||||
'line_total': line_total
|
||||
})
|
||||
|
||||
subtotal += line_total
|
||||
|
||||
# Create purchase order
|
||||
po_data = {
|
||||
'tenant_id': child_tenant_id, # The requesting tenant
|
||||
'supplier_id': parent_tenant_id, # The parent tenant acts as supplier
|
||||
'po_number': f"INT-{datetime.now().strftime('%Y%m%d')}-{str(uuid.uuid4())[:8].upper()}",
|
||||
'status': PurchaseOrderStatus.draft,
|
||||
'priority': 'normal',
|
||||
'order_date': datetime.now(),
|
||||
'required_delivery_date': datetime.combine(delivery_date, datetime.min.time()),
|
||||
'subtotal': subtotal,
|
||||
'tax_amount': Decimal("0.00"), # No tax for internal transfers
|
||||
'shipping_cost': Decimal("0.00"), # Included in transfer price
|
||||
'discount_amount': Decimal("0.00"),
|
||||
'total_amount': subtotal,
|
||||
'currency': 'EUR',
|
||||
'notes': notes,
|
||||
'created_by': requested_by_user_id,
|
||||
'updated_by': requested_by_user_id,
|
||||
|
||||
# Internal transfer specific fields
|
||||
'is_internal': True,
|
||||
'source_tenant_id': parent_tenant_id,
|
||||
'destination_tenant_id': child_tenant_id,
|
||||
'transfer_type': item.get('transfer_type', 'finished_goods') # Default to finished goods
|
||||
}
|
||||
|
||||
# Create the purchase order
|
||||
purchase_order = await self.purchase_order_repository.create_purchase_order(po_data)
|
||||
|
||||
# Create purchase order items
|
||||
for item_data in priced_items:
|
||||
po_item_data = {
|
||||
'tenant_id': child_tenant_id,
|
||||
'purchase_order_id': purchase_order['id'],
|
||||
'inventory_product_id': item_data['product_id'],
|
||||
'product_name': item_data['product_name'],
|
||||
'ordered_quantity': item_data['quantity'],
|
||||
'unit_of_measure': item_data['unit_of_measure'],
|
||||
'unit_price': item_data['unit_price'],
|
||||
'line_total': item_data['line_total'],
|
||||
'received_quantity': 0 # Not received yet
|
||||
}
|
||||
|
||||
await self.purchase_order_repository.create_purchase_order_item(po_item_data)
|
||||
|
||||
# Fetch the complete PO with items
|
||||
complete_po = await self.purchase_order_repository.get_purchase_order_by_id(purchase_order['id'])
|
||||
|
||||
logger.info(f"Created internal PO {complete_po['po_number']} from {child_tenant_id} to {parent_tenant_id}")
|
||||
|
||||
# Publish internal_transfer.created event
|
||||
await self._publish_internal_transfer_event(
|
||||
event_type='internal_transfer.created',
|
||||
transfer_data={
|
||||
'po_id': complete_po['id'],
|
||||
'child_tenant_id': child_tenant_id,
|
||||
'parent_tenant_id': parent_tenant_id,
|
||||
'delivery_date': delivery_date.isoformat()
|
||||
}
|
||||
)
|
||||
|
||||
return complete_po
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating internal purchase order: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def _calculate_transfer_pricing(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
product_id: str
|
||||
) -> Decimal:
|
||||
"""
|
||||
Calculate transfer price using cost-based pricing
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID
|
||||
product_id: Product ID to price
|
||||
|
||||
Returns:
|
||||
Decimal with unit cost for transfer
|
||||
"""
|
||||
try:
|
||||
# Check if product is produced locally by parent
|
||||
is_locally_produced = await self._check_if_locally_produced(parent_tenant_id, product_id)
|
||||
|
||||
if is_locally_produced:
|
||||
# Fetch recipe for the product
|
||||
recipe = await self.recipe_client.get_recipe_by_id(parent_tenant_id, product_id)
|
||||
|
||||
if recipe:
|
||||
# Calculate raw material cost
|
||||
raw_material_cost = await self._calculate_raw_material_cost(
|
||||
parent_tenant_id,
|
||||
recipe
|
||||
)
|
||||
|
||||
# Fetch production cost per unit
|
||||
production_cost = await self._get_production_cost_per_unit(
|
||||
parent_tenant_id,
|
||||
product_id
|
||||
)
|
||||
|
||||
# Unit cost = raw material cost + production cost
|
||||
unit_cost = raw_material_cost + production_cost
|
||||
else:
|
||||
# Fallback to average cost from inventory
|
||||
unit_cost = await self._get_average_cost_from_inventory(
|
||||
parent_tenant_id,
|
||||
product_id
|
||||
)
|
||||
else:
|
||||
# Not produced locally, use average cost from inventory
|
||||
unit_cost = await self._get_average_cost_from_inventory(
|
||||
parent_tenant_id,
|
||||
product_id
|
||||
)
|
||||
|
||||
# Apply optional markup (default 0%, configurable in tenant settings)
|
||||
markup_percentage = await self._get_transfer_markup_percentage(parent_tenant_id)
|
||||
markup_amount = unit_cost * Decimal(str(markup_percentage / 100))
|
||||
final_unit_price = unit_cost + markup_amount
|
||||
|
||||
return final_unit_price
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating transfer pricing for product {product_id}: {e}", exc_info=True)
|
||||
# Fallback to average cost
|
||||
return await self._get_average_cost_from_inventory(parent_tenant_id, product_id)
|
||||
|
||||
async def _check_if_locally_produced(self, tenant_id: str, product_id: str) -> bool:
|
||||
"""
|
||||
Check if a product is locally produced by the tenant
|
||||
"""
|
||||
try:
|
||||
# This would check the recipes service to see if the tenant has a recipe for this product
|
||||
# In a real implementation, this would call the recipes service
|
||||
recipe = await self.recipe_client.get_recipe_by_id(tenant_id, product_id)
|
||||
return recipe is not None
|
||||
except Exception:
|
||||
logger.warning(f"Could not verify if product {product_id} is locally produced by tenant {tenant_id}")
|
||||
return False
|
||||
|
||||
async def _calculate_raw_material_cost(self, tenant_id: str, recipe: Dict[str, Any]) -> Decimal:
|
||||
"""
|
||||
Calculate total raw material cost based on recipe
|
||||
"""
|
||||
total_cost = Decimal("0.00")
|
||||
|
||||
try:
|
||||
for ingredient in recipe.get('ingredients', []):
|
||||
ingredient_id = ingredient['ingredient_id']
|
||||
required_quantity = Decimal(str(ingredient.get('quantity', 0)))
|
||||
|
||||
# Get cost of this ingredient
|
||||
ingredient_cost = await self._get_average_cost_from_inventory(
|
||||
tenant_id,
|
||||
ingredient_id
|
||||
)
|
||||
|
||||
ingredient_total_cost = ingredient_cost * required_quantity
|
||||
total_cost += ingredient_total_cost
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating raw material cost: {e}", exc_info=True)
|
||||
# Return 0 to avoid blocking the process
|
||||
return Decimal("0.00")
|
||||
|
||||
return total_cost
|
||||
|
||||
async def _get_production_cost_per_unit(self, tenant_id: str, product_id: str) -> Decimal:
|
||||
"""
|
||||
Get the production cost per unit for a specific product
|
||||
"""
|
||||
try:
|
||||
# In a real implementation, this would call the production service
|
||||
# to get actual production costs
|
||||
# For now, return a placeholder value
|
||||
return Decimal("0.50") # Placeholder: EUR 0.50 per unit production cost
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting production cost for product {product_id}: {e}", exc_info=True)
|
||||
return Decimal("0.00")
|
||||
|
||||
async def _get_average_cost_from_inventory(self, tenant_id: str, product_id: str) -> Decimal:
|
||||
"""
|
||||
Get average cost for a product from inventory
|
||||
"""
|
||||
try:
|
||||
# This would call the inventory service to get average cost
|
||||
# For now, return a placeholder
|
||||
return Decimal("2.00") # Placeholder: EUR 2.00 average cost
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting average cost for product {product_id}: {e}", exc_info=True)
|
||||
return Decimal("1.00")
|
||||
|
||||
async def _get_transfer_markup_percentage(self, tenant_id: str) -> float:
|
||||
"""
|
||||
Get transfer markup percentage from tenant settings
|
||||
"""
|
||||
try:
|
||||
# This would fetch tenant-specific settings
|
||||
# For now, default to 0% markup
|
||||
return 0.0
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting transfer markup for tenant {tenant_id}: {e}")
|
||||
return 0.0
|
||||
|
||||
async def approve_internal_transfer(self, po_id: str, approved_by_user_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Approve an internal transfer request
|
||||
"""
|
||||
try:
|
||||
# Get the purchase order
|
||||
po = await self.purchase_order_repository.get_purchase_order_by_id(po_id)
|
||||
if not po:
|
||||
raise ValueError(f"Purchase order {po_id} not found")
|
||||
|
||||
if not po.get('is_internal'):
|
||||
raise ValueError("Cannot approve non-internal purchase order as internal transfer")
|
||||
|
||||
# Update status to approved
|
||||
approved_po = await self.purchase_order_repository.update_purchase_order_status(
|
||||
po_id=po_id,
|
||||
status=PurchaseOrderStatus.approved,
|
||||
updated_by=approved_by_user_id
|
||||
)
|
||||
|
||||
logger.info(f"Approved internal transfer PO {po_id} by user {approved_by_user_id}")
|
||||
|
||||
# Publish internal_transfer.approved event
|
||||
await self._publish_internal_transfer_event(
|
||||
event_type='internal_transfer.approved',
|
||||
transfer_data={
|
||||
'po_id': po_id,
|
||||
'child_tenant_id': po.get('tenant_id'),
|
||||
'parent_tenant_id': po.get('source_tenant_id'),
|
||||
'approved_by': approved_by_user_id
|
||||
}
|
||||
)
|
||||
|
||||
return approved_po
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error approving internal transfer: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def _publish_internal_transfer_event(self, event_type: str, transfer_data: Dict[str, Any]):
|
||||
"""
|
||||
Publish internal transfer event to message queue
|
||||
"""
|
||||
# In a real implementation, this would publish to RabbitMQ
|
||||
logger.info(f"Internal transfer event published: {event_type} - {transfer_data}")
|
||||
|
||||
async def get_pending_internal_transfers(self, tenant_id: str) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get all pending internal transfers for a tenant (as parent supplier or child requester)
|
||||
"""
|
||||
try:
|
||||
pending_pos = await self.purchase_order_repository.get_purchase_orders_by_tenant_and_status(
|
||||
tenant_id=tenant_id,
|
||||
status=PurchaseOrderStatus.draft,
|
||||
is_internal=True
|
||||
)
|
||||
|
||||
# Filter based on whether this tenant is parent or child
|
||||
parent_pos = []
|
||||
child_pos = []
|
||||
|
||||
for po in pending_pos:
|
||||
if po.get('source_tenant_id') == tenant_id:
|
||||
# This tenant is the supplier (parent) - needs to approve
|
||||
parent_pos.append(po)
|
||||
elif po.get('destination_tenant_id') == tenant_id:
|
||||
# This tenant is the requester (child) - tracking status
|
||||
child_pos.append(po)
|
||||
|
||||
return {
|
||||
'pending_approval_as_parent': parent_pos,
|
||||
'pending_status_as_child': child_pos
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting pending internal transfers: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def get_internal_transfer_history(
|
||||
self,
|
||||
tenant_id: str,
|
||||
parent_tenant_id: Optional[str] = None,
|
||||
child_tenant_id: Optional[str] = None,
|
||||
start_date: Optional[date] = None,
|
||||
end_date: Optional[date] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get internal transfer history with filtering options
|
||||
"""
|
||||
try:
|
||||
# Build filters
|
||||
filters = {'is_internal': True}
|
||||
|
||||
if parent_tenant_id:
|
||||
filters['source_tenant_id'] = parent_tenant_id
|
||||
if child_tenant_id:
|
||||
filters['destination_tenant_id'] = child_tenant_id
|
||||
if start_date:
|
||||
filters['start_date'] = start_date
|
||||
if end_date:
|
||||
filters['end_date'] = end_date
|
||||
|
||||
history = await self.purchase_order_repository.get_purchase_orders_by_tenant_and_filters(
|
||||
tenant_id=tenant_id,
|
||||
filters=filters
|
||||
)
|
||||
|
||||
return history
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting internal transfer history: {e}", exc_info=True)
|
||||
raise
|
||||
429
services/procurement/app/services/inventory_projector.py
Normal file
429
services/procurement/app/services/inventory_projector.py
Normal file
@@ -0,0 +1,429 @@
|
||||
"""
|
||||
Inventory Projector
|
||||
|
||||
Projects future inventory levels day-by-day to identify coverage gaps
|
||||
and stockout risks before they occur.
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
import logging
|
||||
|
||||
from shared.utils.time_series_utils import generate_future_dates
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class DailyDemand:
    """Daily demand forecast for an ingredient"""
    # Ingredient this forecast applies to.
    ingredient_id: str
    # Calendar day of the forecast.
    date: date
    # Forecasted consumption for that day, in the ingredient's unit.
    quantity: Decimal
|
||||
|
||||
|
||||
@dataclass
class ScheduledReceipt:
    """Planned receipt (PO, production, etc.)"""
    # Ingredient being received.
    ingredient_id: str
    # Day the receipt is expected to land.
    date: date
    # Quantity received, in the ingredient's unit.
    quantity: Decimal
    source: str  # 'purchase_order', 'production', 'transfer'
    # Optional ID of the originating document (e.g. a PO id).
    reference_id: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class InventoryLevel:
    """Current inventory level"""
    # Ingredient the level refers to.
    ingredient_id: str
    # On-hand quantity.
    quantity: Decimal
    # Unit the quantity is expressed in.
    unit_of_measure: str
|
||||
|
||||
|
||||
@dataclass
class DailyProjection:
    """Daily inventory projection"""
    date: date
    # Stock at start of day (= previous day's ending stock).
    starting_stock: Decimal
    # Forecasted demand for the day.
    forecasted_consumption: Decimal
    # Sum of receipts scheduled to land on this day.
    scheduled_receipts: Decimal
    # starting_stock - forecasted_consumption + scheduled_receipts.
    projected_ending_stock: Decimal
    # True when projected_ending_stock is negative.
    is_stockout: bool
    coverage_gap: Decimal  # Negative amount if stockout
|
||||
|
||||
|
||||
@dataclass
class IngredientProjection:
    """Complete projection for one ingredient"""
    ingredient_id: str
    ingredient_name: str
    # Stock at the start of the projection horizon.
    current_stock: Decimal
    unit_of_measure: str
    # Number of days covered by daily_projections.
    projection_horizon_days: int
    # One DailyProjection per day, in date order.
    daily_projections: List[DailyProjection] = field(default_factory=list)
    # Sum of forecasted consumption over the horizon.
    total_consumption: Decimal = Decimal('0')
    # Sum of scheduled receipts over the horizon.
    total_receipts: Decimal = Decimal('0')
    # Count of days with a projected stockout.
    stockout_days: int = 0
    stockout_risk: str = "low"  # low, medium, high
|
||||
|
||||
|
||||
class InventoryProjector:
|
||||
"""
|
||||
Projects inventory levels over time to identify coverage gaps.
|
||||
|
||||
Algorithm:
|
||||
For each day in horizon:
|
||||
Starting Stock = Previous Day's Ending Stock
|
||||
Consumption = Forecasted Demand
|
||||
Receipts = Scheduled Deliveries + Production
|
||||
Ending Stock = Starting Stock - Consumption + Receipts
|
||||
|
||||
Identifies:
|
||||
- Days when stock goes negative (stockouts)
|
||||
- Coverage gaps (how much short)
|
||||
- Stockout risk level
|
||||
"""
|
||||
|
||||
    def __init__(self, projection_horizon_days: int = 7):
        """
        Initialize inventory projector.

        Args:
            projection_horizon_days: Number of days to project
        """
        # Horizon used by project_inventory when generating future dates.
        self.projection_horizon_days = projection_horizon_days
|
||||
|
||||
def project_inventory(
|
||||
self,
|
||||
ingredient_id: str,
|
||||
ingredient_name: str,
|
||||
current_stock: Decimal,
|
||||
unit_of_measure: str,
|
||||
daily_demand: List[DailyDemand],
|
||||
scheduled_receipts: List[ScheduledReceipt],
|
||||
start_date: Optional[date] = None
|
||||
) -> IngredientProjection:
|
||||
"""
|
||||
Project inventory levels for one ingredient.
|
||||
|
||||
Args:
|
||||
ingredient_id: Ingredient ID
|
||||
ingredient_name: Ingredient name
|
||||
current_stock: Current inventory level
|
||||
unit_of_measure: Unit of measure
|
||||
daily_demand: List of daily demand forecasts
|
||||
scheduled_receipts: List of scheduled receipts
|
||||
start_date: Starting date (defaults to today)
|
||||
|
||||
Returns:
|
||||
IngredientProjection with daily projections
|
||||
"""
|
||||
if start_date is None:
|
||||
start_date = date.today()
|
||||
|
||||
# Generate projection dates
|
||||
projection_dates = generate_future_dates(start_date, self.projection_horizon_days)
|
||||
|
||||
# Build demand lookup
|
||||
demand_by_date = {d.date: d.quantity for d in daily_demand}
|
||||
|
||||
# Build receipts lookup
|
||||
receipts_by_date: Dict[date, Decimal] = {}
|
||||
for receipt in scheduled_receipts:
|
||||
if receipt.date not in receipts_by_date:
|
||||
receipts_by_date[receipt.date] = Decimal('0')
|
||||
receipts_by_date[receipt.date] += receipt.quantity
|
||||
|
||||
# Project day by day
|
||||
daily_projections = []
|
||||
running_stock = current_stock
|
||||
total_consumption = Decimal('0')
|
||||
total_receipts = Decimal('0')
|
||||
stockout_days = 0
|
||||
|
||||
for projection_date in projection_dates:
|
||||
starting_stock = running_stock
|
||||
|
||||
# Get consumption for this day
|
||||
consumption = demand_by_date.get(projection_date, Decimal('0'))
|
||||
|
||||
# Get receipts for this day
|
||||
receipts = receipts_by_date.get(projection_date, Decimal('0'))
|
||||
|
||||
# Calculate ending stock
|
||||
ending_stock = starting_stock - consumption + receipts
|
||||
|
||||
# Check for stockout
|
||||
is_stockout = ending_stock < Decimal('0')
|
||||
coverage_gap = min(Decimal('0'), ending_stock)
|
||||
|
||||
if is_stockout:
|
||||
stockout_days += 1
|
||||
|
||||
# Create daily projection
|
||||
daily_proj = DailyProjection(
|
||||
date=projection_date,
|
||||
starting_stock=starting_stock,
|
||||
forecasted_consumption=consumption,
|
||||
scheduled_receipts=receipts,
|
||||
projected_ending_stock=ending_stock,
|
||||
is_stockout=is_stockout,
|
||||
coverage_gap=coverage_gap
|
||||
)
|
||||
|
||||
daily_projections.append(daily_proj)
|
||||
|
||||
# Update running totals
|
||||
total_consumption += consumption
|
||||
total_receipts += receipts
|
||||
running_stock = ending_stock
|
||||
|
||||
# Calculate stockout risk
|
||||
stockout_risk = self._calculate_stockout_risk(
|
||||
stockout_days=stockout_days,
|
||||
total_days=len(projection_dates),
|
||||
final_stock=running_stock
|
||||
)
|
||||
|
||||
return IngredientProjection(
|
||||
ingredient_id=ingredient_id,
|
||||
ingredient_name=ingredient_name,
|
||||
current_stock=current_stock,
|
||||
unit_of_measure=unit_of_measure,
|
||||
projection_horizon_days=self.projection_horizon_days,
|
||||
daily_projections=daily_projections,
|
||||
total_consumption=total_consumption,
|
||||
total_receipts=total_receipts,
|
||||
stockout_days=stockout_days,
|
||||
stockout_risk=stockout_risk
|
||||
)
|
||||
|
||||
def project_multiple_ingredients(
|
||||
self,
|
||||
ingredients_data: List[Dict]
|
||||
) -> List[IngredientProjection]:
|
||||
"""
|
||||
Project inventory for multiple ingredients.
|
||||
|
||||
Args:
|
||||
ingredients_data: List of dicts with ingredient data
|
||||
|
||||
Returns:
|
||||
List of ingredient projections
|
||||
"""
|
||||
projections = []
|
||||
|
||||
for data in ingredients_data:
|
||||
projection = self.project_inventory(
|
||||
ingredient_id=data['ingredient_id'],
|
||||
ingredient_name=data['ingredient_name'],
|
||||
current_stock=data['current_stock'],
|
||||
unit_of_measure=data['unit_of_measure'],
|
||||
daily_demand=data.get('daily_demand', []),
|
||||
scheduled_receipts=data.get('scheduled_receipts', []),
|
||||
start_date=data.get('start_date')
|
||||
)
|
||||
|
||||
projections.append(projection)
|
||||
|
||||
return projections
|
||||
|
||||
def identify_coverage_gaps(
|
||||
self,
|
||||
projection: IngredientProjection
|
||||
) -> List[Dict]:
|
||||
"""
|
||||
Identify all coverage gaps in projection.
|
||||
|
||||
Args:
|
||||
projection: Ingredient projection
|
||||
|
||||
Returns:
|
||||
List of coverage gap details
|
||||
"""
|
||||
gaps = []
|
||||
|
||||
for daily_proj in projection.daily_projections:
|
||||
if daily_proj.is_stockout:
|
||||
gap = {
|
||||
'date': daily_proj.date,
|
||||
'shortage_quantity': abs(daily_proj.coverage_gap),
|
||||
'starting_stock': daily_proj.starting_stock,
|
||||
'consumption': daily_proj.forecasted_consumption,
|
||||
'receipts': daily_proj.scheduled_receipts
|
||||
}
|
||||
gaps.append(gap)
|
||||
|
||||
if gaps:
|
||||
logger.warning(
|
||||
f"{projection.ingredient_name}: {len(gaps)} stockout days detected"
|
||||
)
|
||||
|
||||
return gaps
|
||||
|
||||
def calculate_required_order_quantity(
|
||||
self,
|
||||
projection: IngredientProjection,
|
||||
target_coverage_days: int = 7
|
||||
) -> Decimal:
|
||||
"""
|
||||
Calculate how much to order to achieve target coverage.
|
||||
|
||||
Args:
|
||||
projection: Ingredient projection
|
||||
target_coverage_days: Target days of coverage
|
||||
|
||||
Returns:
|
||||
Required order quantity
|
||||
"""
|
||||
# Calculate average daily consumption
|
||||
if projection.daily_projections:
|
||||
avg_daily_consumption = projection.total_consumption / len(projection.daily_projections)
|
||||
else:
|
||||
return Decimal('0')
|
||||
|
||||
# Target stock level
|
||||
target_stock = avg_daily_consumption * Decimal(str(target_coverage_days))
|
||||
|
||||
# Calculate shortfall
|
||||
final_projected_stock = projection.daily_projections[-1].projected_ending_stock if projection.daily_projections else Decimal('0')
|
||||
|
||||
required_order = max(Decimal('0'), target_stock - final_projected_stock)
|
||||
|
||||
return required_order
|
||||
|
||||
def _calculate_stockout_risk(
|
||||
self,
|
||||
stockout_days: int,
|
||||
total_days: int,
|
||||
final_stock: Decimal
|
||||
) -> str:
|
||||
"""
|
||||
Calculate stockout risk level.
|
||||
|
||||
Args:
|
||||
stockout_days: Number of stockout days
|
||||
total_days: Total projection days
|
||||
final_stock: Final projected stock
|
||||
|
||||
Returns:
|
||||
Risk level: 'low', 'medium', 'high', 'critical'
|
||||
"""
|
||||
if stockout_days == 0 and final_stock > Decimal('0'):
|
||||
return "low"
|
||||
|
||||
stockout_ratio = stockout_days / total_days if total_days > 0 else 0
|
||||
|
||||
if stockout_ratio >= 0.5 or final_stock < Decimal('-100'):
|
||||
return "critical"
|
||||
elif stockout_ratio >= 0.3 or final_stock < Decimal('-50'):
|
||||
return "high"
|
||||
elif stockout_ratio > 0 or final_stock < Decimal('0'):
|
||||
return "medium"
|
||||
else:
|
||||
return "low"
|
||||
|
||||
def get_high_risk_ingredients(
|
||||
self,
|
||||
projections: List[IngredientProjection]
|
||||
) -> List[IngredientProjection]:
|
||||
"""
|
||||
Filter to high/critical risk ingredients.
|
||||
|
||||
Args:
|
||||
projections: List of ingredient projections
|
||||
|
||||
Returns:
|
||||
List of high-risk projections
|
||||
"""
|
||||
high_risk = [
|
||||
p for p in projections
|
||||
if p.stockout_risk in ['high', 'critical']
|
||||
]
|
||||
|
||||
if high_risk:
|
||||
logger.warning(f"Found {len(high_risk)} high-risk ingredients")
|
||||
for proj in high_risk:
|
||||
logger.warning(
|
||||
f" - {proj.ingredient_name}: {proj.stockout_days} stockout days, "
|
||||
f"risk={proj.stockout_risk}"
|
||||
)
|
||||
|
||||
return high_risk
|
||||
|
||||
def get_summary_statistics(
|
||||
self,
|
||||
projections: List[IngredientProjection]
|
||||
) -> Dict:
|
||||
"""
|
||||
Get summary statistics across all projections.
|
||||
|
||||
Args:
|
||||
projections: List of ingredient projections
|
||||
|
||||
Returns:
|
||||
Summary statistics
|
||||
"""
|
||||
total_ingredients = len(projections)
|
||||
stockout_ingredients = sum(1 for p in projections if p.stockout_days > 0)
|
||||
|
||||
risk_breakdown = {
|
||||
'low': sum(1 for p in projections if p.stockout_risk == 'low'),
|
||||
'medium': sum(1 for p in projections if p.stockout_risk == 'medium'),
|
||||
'high': sum(1 for p in projections if p.stockout_risk == 'high'),
|
||||
'critical': sum(1 for p in projections if p.stockout_risk == 'critical')
|
||||
}
|
||||
|
||||
total_stockout_days = sum(p.stockout_days for p in projections)
|
||||
total_consumption = sum(p.total_consumption for p in projections)
|
||||
total_receipts = sum(p.total_receipts for p in projections)
|
||||
|
||||
return {
|
||||
'total_ingredients': total_ingredients,
|
||||
'stockout_ingredients': stockout_ingredients,
|
||||
'stockout_percentage': (stockout_ingredients / total_ingredients * 100) if total_ingredients > 0 else 0,
|
||||
'risk_breakdown': risk_breakdown,
|
||||
'total_stockout_days': total_stockout_days,
|
||||
'total_consumption': float(total_consumption),
|
||||
'total_receipts': float(total_receipts),
|
||||
'projection_horizon_days': self.projection_horizon_days
|
||||
}
|
||||
|
||||
def export_projection_to_dict(
|
||||
self,
|
||||
projection: IngredientProjection
|
||||
) -> Dict:
|
||||
"""
|
||||
Export projection to dictionary for API response.
|
||||
|
||||
Args:
|
||||
projection: Ingredient projection
|
||||
|
||||
Returns:
|
||||
Dictionary representation
|
||||
"""
|
||||
return {
|
||||
'ingredient_id': projection.ingredient_id,
|
||||
'ingredient_name': projection.ingredient_name,
|
||||
'current_stock': float(projection.current_stock),
|
||||
'unit_of_measure': projection.unit_of_measure,
|
||||
'projection_horizon_days': projection.projection_horizon_days,
|
||||
'total_consumption': float(projection.total_consumption),
|
||||
'total_receipts': float(projection.total_receipts),
|
||||
'stockout_days': projection.stockout_days,
|
||||
'stockout_risk': projection.stockout_risk,
|
||||
'daily_projections': [
|
||||
{
|
||||
'date': dp.date.isoformat(),
|
||||
'starting_stock': float(dp.starting_stock),
|
||||
'forecasted_consumption': float(dp.forecasted_consumption),
|
||||
'scheduled_receipts': float(dp.scheduled_receipts),
|
||||
'projected_ending_stock': float(dp.projected_ending_stock),
|
||||
'is_stockout': dp.is_stockout,
|
||||
'coverage_gap': float(dp.coverage_gap)
|
||||
}
|
||||
for dp in projection.daily_projections
|
||||
]
|
||||
}
|
||||
366
services/procurement/app/services/lead_time_planner.py
Normal file
366
services/procurement/app/services/lead_time_planner.py
Normal file
@@ -0,0 +1,366 @@
|
||||
"""
|
||||
Lead Time Planner
|
||||
|
||||
Calculates order dates based on supplier lead times to ensure timely delivery.
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class LeadTimeRequirement:
    """A procurement requirement enriched with supplier lead-time data."""
    ingredient_id: str                  # Ingredient being procured
    ingredient_name: str                # Human-readable name (used in logs)
    required_quantity: Decimal          # Quantity needed
    required_by_date: date              # Date by which the quantity must be on hand
    supplier_id: Optional[str] = None   # Supplier fulfilling the requirement, if known
    lead_time_days: int = 0             # Supplier lead time in days
    buffer_days: int = 1                # Extra safety days subtracted from the order date
|
||||
|
||||
@dataclass
class LeadTimePlan:
    """A planned order with computed order/delivery dates and urgency flags."""
    ingredient_id: str                  # Ingredient being ordered
    ingredient_name: str                # Human-readable name (used in logs)
    order_quantity: Decimal             # Quantity to order
    order_date: date                    # Date the order should be placed
    delivery_date: date                 # Expected arrival date
    required_by_date: date              # Date the quantity must be on hand
    lead_time_days: int                 # Supplier lead time used for planning
    buffer_days: int                    # Safety buffer applied to the order date
    is_urgent: bool                     # True when the order window is (nearly) past
    urgency_reason: Optional[str] = None  # Human-readable explanation when is_urgent
    supplier_id: Optional[str] = None   # Supplier fulfilling the order, if known
|
||||
|
||||
class LeadTimePlanner:
|
||||
"""
|
||||
Plans order dates based on supplier lead times.
|
||||
|
||||
Ensures that:
|
||||
1. Orders are placed early enough for on-time delivery
|
||||
2. Buffer days are added for risk mitigation
|
||||
3. Urgent orders are identified
|
||||
4. Weekend/holiday adjustments can be applied
|
||||
"""
|
||||
|
||||
def __init__(self, default_buffer_days: int = 1):
|
||||
"""
|
||||
Initialize lead time planner.
|
||||
|
||||
Args:
|
||||
default_buffer_days: Default buffer days to add
|
||||
"""
|
||||
self.default_buffer_days = default_buffer_days
|
||||
|
||||
def calculate_order_date(
|
||||
self,
|
||||
required_by_date: date,
|
||||
lead_time_days: int,
|
||||
buffer_days: Optional[int] = None
|
||||
) -> date:
|
||||
"""
|
||||
Calculate when order should be placed.
|
||||
|
||||
Order Date = Required Date - Lead Time - Buffer
|
||||
|
||||
Args:
|
||||
required_by_date: Date when item is needed
|
||||
lead_time_days: Supplier lead time in days
|
||||
buffer_days: Additional buffer days (uses default if None)
|
||||
|
||||
Returns:
|
||||
Order date
|
||||
"""
|
||||
buffer = buffer_days if buffer_days is not None else self.default_buffer_days
|
||||
total_days = lead_time_days + buffer
|
||||
|
||||
order_date = required_by_date - timedelta(days=total_days)
|
||||
|
||||
return order_date
|
||||
|
||||
def calculate_delivery_date(
|
||||
self,
|
||||
order_date: date,
|
||||
lead_time_days: int
|
||||
) -> date:
|
||||
"""
|
||||
Calculate expected delivery date.
|
||||
|
||||
Delivery Date = Order Date + Lead Time
|
||||
|
||||
Args:
|
||||
order_date: Date when order is placed
|
||||
lead_time_days: Supplier lead time in days
|
||||
|
||||
Returns:
|
||||
Expected delivery date
|
||||
"""
|
||||
return order_date + timedelta(days=lead_time_days)
|
||||
|
||||
def is_urgent(
|
||||
self,
|
||||
order_date: date,
|
||||
today: date,
|
||||
urgency_threshold_days: int = 2
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Determine if order is urgent.
|
||||
|
||||
Args:
|
||||
order_date: Calculated order date
|
||||
today: Current date
|
||||
urgency_threshold_days: Days threshold for urgency
|
||||
|
||||
Returns:
|
||||
Tuple of (is_urgent, reason)
|
||||
"""
|
||||
days_until_order = (order_date - today).days
|
||||
|
||||
if days_until_order < 0:
|
||||
return True, f"Order should have been placed {abs(days_until_order)} days ago"
|
||||
elif days_until_order <= urgency_threshold_days:
|
||||
return True, f"Order must be placed within {days_until_order} days"
|
||||
else:
|
||||
return False, None
|
||||
|
||||
def plan_requirements(
|
||||
self,
|
||||
requirements: List[LeadTimeRequirement],
|
||||
today: Optional[date] = None
|
||||
) -> List[LeadTimePlan]:
|
||||
"""
|
||||
Plan order dates for multiple requirements.
|
||||
|
||||
Args:
|
||||
requirements: List of requirements with lead time info
|
||||
today: Current date (defaults to today)
|
||||
|
||||
Returns:
|
||||
List of lead time plans
|
||||
"""
|
||||
if today is None:
|
||||
today = date.today()
|
||||
|
||||
plans = []
|
||||
|
||||
for req in requirements:
|
||||
# Calculate order date
|
||||
order_date = self.calculate_order_date(
|
||||
required_by_date=req.required_by_date,
|
||||
lead_time_days=req.lead_time_days,
|
||||
buffer_days=req.buffer_days if hasattr(req, 'buffer_days') else None
|
||||
)
|
||||
|
||||
# Calculate delivery date
|
||||
delivery_date = self.calculate_delivery_date(
|
||||
order_date=order_date,
|
||||
lead_time_days=req.lead_time_days
|
||||
)
|
||||
|
||||
# Check urgency
|
||||
is_urgent, urgency_reason = self.is_urgent(
|
||||
order_date=order_date,
|
||||
today=today
|
||||
)
|
||||
|
||||
# Create plan
|
||||
plan = LeadTimePlan(
|
||||
ingredient_id=req.ingredient_id,
|
||||
ingredient_name=req.ingredient_name,
|
||||
order_quantity=req.required_quantity,
|
||||
order_date=max(order_date, today), # Can't order in the past
|
||||
delivery_date=delivery_date,
|
||||
required_by_date=req.required_by_date,
|
||||
lead_time_days=req.lead_time_days,
|
||||
buffer_days=self.default_buffer_days,
|
||||
is_urgent=is_urgent,
|
||||
urgency_reason=urgency_reason,
|
||||
supplier_id=req.supplier_id
|
||||
)
|
||||
|
||||
plans.append(plan)
|
||||
|
||||
if is_urgent:
|
||||
logger.warning(
|
||||
f"URGENT: {req.ingredient_name} - {urgency_reason}"
|
||||
)
|
||||
|
||||
# Sort by order date (urgent first)
|
||||
plans.sort(key=lambda p: (not p.is_urgent, p.order_date))
|
||||
|
||||
return plans
|
||||
|
||||
def adjust_for_working_days(
|
||||
self,
|
||||
target_date: date,
|
||||
non_working_days: List[int] = None
|
||||
) -> date:
|
||||
"""
|
||||
Adjust date to skip non-working days (e.g., weekends).
|
||||
|
||||
Args:
|
||||
target_date: Original date
|
||||
non_working_days: List of weekday numbers (0=Monday, 6=Sunday)
|
||||
|
||||
Returns:
|
||||
Adjusted date
|
||||
"""
|
||||
if non_working_days is None:
|
||||
non_working_days = [5, 6] # Saturday, Sunday
|
||||
|
||||
adjusted = target_date
|
||||
|
||||
# Move backwards to previous working day
|
||||
while adjusted.weekday() in non_working_days:
|
||||
adjusted -= timedelta(days=1)
|
||||
|
||||
return adjusted
|
||||
|
||||
def consolidate_orders_by_date(
|
||||
self,
|
||||
plans: List[LeadTimePlan],
|
||||
consolidation_window_days: int = 3
|
||||
) -> Dict[date, List[LeadTimePlan]]:
|
||||
"""
|
||||
Group orders that can be placed together.
|
||||
|
||||
Args:
|
||||
plans: List of lead time plans
|
||||
consolidation_window_days: Days within which to consolidate
|
||||
|
||||
Returns:
|
||||
Dictionary mapping order date to list of plans
|
||||
"""
|
||||
if not plans:
|
||||
return {}
|
||||
|
||||
# Sort plans by order date
|
||||
sorted_plans = sorted(plans, key=lambda p: p.order_date)
|
||||
|
||||
consolidated: Dict[date, List[LeadTimePlan]] = {}
|
||||
current_date = None
|
||||
current_batch = []
|
||||
|
||||
for plan in sorted_plans:
|
||||
if current_date is None:
|
||||
current_date = plan.order_date
|
||||
current_batch = [plan]
|
||||
else:
|
||||
days_diff = (plan.order_date - current_date).days
|
||||
|
||||
if days_diff <= consolidation_window_days:
|
||||
# Within consolidation window
|
||||
current_batch.append(plan)
|
||||
else:
|
||||
# Save current batch
|
||||
consolidated[current_date] = current_batch
|
||||
|
||||
# Start new batch
|
||||
current_date = plan.order_date
|
||||
current_batch = [plan]
|
||||
|
||||
# Save last batch
|
||||
if current_batch:
|
||||
consolidated[current_date] = current_batch
|
||||
|
||||
logger.info(
|
||||
f"Consolidated {len(plans)} orders into {len(consolidated)} order dates"
|
||||
)
|
||||
|
||||
return consolidated
|
||||
|
||||
def calculate_coverage_window(
|
||||
self,
|
||||
order_date: date,
|
||||
delivery_date: date,
|
||||
required_by_date: date
|
||||
) -> Dict[str, int]:
|
||||
"""
|
||||
Calculate time windows for an order.
|
||||
|
||||
Args:
|
||||
order_date: When order is placed
|
||||
delivery_date: When order arrives
|
||||
required_by_date: When item is needed
|
||||
|
||||
Returns:
|
||||
Dictionary with time windows
|
||||
"""
|
||||
return {
|
||||
"order_to_delivery_days": (delivery_date - order_date).days,
|
||||
"delivery_to_required_days": (required_by_date - delivery_date).days,
|
||||
"total_lead_time_days": (delivery_date - order_date).days,
|
||||
"buffer_time_days": (required_by_date - delivery_date).days
|
||||
}
|
||||
|
||||
def validate_plan(
|
||||
self,
|
||||
plan: LeadTimePlan,
|
||||
today: Optional[date] = None
|
||||
) -> Tuple[bool, List[str]]:
|
||||
"""
|
||||
Validate a lead time plan for feasibility.
|
||||
|
||||
Args:
|
||||
plan: Lead time plan to validate
|
||||
today: Current date
|
||||
|
||||
Returns:
|
||||
Tuple of (is_valid, list of issues)
|
||||
"""
|
||||
if today is None:
|
||||
today = date.today()
|
||||
|
||||
issues = []
|
||||
|
||||
# Check if order date is in the past
|
||||
if plan.order_date < today:
|
||||
issues.append(f"Order date {plan.order_date} is in the past")
|
||||
|
||||
# Check if delivery date is before required date
|
||||
if plan.delivery_date > plan.required_by_date:
|
||||
days_late = (plan.delivery_date - plan.required_by_date).days
|
||||
issues.append(
|
||||
f"Delivery will be {days_late} days late (arrives {plan.delivery_date}, needed {plan.required_by_date})"
|
||||
)
|
||||
|
||||
# Check if lead time is reasonable
|
||||
if plan.lead_time_days > 90:
|
||||
issues.append(f"Lead time of {plan.lead_time_days} days seems unusually long")
|
||||
|
||||
# Check if order quantity is valid
|
||||
if plan.order_quantity <= 0:
|
||||
issues.append(f"Order quantity {plan.order_quantity} is invalid")
|
||||
|
||||
is_valid = len(issues) == 0
|
||||
|
||||
return is_valid, issues
|
||||
|
||||
def get_urgent_orders(
|
||||
self,
|
||||
plans: List[LeadTimePlan]
|
||||
) -> List[LeadTimePlan]:
|
||||
"""
|
||||
Filter to only urgent orders.
|
||||
|
||||
Args:
|
||||
plans: List of lead time plans
|
||||
|
||||
Returns:
|
||||
List of urgent plans
|
||||
"""
|
||||
urgent = [p for p in plans if p.is_urgent]
|
||||
|
||||
if urgent:
|
||||
logger.warning(f"Found {len(urgent)} urgent orders requiring immediate attention")
|
||||
|
||||
return urgent
|
||||
458
services/procurement/app/services/moq_aggregator.py
Normal file
458
services/procurement/app/services/moq_aggregator.py
Normal file
@@ -0,0 +1,458 @@
|
||||
"""
|
||||
MOQ Aggregator
|
||||
|
||||
Aggregates multiple procurement requirements to meet Minimum Order Quantities (MOQ)
|
||||
and optimize order sizes.
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from shared.utils.optimization import (
|
||||
round_to_moq,
|
||||
round_to_package_size,
|
||||
aggregate_requirements_for_moq
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class ProcurementRequirement:
    """A single procurement requirement to be aggregated into an order."""
    id: str                 # Unique requirement identifier
    ingredient_id: str      # Ingredient being procured
    ingredient_name: str    # Human-readable name (used in logs)
    quantity: Decimal       # Required quantity
    required_date: date     # Date by which the quantity is needed
    supplier_id: str        # Supplier expected to fulfill the requirement
    unit_of_measure: str    # Unit the quantity is expressed in
||||
|
||||
|
||||
@dataclass
class SupplierConstraints:
    """Ordering constraints imposed by a supplier; unset fields mean no constraint."""
    supplier_id: str                                    # Supplier these constraints belong to
    supplier_name: str                                  # Human-readable supplier name
    min_order_quantity: Optional[Decimal] = None        # MOQ: orders are rounded up to this
    min_order_value: Optional[Decimal] = None           # Minimum monetary order value
    package_size: Optional[Decimal] = None              # Quantities are rounded to multiples of this
    max_order_quantity: Optional[Decimal] = None        # Hard cap per order
    economic_order_multiple: Optional[Decimal] = None   # Preferred order-quantity multiple
|
||||
|
||||
@dataclass
class AggregatedOrder:
    """An order produced by combining requirements and applying supplier constraints."""
    id: str                                         # Aggregated-order identifier
    supplier_id: str                                # Supplier receiving the order
    ingredient_id: str                              # Ingredient being ordered
    ingredient_name: str                            # Human-readable name (used in logs)
    aggregated_quantity: Decimal                    # Final quantity after MOQ/rounding/capping
    original_quantity: Decimal                      # Sum of the underlying requirements
    order_date: date                                # Planned order date
    unit_of_measure: str                            # Unit the quantities are expressed in
    requirements: List[ProcurementRequirement]      # Source requirements folded into this order
    adjustment_reason: str                          # Human-readable summary of applied adjustments
    moq_applied: bool                               # True when quantity was raised to meet MOQ
    package_rounding_applied: bool                  # True when quantity was rounded to package size
||||
|
||||
|
||||
class MOQAggregator:
    """
    Aggregates procurement requirements to meet MOQ constraints.

    Strategies:
    1. Combine multiple requirements for same ingredient
    2. Round up to meet MOQ
    3. Round to package sizes
    4. Consolidate orders within time window
    5. Optimize order timing
    """

    def __init__(
        self,
        consolidation_window_days: int = 7,
        allow_early_ordering: bool = True
    ):
        """
        Initialize MOQ aggregator.

        Args:
            consolidation_window_days: Days within which to consolidate orders
            allow_early_ordering: Whether to allow ordering early to meet MOQ
        """
        self.consolidation_window_days = consolidation_window_days
        self.allow_early_ordering = allow_early_ordering

    def aggregate_requirements(
        self,
        requirements: List[ProcurementRequirement],
        supplier_constraints: Dict[str, SupplierConstraints]
    ) -> List[AggregatedOrder]:
        """
        Aggregate requirements to meet MOQ constraints.

        Args:
            requirements: List of procurement requirements
            supplier_constraints: Dictionary of supplier constraints by supplier_id

        Returns:
            List of aggregated orders
        """
        if not requirements:
            return []

        logger.info(f"Aggregating {len(requirements)} procurement requirements")

        # Group requirements by supplier and ingredient.
        grouped = self._group_requirements(requirements)

        aggregated_orders = []

        for (supplier_id, ingredient_id), reqs in grouped.items():
            constraints = supplier_constraints.get(supplier_id)

            if not constraints:
                # Fall back to an unconstrained placeholder so aggregation
                # still proceeds (quantities pass through unadjusted).
                logger.warning(
                    f"No constraints found for supplier {supplier_id}, "
                    f"processing without MOQ"
                )
                constraints = SupplierConstraints(
                    supplier_id=supplier_id,
                    supplier_name=f"Supplier {supplier_id}"
                )

            # Aggregate this supplier/ingredient group.
            orders = self._aggregate_ingredient_requirements(
                reqs,
                constraints
            )

            aggregated_orders.extend(orders)

        logger.info(
            f"Created {len(aggregated_orders)} aggregated orders "
            f"from {len(requirements)} requirements"
        )

        return aggregated_orders

    def _group_requirements(
        self,
        requirements: List[ProcurementRequirement]
    ) -> Dict[Tuple[str, str], List[ProcurementRequirement]]:
        """
        Group requirements by supplier and ingredient.

        Args:
            requirements: List of requirements

        Returns:
            Dictionary mapping (supplier_id, ingredient_id) to list of requirements
        """
        grouped: Dict[Tuple[str, str], List[ProcurementRequirement]] = {}

        for req in requirements:
            key = (req.supplier_id, req.ingredient_id)
            grouped.setdefault(key, []).append(req)

        return grouped

    def _aggregate_ingredient_requirements(
        self,
        requirements: List[ProcurementRequirement],
        constraints: SupplierConstraints
    ) -> List[AggregatedOrder]:
        """
        Aggregate requirements for one ingredient from one supplier.

        Args:
            requirements: List of requirements for same ingredient/supplier
            constraints: Supplier constraints

        Returns:
            List of aggregated orders, one per consolidation batch
        """
        if not requirements:
            return []

        # Sort by required date so consolidation windows are contiguous.
        sorted_reqs = sorted(requirements, key=lambda r: r.required_date)

        # Split into batches that fit inside the consolidation window.
        batches = self._consolidate_by_time_window(sorted_reqs)

        return [
            self._create_aggregated_order(batch, constraints)
            for batch in batches
        ]

    def _consolidate_by_time_window(
        self,
        requirements: List[ProcurementRequirement]
    ) -> List[List[ProcurementRequirement]]:
        """
        Consolidate requirements within the configured time window.

        Args:
            requirements: Requirements sorted by required_date

        Returns:
            List of requirement batches; each batch spans at most
            consolidation_window_days from its first requirement
        """
        if not requirements:
            return []

        batches = []
        current_batch = [requirements[0]]
        batch_start_date = requirements[0].required_date

        for req in requirements[1:]:
            days_diff = (req.required_date - batch_start_date).days

            if days_diff <= self.consolidation_window_days:
                # Within window: add to the open batch.
                current_batch.append(req)
            else:
                # Outside window: close the batch and open a new one.
                batches.append(current_batch)
                current_batch = [req]
                batch_start_date = req.required_date

        # Close the final batch.
        if current_batch:
            batches.append(current_batch)

        return batches

    def _create_aggregated_order(
        self,
        requirements: List[ProcurementRequirement],
        constraints: SupplierConstraints
    ) -> AggregatedOrder:
        """
        Create an aggregated order from a batch of requirements.

        Applies, in order: MOQ round-up, package-size rounding, max-quantity
        cap, economic-order-multiple rounding.

        Args:
            requirements: List of requirements to aggregate (non-empty)
            constraints: Supplier constraints

        Returns:
            Aggregated order
        """
        # Sum quantities across the batch.
        total_quantity = sum(req.quantity for req in requirements)
        original_quantity = total_quantity

        # Order by the earliest required date in the batch.
        order_date = min(req.required_date for req in requirements)

        # Ingredient identity comes from the first requirement (all
        # requirements in a batch share supplier and ingredient).
        first_req = requirements[0]
        ingredient_id = first_req.ingredient_id
        ingredient_name = first_req.ingredient_name
        unit_of_measure = first_req.unit_of_measure

        adjustment_reason = []
        moq_applied = False
        package_rounding_applied = False

        # 1. Round up to the minimum order quantity.
        if constraints.min_order_quantity:
            if total_quantity < constraints.min_order_quantity:
                total_quantity = constraints.min_order_quantity
                moq_applied = True
                adjustment_reason.append(
                    f"Rounded up to MOQ: {constraints.min_order_quantity} {unit_of_measure}"
                )

        # 2. Round to whole packages.
        if constraints.package_size:
            rounded_qty = round_to_package_size(
                total_quantity,
                constraints.package_size,
                allow_partial=False
            )
            if rounded_qty != total_quantity:
                total_quantity = rounded_qty
                package_rounding_applied = True
                adjustment_reason.append(
                    f"Rounded to package size: {constraints.package_size} {unit_of_measure}"
                )

        # 3. Cap at the maximum order quantity.
        if constraints.max_order_quantity:
            if total_quantity > constraints.max_order_quantity:
                logger.warning(
                    f"{ingredient_name}: Order quantity {total_quantity} exceeds "
                    f"max {constraints.max_order_quantity}, capping"
                )
                total_quantity = constraints.max_order_quantity
                adjustment_reason.append(
                    f"Capped at maximum: {constraints.max_order_quantity} {unit_of_measure}"
                )

        # 4. Round up to the economic order multiple.
        # NOTE(review): this runs after the max-quantity cap and rounds UP, so
        # it can push the quantity back above max_order_quantity when the two
        # constraints conflict — confirm intended precedence with the business.
        if constraints.economic_order_multiple:
            multiple = constraints.economic_order_multiple
            rounded = round_to_moq(total_quantity, multiple, round_up=True)
            if rounded != total_quantity:
                total_quantity = rounded
                adjustment_reason.append(
                    f"Rounded to economic multiple: {multiple} {unit_of_measure}"
                )

        order = AggregatedOrder(
            id=f"agg_{requirements[0].id}",
            supplier_id=constraints.supplier_id,
            ingredient_id=ingredient_id,
            ingredient_name=ingredient_name,
            aggregated_quantity=total_quantity,
            original_quantity=original_quantity,
            order_date=order_date,
            unit_of_measure=unit_of_measure,
            requirements=requirements,
            adjustment_reason=" | ".join(adjustment_reason) if adjustment_reason else "No adjustments",
            moq_applied=moq_applied,
            package_rounding_applied=package_rounding_applied
        )

        if total_quantity != original_quantity:
            logger.info(
                f"{ingredient_name}: Aggregated {len(requirements)} requirements "
                f"({original_quantity} → {total_quantity} {unit_of_measure})"
            )

        return order

    def calculate_order_efficiency(
        self,
        orders: List[AggregatedOrder]
    ) -> Dict:
        """
        Calculate efficiency metrics for aggregated orders.

        Args:
            orders: List of aggregated orders

        Returns:
            Efficiency metrics: order/requirement counts, consolidation
            ratio, and the quantity overhead introduced by MOQ/rounding
        """
        total_orders = len(orders)
        total_requirements = sum(len(order.requirements) for order in orders)

        orders_with_moq = sum(1 for order in orders if order.moq_applied)
        orders_with_rounding = sum(1 for order in orders if order.package_rounding_applied)

        total_original_qty = sum(order.original_quantity for order in orders)
        total_aggregated_qty = sum(order.aggregated_quantity for order in orders)

        # Overhead is the extra quantity ordered beyond what was required.
        overhead_qty = total_aggregated_qty - total_original_qty
        overhead_percentage = (
            (overhead_qty / total_original_qty * 100)
            if total_original_qty > 0 else 0
        )

        consolidation_ratio = (
            total_requirements / total_orders
            if total_orders > 0 else 0
        )

        return {
            'total_orders': total_orders,
            'total_requirements': total_requirements,
            'consolidation_ratio': float(consolidation_ratio),
            'orders_with_moq_adjustment': orders_with_moq,
            'orders_with_package_rounding': orders_with_rounding,
            'total_original_quantity': float(total_original_qty),
            'total_aggregated_quantity': float(total_aggregated_qty),
            'overhead_quantity': float(overhead_qty),
            'overhead_percentage': float(overhead_percentage)
        }

    def split_oversized_order(
        self,
        order: AggregatedOrder,
        max_quantity: Decimal,
        split_window_days: int = 7
    ) -> List[AggregatedOrder]:
        """
        Split an oversized order into multiple smaller orders.

        Args:
            order: Order to split
            max_quantity: Maximum quantity per order (must be > 0)
            split_window_days: Days between split orders

        Returns:
            List of split orders (the original order unchanged if it already
            fits within max_quantity)
        """
        if order.aggregated_quantity <= max_quantity:
            return [order]

        logger.info(
            f"Splitting oversized order: {order.aggregated_quantity} > {max_quantity}"
        )

        # Ceiling division: the minimum number of orders of size
        # <= max_quantity. (The previous `int(qty / max) + 1` over-split
        # evenly-divisible quantities, e.g. 100/50 produced 3 orders of
        # ~33.3 instead of 2 orders of 50.)
        num_splits = int(order.aggregated_quantity // max_quantity)
        if order.aggregated_quantity % max_quantity:
            num_splits += 1

        qty_per_order = order.aggregated_quantity / Decimal(str(num_splits))

        split_orders = []

        for i in range(num_splits):
            # Stagger the splits across the supplier's capacity window.
            split_date = order.order_date + timedelta(days=i * split_window_days)

            split_order = AggregatedOrder(
                id=f"{order.id}_split_{i+1}",
                supplier_id=order.supplier_id,
                ingredient_id=order.ingredient_id,
                ingredient_name=order.ingredient_name,
                aggregated_quantity=qty_per_order,
                original_quantity=order.original_quantity / Decimal(str(num_splits)),
                order_date=split_date,
                unit_of_measure=order.unit_of_measure,
                requirements=order.requirements,  # Share requirements
                adjustment_reason=f"Split {i+1}/{num_splits} due to capacity constraint",
                moq_applied=order.moq_applied,
                package_rounding_applied=order.package_rounding_applied
            )

            split_orders.append(split_order)

        return split_orders

    def export_to_dict(self, order: AggregatedOrder) -> Dict:
        """
        Export aggregated order to dictionary.

        Args:
            order: Aggregated order

        Returns:
            Dictionary representation (Decimals as floats, dates as ISO strings)
        """
        return {
            'id': order.id,
            'supplier_id': order.supplier_id,
            'ingredient_id': order.ingredient_id,
            'ingredient_name': order.ingredient_name,
            'aggregated_quantity': float(order.aggregated_quantity),
            'original_quantity': float(order.original_quantity),
            'order_date': order.order_date.isoformat(),
            'unit_of_measure': order.unit_of_measure,
            'num_requirements_aggregated': len(order.requirements),
            'adjustment_reason': order.adjustment_reason,
            'moq_applied': order.moq_applied,
            'package_rounding_applied': order.package_rounding_applied
        }
|
||||
266
services/procurement/app/services/overdue_po_detector.py
Normal file
266
services/procurement/app/services/overdue_po_detector.py
Normal file
@@ -0,0 +1,266 @@
|
||||
"""
|
||||
Overdue Purchase Order Detector
|
||||
|
||||
Detects POs that are past their estimated delivery date and triggers alerts.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import List, Dict, Any, Optional
|
||||
import structlog
|
||||
from sqlalchemy import select, and_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderStatus
|
||||
from app.core.database import database_manager
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class OverduePODetector:
    """
    Detects and reports purchase orders past their estimated delivery date.

    A PO is treated as overdue when all of the following hold:
    - its status is still in-flight ('approved', 'sent_to_supplier'
      or 'confirmed'), i.e. not yet delivered, completed or cancelled
    - it carries an estimated_delivery_date
    - that date lies in the past
    """

    def __init__(self):
        """Initialize overdue PO detector"""
        # Grace period (hours) before a late PO is flagged as overdue.
        self.overdue_threshold_hours = 24

    async def detect_overdue_pos(
        self,
        tenant_id: Optional[uuid.UUID] = None
    ) -> List[Dict[str, Any]]:
        """
        Detect all overdue POs.

        Args:
            tenant_id: Optional tenant filter. If None, checks all tenants.

        Returns:
            List of overdue PO summaries (empty list on error).
        """
        try:
            current_time = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                # Only POs that are in-flight (approved / sent / confirmed),
                # have a delivery estimate, and are already past it.
                in_flight = [
                    PurchaseOrderStatus.approved,
                    PurchaseOrderStatus.sent_to_supplier,
                    PurchaseOrderStatus.confirmed,
                ]
                stmt = select(PurchaseOrder).where(
                    and_(
                        PurchaseOrder.status.in_(in_flight),
                        PurchaseOrder.estimated_delivery_date.isnot(None),
                        PurchaseOrder.estimated_delivery_date < current_time
                    )
                )
                if tenant_id:
                    stmt = stmt.where(PurchaseOrder.tenant_id == tenant_id)

                late_orders = (await session.execute(stmt)).scalars().all()

                summaries = []
                for order in late_orders:
                    lateness = current_time - order.estimated_delivery_date
                    days_late = lateness.days
                    summaries.append({
                        'po_id': str(order.id),
                        'tenant_id': str(order.tenant_id),
                        'po_number': order.po_number,
                        'supplier_id': str(order.supplier_id),
                        'status': order.status.value,
                        'total_amount': float(order.total_amount),
                        'currency': order.currency,
                        'approved_at': order.approved_at.isoformat() if order.approved_at else None,
                        'estimated_delivery_date': order.estimated_delivery_date.isoformat(),
                        'days_overdue': days_late,
                        'hours_overdue': round(lateness.total_seconds() / 3600, 1),
                        'severity': self._calculate_severity(days_late),
                        'priority': order.priority
                    })

                tenant_label = str(tenant_id) if tenant_id else "all"
                if summaries:
                    logger.warning(
                        "Detected overdue purchase orders",
                        count=len(summaries),
                        tenant_id=tenant_label
                    )
                else:
                    logger.info(
                        "No overdue purchase orders detected",
                        tenant_id=tenant_label
                    )

                return summaries

        except Exception as e:
            logger.error(
                "Error detecting overdue POs",
                error=str(e),
                tenant_id=str(tenant_id) if tenant_id else "all",
                exc_info=True
            )
            return []

    async def get_overdue_count_by_tenant(self) -> Dict[str, int]:
        """
        Get count of overdue POs grouped by tenant.

        Returns:
            Dictionary mapping tenant_id (string) to overdue count;
            empty dict on error.
        """
        try:
            current_time = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                stmt = select(
                    PurchaseOrder.tenant_id,
                    PurchaseOrder.id
                ).where(
                    and_(
                        PurchaseOrder.status.in_([
                            PurchaseOrderStatus.approved,
                            PurchaseOrderStatus.sent_to_supplier,
                            PurchaseOrderStatus.confirmed
                        ]),
                        PurchaseOrder.estimated_delivery_date.isnot(None),
                        PurchaseOrder.estimated_delivery_date < current_time
                    )
                )

                rows = (await session.execute(stmt)).all()

                # Aggregate row-by-row in Python; one entry per tenant.
                counts: Dict[str, int] = {}
                for row_tenant_id, _po_id in rows:
                    key = str(row_tenant_id)
                    counts[key] = counts.get(key, 0) + 1

                return counts

        except Exception as e:
            logger.error("Error getting overdue counts", error=str(e), exc_info=True)
            return {}

    def _calculate_severity(self, days_overdue: int) -> str:
        """
        Map days overdue to a severity bucket.

        Args:
            days_overdue: Number of days past delivery date.

        Returns:
            Severity level: 'low' (<=1), 'medium' (<=3), 'high' (<=7)
            or 'critical' (beyond a week).
        """
        for limit, label in ((1, 'low'), (3, 'medium'), (7, 'high')):
            if days_overdue <= limit:
                return label
        return 'critical'

    async def get_overdue_pos_for_dashboard(
        self,
        tenant_id: uuid.UUID,
        limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        Get overdue POs formatted for dashboard display.

        Args:
            tenant_id: Tenant ID
            limit: Max number of results

        Returns:
            Up to `limit` overdue POs, most critical first.
        """
        results = await self.detect_overdue_pos(tenant_id)

        # Most critical severity first, then the longest-overdue first.
        rank = {'critical': 0, 'high': 1, 'medium': 2, 'low': 3}
        results.sort(
            key=lambda entry: (rank.get(entry['severity'], 999), -entry['days_overdue'])
        )

        return results[:limit]

    async def check_single_po_overdue(
        self,
        po_id: uuid.UUID,
        tenant_id: uuid.UUID
    ) -> Optional[Dict[str, Any]]:
        """
        Check if a single PO is overdue.

        Args:
            po_id: PO ID
            tenant_id: Tenant ID

        Returns:
            Overdue info if the PO exists and is overdue, None otherwise
            (also None on error).
        """
        try:
            current_time = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                stmt = select(PurchaseOrder).where(
                    and_(
                        PurchaseOrder.id == po_id,
                        PurchaseOrder.tenant_id == tenant_id
                    )
                )
                order = (await session.execute(stmt)).scalar_one_or_none()

                if order is None:
                    return None

                still_in_flight = order.status in [
                    PurchaseOrderStatus.approved,
                    PurchaseOrderStatus.sent_to_supplier,
                    PurchaseOrderStatus.confirmed
                ]
                if (
                    still_in_flight and
                    order.estimated_delivery_date and
                    order.estimated_delivery_date < current_time
                ):
                    days_late = (current_time - order.estimated_delivery_date).days
                    return {
                        'po_id': str(order.id),
                        'po_number': order.po_number,
                        'days_overdue': days_late,
                        'severity': self._calculate_severity(days_late),
                        'estimated_delivery_date': order.estimated_delivery_date.isoformat()
                    }

                return None

        except Exception as e:
            logger.error(
                "Error checking single PO overdue status",
                error=str(e),
                po_id=str(po_id),
                exc_info=True
            )
            return None
|
||||
416
services/procurement/app/services/procurement_alert_service.py
Normal file
416
services/procurement/app/services/procurement_alert_service.py
Normal file
@@ -0,0 +1,416 @@
|
||||
"""
|
||||
Procurement Alert Service - Simplified
|
||||
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import List, Dict, Any, Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProcurementAlertService:
    """
    Simplified procurement alert service using UnifiedEventPublisher.

    Each emit_* method publishes a minimal supply-chain event; all
    enrichment (templates, routing, deduplication) is handled downstream
    by the alert_processor.
    """

    def __init__(self, event_publisher: UnifiedEventPublisher):
        """
        Args:
            event_publisher: Shared publisher used for alerts and
                recommendations.
        """
        self.publisher = event_publisher

    async def emit_po_approval_needed(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        total_amount: float,
        currency: str,
        items_count: int,
        required_delivery_date: str
    ):
        """Emit a high-severity alert that a PO requires approval."""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "total_amount": total_amount,
            "po_amount": total_amount,  # Alias for compatibility
            "currency": currency,
            "items_count": items_count,
            "required_delivery_date": required_delivery_date
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.po_approval_needed",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "po_approval_needed_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            total_amount=total_amount
        )

    async def emit_delivery_overdue(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        supplier_contact: Optional[str],
        expected_date: str,
        days_overdue: int,
        items: List[Dict[str, Any]]
    ):
        """Emit delivery overdue alert; severity scales with days overdue."""

        # Severity tiers: >7 days urgent, >3 days high, otherwise medium.
        if days_overdue > 7:
            severity = "urgent"
        elif days_overdue > 3:
            severity = "high"
        else:
            severity = "medium"

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "expected_date": expected_date,
            "days_overdue": days_overdue,
            "items": items,
            "items_count": len(items)
        }

        # Contact is optional; only attach it when a non-empty value exists.
        if supplier_contact:
            metadata["supplier_contact"] = supplier_contact

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_overdue",
            tenant_id=tenant_id,
            severity=severity,
            data=metadata
        )

        logger.info(
            "delivery_overdue_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            days_overdue=days_overdue
        )

    async def emit_supplier_performance_issue(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        issue_type: str,
        issue_description: str,
        affected_orders: int = 0,
        total_value_affected: Optional[float] = None
    ):
        """Emit supplier performance issue alert (always high severity)."""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "issue_type": issue_type,
            "issue_description": issue_description,
            "affected_orders": affected_orders
        }

        # Fix: compare against None rather than truthiness, so an explicit
        # 0.0 impact is still reported instead of being silently dropped.
        if total_value_affected is not None:
            metadata["total_value_affected"] = total_value_affected

        await self.publisher.publish_alert(
            event_type="supply_chain.supplier_performance_issue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "supplier_performance_issue_emitted",
            tenant_id=str(tenant_id),
            supplier_name=supplier_name,
            issue_type=issue_type
        )

    async def emit_price_increase_alert(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        ingredient_name: str,
        old_price: float,
        new_price: float,
        increase_percent: float
    ):
        """Emit price increase alert; severity scales with percent increase."""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "ingredient_name": ingredient_name,
            "old_price": old_price,
            "new_price": new_price,
            "increase_percent": increase_percent
        }

        # Severity tiers: >20% high, >10% medium, otherwise low.
        if increase_percent > 20:
            severity = "high"
        elif increase_percent > 10:
            severity = "medium"
        else:
            severity = "low"

        await self.publisher.publish_alert(
            event_type="supply_chain.price_increase",
            tenant_id=tenant_id,
            severity=severity,
            data=metadata
        )

        logger.info(
            "price_increase_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name,
            increase_percent=increase_percent
        )

    async def emit_partial_delivery(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        ordered_quantity: float,
        delivered_quantity: float,
        missing_quantity: float,
        ingredient_name: str
    ):
        """Emit partial delivery alert (medium severity)."""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "ordered_quantity": ordered_quantity,
            "delivered_quantity": delivered_quantity,
            "missing_quantity": missing_quantity,
            "ingredient_name": ingredient_name
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.partial_delivery",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "partial_delivery_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            missing_quantity=missing_quantity
        )

    async def emit_delivery_quality_issue(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        issue_description: str,
        affected_items: List[Dict[str, Any]],
        requires_return: bool = False
    ):
        """Emit delivery quality issue alert (high severity)."""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "issue_description": issue_description,
            "affected_items": affected_items,
            "requires_return": requires_return,
            "affected_items_count": len(affected_items)
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_quality_issue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "delivery_quality_issue_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            requires_return=requires_return
        )

    async def emit_low_supplier_rating(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        current_rating: float,
        issues_count: int,
        recommendation: str
    ):
        """Emit low supplier rating alert (medium severity)."""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "current_rating": current_rating,
            "issues_count": issues_count,
            "recommendation": recommendation
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.low_supplier_rating",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "low_supplier_rating_emitted",
            tenant_id=str(tenant_id),
            supplier_name=supplier_name,
            current_rating=current_rating
        )

    # Recommendation methods

    async def emit_supplier_consolidation(
        self,
        tenant_id: UUID,
        current_suppliers_count: int,
        suggested_suppliers: List[str],
        potential_savings_eur: float
    ):
        """Emit supplier consolidation recommendation."""

        metadata = {
            "current_suppliers_count": current_suppliers_count,
            "suggested_suppliers": suggested_suppliers,
            "potential_savings_eur": potential_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.supplier_consolidation",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "supplier_consolidation_emitted",
            tenant_id=str(tenant_id),
            potential_savings=potential_savings_eur
        )

    async def emit_bulk_purchase_opportunity(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_order_frequency: int,
        suggested_bulk_size: float,
        potential_discount_percent: float,
        estimated_savings_eur: float
    ):
        """Emit bulk purchase opportunity recommendation."""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_order_frequency": current_order_frequency,
            "suggested_bulk_size": suggested_bulk_size,
            "potential_discount_percent": potential_discount_percent,
            "estimated_savings_eur": estimated_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.bulk_purchase_opportunity",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "bulk_purchase_opportunity_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name,
            estimated_savings=estimated_savings_eur
        )

    async def emit_alternative_supplier_suggestion(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_supplier: str,
        alternative_supplier: str,
        price_difference_eur: float,
        quality_rating: float
    ):
        """Emit alternative supplier suggestion."""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_supplier": current_supplier,
            "alternative_supplier": alternative_supplier,
            "price_difference_eur": price_difference_eur,
            "quality_rating": quality_rating
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.alternative_supplier_suggestion",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "alternative_supplier_suggestion_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )

    async def emit_reorder_point_optimization(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_reorder_point: float,
        suggested_reorder_point: float,
        rationale: str
    ):
        """Emit reorder point optimization recommendation."""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_reorder_point": current_reorder_point,
            "suggested_reorder_point": suggested_reorder_point,
            "rationale": rationale
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.reorder_point_optimization",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "reorder_point_optimization_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )
|
||||
293
services/procurement/app/services/procurement_event_service.py
Normal file
293
services/procurement/app/services/procurement_event_service.py
Normal file
@@ -0,0 +1,293 @@
|
||||
"""
|
||||
Procurement Event Service - Simplified
|
||||
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
|
||||
ALERTS (actionable):
|
||||
- po_approval_needed: Purchase order requires approval
|
||||
- delivery_overdue: Delivery past expected date
|
||||
|
||||
NOTIFICATIONS (informational):
|
||||
- po_approved: Purchase order approved
|
||||
- po_sent_to_supplier: PO sent to supplier
|
||||
- delivery_scheduled: Delivery confirmed
|
||||
- delivery_arriving_soon: Delivery arriving within hours
|
||||
- delivery_received: Delivery arrived
|
||||
|
||||
This service demonstrates the mixed event model where a single domain
|
||||
emits both actionable alerts and informational notifications.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProcurementEventService:
    """
    Emits procurement/supply-chain events via UnifiedEventPublisher.

    Demonstrates the mixed event model: a single domain emits both
    actionable ALERTS and informational NOTIFICATIONS.
    """

    def __init__(self, event_publisher: UnifiedEventPublisher):
        self.publisher = event_publisher

    # ============================================================
    # ALERTS (Actionable)
    # ============================================================

    async def emit_po_approval_needed_alert(
        self,
        tenant_id: UUID,
        po_id: str,
        supplier_name: str,
        total_amount_eur: float,
        items_count: int,
        urgency_reason: str,
        delivery_needed_by: Optional[str] = None,
    ) -> None:
        """
        Emit ALERT when a purchase order requires approval.

        Severity is "high" for large (>1000 EUR) or expedited orders,
        otherwise "medium".
        """
        # Duplicate keys are intentional aliases for template compatibility.
        payload = {
            "po_id": po_id,
            "po_number": po_id,
            "supplier_name": supplier_name,
            "total_amount_eur": float(total_amount_eur),
            "total_amount": float(total_amount_eur),
            "currency": "EUR",
            "items_count": items_count,
            "urgency_reason": urgency_reason,
            "delivery_needed_by": delivery_needed_by,
            "required_delivery_date": delivery_needed_by,
        }

        is_urgent = total_amount_eur > 1000 or "expedited" in urgency_reason.lower()
        await self.publisher.publish_alert(
            event_type="supply_chain.po_approval_needed",
            tenant_id=tenant_id,
            severity="high" if is_urgent else "medium",
            data=payload
        )

        logger.info(
            "po_approval_needed_alert_emitted",
            tenant_id=str(tenant_id),
            po_id=po_id,
            total_amount_eur=total_amount_eur
        )

    async def emit_delivery_overdue_alert(
        self,
        tenant_id: UUID,
        delivery_id: str,
        po_id: str,
        supplier_name: str,
        expected_date: str,
        days_overdue: int,
        items_affected: List[Dict[str, Any]],
    ) -> None:
        """
        Emit ALERT when a delivery is overdue.

        Severity tiers: >7 days "urgent", >3 days "high", else "medium".
        """
        severity = (
            "urgent" if days_overdue > 7
            else "high" if days_overdue > 3
            else "medium"
        )

        payload = {
            "delivery_id": delivery_id,
            "po_id": po_id,
            "supplier_name": supplier_name,
            "expected_date": expected_date,
            "days_overdue": days_overdue,
            "items_affected": items_affected,
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_overdue",
            tenant_id=tenant_id,
            severity=severity,
            data=payload
        )

        logger.info(
            "delivery_overdue_alert_emitted",
            tenant_id=str(tenant_id),
            delivery_id=delivery_id,
            days_overdue=days_overdue
        )

    # ============================================================
    # NOTIFICATIONS (Informational)
    # ============================================================

    async def emit_po_approved_notification(
        self,
        tenant_id: UUID,
        po_id: str,
        supplier_name: str,
        total_amount_eur: float,
        approved_by: str,
        expected_delivery_date: Optional[str] = None,
    ) -> None:
        """Emit NOTIFICATION when a purchase order is approved."""
        payload = {
            "po_id": po_id,
            "supplier_name": supplier_name,
            "total_amount_eur": float(total_amount_eur),
            "approved_by": approved_by,
            "expected_delivery_date": expected_delivery_date,
            # Timestamp the approval at emission time (UTC).
            "approved_at": datetime.now(timezone.utc).isoformat(),
        }

        await self.publisher.publish_notification(
            event_type="supply_chain.po_approved",
            tenant_id=tenant_id,
            data=payload
        )

        logger.info(
            "po_approved_notification_emitted",
            tenant_id=str(tenant_id),
            po_id=po_id
        )

    async def emit_po_sent_to_supplier_notification(
        self,
        tenant_id: UUID,
        po_id: str,
        supplier_name: str,
        supplier_email: str,
    ) -> None:
        """Emit NOTIFICATION when a PO is sent to the supplier."""
        payload = {
            "po_id": po_id,
            "supplier_name": supplier_name,
            "supplier_email": supplier_email,
            "sent_at": datetime.now(timezone.utc).isoformat(),
        }

        await self.publisher.publish_notification(
            event_type="supply_chain.po_sent_to_supplier",
            tenant_id=tenant_id,
            data=payload
        )

        logger.info(
            "po_sent_to_supplier_notification_emitted",
            tenant_id=str(tenant_id),
            po_id=po_id
        )

    async def emit_delivery_scheduled_notification(
        self,
        tenant_id: UUID,
        delivery_id: str,
        po_id: str,
        supplier_name: str,
        expected_delivery_date: str,
        tracking_number: Optional[str] = None,
    ) -> None:
        """Emit NOTIFICATION when a delivery is scheduled/confirmed."""
        payload = {
            "delivery_id": delivery_id,
            "po_id": po_id,
            "supplier_name": supplier_name,
            "expected_delivery_date": expected_delivery_date,
            "tracking_number": tracking_number,
        }

        await self.publisher.publish_notification(
            event_type="supply_chain.delivery_scheduled",
            tenant_id=tenant_id,
            data=payload
        )

        logger.info(
            "delivery_scheduled_notification_emitted",
            tenant_id=str(tenant_id),
            delivery_id=delivery_id
        )

    async def emit_delivery_arriving_soon_notification(
        self,
        tenant_id: UUID,
        delivery_id: str,
        supplier_name: str,
        expected_arrival_time: str,
        hours_until_arrival: int,
    ) -> None:
        """Emit NOTIFICATION when a delivery is arriving within hours."""
        payload = {
            "delivery_id": delivery_id,
            "supplier_name": supplier_name,
            "expected_arrival_time": expected_arrival_time,
            "hours_until_arrival": hours_until_arrival,
        }

        await self.publisher.publish_notification(
            event_type="supply_chain.delivery_arriving_soon",
            tenant_id=tenant_id,
            data=payload
        )

        logger.info(
            "delivery_arriving_soon_notification_emitted",
            tenant_id=str(tenant_id),
            delivery_id=delivery_id
        )

    async def emit_delivery_received_notification(
        self,
        tenant_id: UUID,
        delivery_id: str,
        po_id: str,
        supplier_name: str,
        items_received: int,
        received_by: str,
    ) -> None:
        """Emit NOTIFICATION when a delivery has been received."""
        payload = {
            "delivery_id": delivery_id,
            "po_id": po_id,
            "supplier_name": supplier_name,
            "items_received": items_received,
            "received_by": received_by,
            "received_at": datetime.now(timezone.utc).isoformat(),
        }

        await self.publisher.publish_notification(
            event_type="supply_chain.delivery_received",
            tenant_id=tenant_id,
            data=payload
        )

        logger.info(
            "delivery_received_notification_emitted",
            tenant_id=str(tenant_id),
            delivery_id=delivery_id
        )
|
||||
1055
services/procurement/app/services/procurement_service.py
Normal file
1055
services/procurement/app/services/procurement_service.py
Normal file
File diff suppressed because it is too large
Load Diff
1140
services/procurement/app/services/purchase_order_service.py
Normal file
1140
services/procurement/app/services/purchase_order_service.py
Normal file
File diff suppressed because it is too large
Load Diff
376
services/procurement/app/services/recipe_explosion_service.py
Normal file
376
services/procurement/app/services/recipe_explosion_service.py
Normal file
@@ -0,0 +1,376 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/services/recipe_explosion_service.py
|
||||
# ================================================================
|
||||
"""
|
||||
Recipe Explosion Service - Multi-level BOM (Bill of Materials) explosion
|
||||
Converts finished product demand into raw ingredient requirements for locally-produced items
|
||||
"""
|
||||
|
||||
import uuid
|
||||
import structlog
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
from decimal import Decimal
|
||||
from collections import defaultdict
|
||||
|
||||
from shared.clients.recipes_client import RecipesServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class CircularDependencyError(Exception):
    """Signals a cycle in the recipe tree (a recipe that, directly or
    through sub-recipes, references itself)."""
|
||||
|
||||
|
||||
class RecipeExplosionService:
|
||||
"""
|
||||
Service for exploding finished product requirements into raw ingredient requirements.
|
||||
Supports multi-level BOM explosion (recipes that reference other recipes).
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    recipes_client: RecipesServiceClient,
    inventory_client: InventoryServiceClient
):
    """Store the service clients and the configured explosion depth limit."""
    # Cap recursion depth so a malformed recipe tree cannot recurse
    # forever (configured default: 5 levels).
    self.max_depth = settings.MAX_BOM_EXPLOSION_DEPTH
    self.recipes_client = recipes_client
    self.inventory_client = inventory_client
|
||||
|
||||
async def explode_requirements(
    self,
    tenant_id: uuid.UUID,
    requirements: List[Dict]
) -> Tuple[List[Dict], Dict]:
    """
    Expand locally-produced finished products into raw ingredient requirements.

    Requirements flagged as locally produced (and carrying a recipe_id)
    are exploded through their recipes; all other requirements pass
    through unchanged.

    Args:
        tenant_id: Tenant ID
        requirements: Procurement requirements (may mix locally-produced
            and purchased items)

    Returns:
        Tuple of (exploded_requirements, explosion_metadata)
        - exploded_requirements: purchased items plus exploded ingredients
        - explosion_metadata: details about the explosion process
    """
    logger.info("Starting recipe explosion",
                tenant_id=str(tenant_id),
                total_requirements=len(requirements))

    # Split the input: items to explode vs. items bought as-is.
    produce_in_house: List[Dict] = []
    buy_directly: List[Dict] = []
    for item in requirements:
        needs_explosion = item.get('is_locally_produced', False) and item.get('recipe_id')
        (produce_in_house if needs_explosion else buy_directly).append(item)

    logger.info("Requirements categorized",
                locally_produced_count=len(produce_in_house),
                purchased_direct_count=len(buy_directly))

    if not produce_in_house:
        # Nothing to explode -- hand the input back untouched.
        return requirements, {'explosion_performed': False, 'message': 'No locally-produced items'}

    ingredient_requirements = await self._explode_locally_produced_batch(
        tenant_id=tenant_id,
        locally_produced_requirements=produce_in_house
    )

    # Purchased items keep their position ahead of the exploded ingredients.
    combined = buy_directly + ingredient_requirements

    metadata = {
        'explosion_performed': True,
        'locally_produced_items_count': len(produce_in_house),
        'purchased_direct_count': len(buy_directly),
        'exploded_ingredients_count': len(ingredient_requirements),
        'total_final_requirements': len(combined)
    }

    logger.info("Recipe explosion completed", **metadata)

    return combined, metadata
|
||||
|
||||
async def _explode_locally_produced_batch(
    self,
    tenant_id: uuid.UUID,
    locally_produced_requirements: List[Dict]
) -> List[Dict]:
    """
    Explode a batch of locally-produced requirements into raw ingredients.

    Uses multi-level explosion (recursive) to handle recipes that reference
    other recipes. Quantities for the same ingredient appearing in several
    requirements are aggregated into a single entry.

    Args:
        tenant_id: Tenant ID
        locally_produced_requirements: Requirement dicts whose products are
            produced in-house and carry a 'recipe_id'.

    Returns:
        List of raw-ingredient requirement dicts with aggregated quantities.
        Requirements whose explosion fails (circular dependency or any other
        error) are logged and skipped.
    """
    # Aggregated ingredient requirements, keyed by ingredient id
    aggregated_ingredients: Dict[str, Dict] = {}

    for req in locally_produced_requirements:
        product_id = req['product_id']
        recipe_id = req['recipe_id']
        required_quantity = Decimal(str(req['required_quantity']))

        logger.info("Exploding locally-produced item",
                    product_id=str(product_id),
                    recipe_id=str(recipe_id),
                    quantity=float(required_quantity))

        try:
            # Explode this recipe (recursive, multi-level)
            ingredients = await self._explode_recipe_recursive(
                tenant_id=tenant_id,
                recipe_id=recipe_id,
                required_quantity=required_quantity,
                current_depth=0,
                visited_recipes=set(),
                parent_requirement=req
            )

            # Aggregate ingredients across requirements
            for ingredient in ingredients:
                ingredient_id = ingredient['ingredient_id']
                quantity = ingredient['quantity']

                if ingredient_id in aggregated_ingredients:
                    # BUG FIX: 'quantity' is a float (the recursive explosion
                    # stores float(scaled_quantity)), and Decimal + float
                    # raises TypeError.  That error was previously swallowed
                    # by the broad except below, silently dropping the whole
                    # requirement.  Convert via str -> Decimal before adding.
                    existing_qty = Decimal(str(aggregated_ingredients[ingredient_id]['quantity']))
                    aggregated_ingredients[ingredient_id]['quantity'] = float(
                        existing_qty + Decimal(str(quantity))
                    )
                else:
                    # First occurrence: keep the ingredient dict as-is
                    # (metadata of later duplicates is not merged).
                    aggregated_ingredients[ingredient_id] = ingredient

        except CircularDependencyError as e:
            logger.error("Circular dependency detected",
                         product_id=str(product_id),
                         recipe_id=str(recipe_id),
                         error=str(e))
            # Skip this item or handle gracefully
            continue
        except Exception as e:
            logger.error("Error exploding recipe",
                         product_id=str(product_id),
                         recipe_id=str(recipe_id),
                         error=str(e))
            continue

    # Convert aggregated dict to list
    return list(aggregated_ingredients.values())
|
||||
|
||||
async def _explode_recipe_recursive(
    self,
    tenant_id: uuid.UUID,
    recipe_id: uuid.UUID,
    required_quantity: Decimal,
    current_depth: int,
    visited_recipes: Set[str],
    parent_requirement: Dict
) -> List[Dict]:
    """
    Recursively explode a recipe into raw ingredients.

    Args:
        tenant_id: Tenant ID
        recipe_id: Recipe to explode
        required_quantity: How much of the finished product is needed
        current_depth: Current recursion depth (to prevent infinite loops)
        visited_recipes: Set of recipe IDs already visited (circular dependency detection)
        parent_requirement: The parent procurement requirement

    Returns:
        List of ingredient requirements (raw materials only)

    Raises:
        RecursionError: when nesting exceeds self.max_depth.
        CircularDependencyError: when recipe_id is already in visited_recipes.
        ValueError: when the recipe cannot be fetched from the Recipes Service.
    """
    # Check depth limit
    if current_depth >= self.max_depth:
        logger.warning("Max explosion depth reached",
                       recipe_id=str(recipe_id),
                       max_depth=self.max_depth)
        raise RecursionError(f"Max BOM explosion depth ({self.max_depth}) exceeded")

    # Check circular dependency
    recipe_id_str = str(recipe_id)
    if recipe_id_str in visited_recipes:
        logger.error("Circular dependency detected",
                     recipe_id=recipe_id_str,
                     visited_recipes=list(visited_recipes))
        raise CircularDependencyError(
            f"Circular dependency detected: recipe {recipe_id_str} references itself"
        )

    # Add to visited set (this call mutates the caller's set; nested calls
    # below receive a copy, so only ancestors on the current path accumulate)
    visited_recipes.add(recipe_id_str)

    logger.debug("Exploding recipe",
                 recipe_id=recipe_id_str,
                 required_quantity=float(required_quantity),
                 depth=current_depth)

    # Fetch recipe from Recipes Service
    recipe_data = await self.recipes_client.get_recipe_by_id(
        tenant_id=str(tenant_id),
        recipe_id=recipe_id_str
    )

    if not recipe_data:
        logger.error("Recipe not found", recipe_id=recipe_id_str)
        raise ValueError(f"Recipe {recipe_id_str} not found")

    # Calculate scale factor: how many recipe batches are needed to cover
    # the required quantity of finished product.
    # NOTE(review): a missing yield defaults to 1; a yield of 0 would raise
    # a decimal division error here — confirm the Recipes Service guarantees
    # a positive yield_quantity.
    recipe_yield_quantity = Decimal(str(recipe_data.get('yield_quantity', 1)))
    scale_factor = required_quantity / recipe_yield_quantity

    logger.debug("Recipe scale calculation",
                 recipe_yield=float(recipe_yield_quantity),
                 required=float(required_quantity),
                 scale_factor=float(scale_factor))

    # Get recipe ingredients
    ingredients = recipe_data.get('ingredients', [])
    if not ingredients:
        logger.warning("Recipe has no ingredients", recipe_id=recipe_id_str)
        return []

    # Process each ingredient
    exploded_ingredients = []

    for recipe_ingredient in ingredients:
        ingredient_id = uuid.UUID(recipe_ingredient['ingredient_id'])
        ingredient_quantity = Decimal(str(recipe_ingredient['quantity']))
        scaled_quantity = ingredient_quantity * scale_factor

        logger.debug("Processing recipe ingredient",
                     ingredient_id=str(ingredient_id),
                     base_quantity=float(ingredient_quantity),
                     scaled_quantity=float(scaled_quantity))

        # Check if this ingredient is ALSO locally produced (nested recipe)
        ingredient_info = await self._get_ingredient_info(tenant_id, ingredient_id)

        if ingredient_info and ingredient_info.get('produced_locally') and ingredient_info.get('recipe_id'):
            # Recursive case: This ingredient has its own recipe
            logger.info("Ingredient is locally produced, recursing",
                        ingredient_id=str(ingredient_id),
                        nested_recipe_id=ingredient_info['recipe_id'],
                        depth=current_depth + 1)

            nested_ingredients = await self._explode_recipe_recursive(
                tenant_id=tenant_id,
                recipe_id=uuid.UUID(ingredient_info['recipe_id']),
                required_quantity=scaled_quantity,
                current_depth=current_depth + 1,
                # Pass a copy to allow sibling branches: the same sub-recipe
                # may legitimately appear under two different parents
                # (a BOM "diamond") without being a cycle.
                visited_recipes=visited_recipes.copy(),
                parent_requirement=parent_requirement
            )

            exploded_ingredients.extend(nested_ingredients)

        else:
            # Base case: This is a raw ingredient (not produced locally).
            # Quantities are emitted as float here; callers that aggregate
            # must convert back through Decimal(str(...)).
            exploded_ingredients.append({
                'ingredient_id': str(ingredient_id),
                'product_id': str(ingredient_id),
                'quantity': float(scaled_quantity),
                'unit': recipe_ingredient.get('unit'),
                'is_locally_produced': False,
                'recipe_id': None,
                'parent_requirement_id': parent_requirement.get('id'),
                'bom_explosion_level': current_depth + 1,
                'source_recipe_id': recipe_id_str
            })

    return exploded_ingredients
|
||||
|
||||
async def _get_ingredient_info(
    self,
    tenant_id: uuid.UUID,
    ingredient_id: uuid.UUID
) -> Optional[Dict]:
    """
    Look up an ingredient in the Inventory Service and report whether it is
    produced locally (and from which recipe).

    Args:
        tenant_id: Tenant ID
        ingredient_id: Ingredient/Product ID

    Returns:
        Dict with 'id', 'name', 'produced_locally' and 'recipe_id', or None
        when the ingredient is missing or the lookup fails.
    """
    try:
        record = await self.inventory_client.get_ingredient_by_id(
            tenant_id=str(tenant_id),
            ingredient_id=str(ingredient_id)
        )
    except Exception as exc:
        logger.error("Error fetching ingredient info",
                     ingredient_id=str(ingredient_id),
                     error=str(exc))
        return None

    if not record:
        return None

    return {
        'id': record.get('id'),
        'name': record.get('name'),
        'produced_locally': record.get('produced_locally', False),
        'recipe_id': record.get('recipe_id'),
    }
|
||||
|
||||
async def validate_recipe_explosion(
    self,
    tenant_id: uuid.UUID,
    recipe_id: uuid.UUID
) -> Dict:
    """
    Validate if a recipe can be safely exploded (check for circular dependencies).

    Performs a dry-run explosion of one unit and maps each failure mode to a
    stable error code for the API.

    Args:
        tenant_id: Tenant ID
        recipe_id: Recipe to validate

    Returns:
        Dict with validation results ('valid', optional 'error', 'message')
    """
    try:
        # Dry-run with a single unit; we only care whether it completes.
        await self._explode_recipe_recursive(
            tenant_id=tenant_id,
            recipe_id=recipe_id,
            required_quantity=Decimal("1"),
            current_depth=0,
            visited_recipes=set(),
            parent_requirement={}
        )
    except CircularDependencyError as exc:
        return {
            'valid': False,
            'error': 'circular_dependency',
            'message': str(exc)
        }
    except RecursionError as exc:
        return {
            'valid': False,
            'error': 'max_depth_exceeded',
            'message': str(exc)
        }
    except Exception as exc:
        return {
            'valid': False,
            'error': 'unknown',
            'message': str(exc)
        }

    return {
        'valid': True,
        'message': 'Recipe can be safely exploded'
    }
|
||||
@@ -0,0 +1,500 @@
|
||||
"""
|
||||
Replenishment Planning Service
|
||||
|
||||
Main orchestrator for advanced procurement planning that integrates:
|
||||
- Lead time planning
|
||||
- Inventory projection
|
||||
- Safety stock calculation
|
||||
- Shelf life management
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass, asdict
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from .lead_time_planner import LeadTimePlanner, LeadTimeRequirement, LeadTimePlan
|
||||
from .inventory_projector import (
|
||||
InventoryProjector,
|
||||
DailyDemand,
|
||||
ScheduledReceipt,
|
||||
IngredientProjection
|
||||
)
|
||||
from .safety_stock_calculator import SafetyStockCalculator, SafetyStockResult
|
||||
from .shelf_life_manager import ShelfLifeManager, ShelfLifeAdjustment
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class IngredientRequirement:
    """Complete requirement for one ingredient"""
    ingredient_id: str
    ingredient_name: str
    required_quantity: Decimal  # quantity needed by required_by_date
    required_by_date: date
    supplier_id: Optional[str] = None
    lead_time_days: int = 3  # supplier lead time, in days
    shelf_life_days: Optional[int] = None  # None when shelf life is unknown
    is_perishable: bool = False
    category: str = 'dry'  # storage category passed to ShelfLifeManager
    unit_of_measure: str = 'kg'
    current_stock: Decimal = Decimal('0')  # on-hand stock at planning time
    daily_consumption_rate: float = 0.0  # average daily usage (0 = unknown)
    demand_std_dev: float = 0.0  # std dev of daily demand (0 = unknown)
|
||||
|
||||
|
||||
@dataclass
class ReplenishmentPlanItem:
    """Single item in replenishment plan"""
    id: str
    ingredient_id: str
    ingredient_name: str

    # Quantities
    base_quantity: Decimal  # need from projection / requirement
    safety_stock_quantity: Decimal  # statistical buffer added on top
    shelf_life_adjusted_quantity: Decimal  # base + safety after shelf-life adjustment
    final_order_quantity: Decimal  # quantity actually ordered

    # Dates
    order_date: date
    delivery_date: date
    required_by_date: date

    # Metadata
    lead_time_days: int
    is_urgent: bool
    urgency_reason: Optional[str]
    waste_risk: str  # from the shelf-life adjustment (e.g. 'low')
    stockout_risk: str  # from the projection; 'unknown' when no projection
    supplier_id: Optional[str]

    # Calculation details (dict exports of the sub-service results)
    safety_stock_calculation: Dict
    shelf_life_adjustment: Dict
    inventory_projection: Optional[Dict]  # None when projection failed
|
||||
|
||||
|
||||
@dataclass
class ReplenishmentPlan:
    """Complete replenishment plan"""
    plan_id: str
    tenant_id: str
    planning_date: date  # date the plan was generated
    projection_horizon_days: int  # horizon used for inventory projections

    items: List[ReplenishmentPlanItem]

    # Summary statistics
    total_items: int
    urgent_items: int  # count of items flagged is_urgent
    high_risk_items: int  # count with stockout_risk 'high' or 'critical'
    total_estimated_cost: Decimal  # placeholder: sum of quantities (no price data yet)

    # Metadata
    created_at: date
|
||||
|
||||
|
||||
class ReplenishmentPlanningService:
    """
    Orchestrates advanced replenishment planning.

    Workflow:
    1. Project inventory levels (InventoryProjector)
    2. Identify coverage gaps and required quantities
    3. Calculate safety stock (SafetyStockCalculator)
    4. Adjust for shelf life (ShelfLifeManager)
    5. Calculate order dates (LeadTimePlanner)
    6. Generate complete replenishment plan
    """

    def __init__(
        self,
        projection_horizon_days: int = 7,
        default_service_level: float = 0.95,
        default_buffer_days: int = 1
    ):
        """
        Initialize replenishment planning service.

        Args:
            projection_horizon_days: Days to project ahead
            default_service_level: Default target service level
            default_buffer_days: Default buffer days for orders
        """
        self.projection_horizon_days = projection_horizon_days

        # Initialize sub-services.  default_service_level and
        # default_buffer_days are not kept on self — they are owned by the
        # sub-services they configure.
        self.inventory_projector = InventoryProjector(projection_horizon_days)
        self.safety_stock_calculator = SafetyStockCalculator(default_service_level)
        self.shelf_life_manager = ShelfLifeManager()
        self.lead_time_planner = LeadTimePlanner(default_buffer_days)
|
||||
|
||||
async def generate_replenishment_plan(
    self,
    tenant_id: str,
    requirements: List[IngredientRequirement],
    forecast_id: Optional[str] = None,
    production_schedule_id: Optional[str] = None
) -> ReplenishmentPlan:
    """
    Generate complete replenishment plan.

    Plans each ingredient independently; a failure for one ingredient is
    logged and the rest of the plan is still produced.

    Args:
        tenant_id: Tenant ID
        requirements: List of ingredient requirements
        forecast_id: Optional reference to forecast
        production_schedule_id: Optional reference to production schedule

    Returns:
        Complete replenishment plan
    """
    plan_id = str(uuid.uuid4())
    planning_date = date.today()

    logger.info(
        f"Generating replenishment plan {plan_id} for {len(requirements)} ingredients"
    )

    plan_items = []

    for req in requirements:
        try:
            item = await self._plan_ingredient_replenishment(req)
            plan_items.append(item)
        except Exception as e:
            logger.error(
                f"Failed to plan replenishment for {req.ingredient_name}: {e}"
            )
            # Continue with other ingredients

    # Calculate summary statistics
    total_items = len(plan_items)
    urgent_items = sum(1 for item in plan_items if item.is_urgent)
    high_risk_items = sum(
        1 for item in plan_items
        if item.stockout_risk in ['high', 'critical']
    )

    # Estimate total cost (placeholder - need price data).
    # BUG FIX: seed the sum with Decimal('0') so an empty plan yields a
    # Decimal — matching the ReplenishmentPlan.total_estimated_cost
    # annotation — instead of the int 0 a bare sum() returns.
    total_estimated_cost = sum(
        (item.final_order_quantity for item in plan_items),
        Decimal('0')
    )

    plan = ReplenishmentPlan(
        plan_id=plan_id,
        tenant_id=tenant_id,
        planning_date=planning_date,
        projection_horizon_days=self.projection_horizon_days,
        items=plan_items,
        total_items=total_items,
        urgent_items=urgent_items,
        high_risk_items=high_risk_items,
        total_estimated_cost=total_estimated_cost,
        created_at=planning_date
    )

    logger.info(
        f"Replenishment plan generated: {total_items} items, "
        f"{urgent_items} urgent, {high_risk_items} high risk"
    )

    return plan
|
||||
|
||||
async def _plan_ingredient_replenishment(
    self,
    req: IngredientRequirement
) -> ReplenishmentPlanItem:
    """
    Plan replenishment for a single ingredient.

    Runs the full pipeline in order: projection -> base quantity ->
    safety stock -> shelf-life adjustment -> order-date calculation.

    Args:
        req: Ingredient requirement

    Returns:
        Replenishment plan item
    """
    # Step 1: Project inventory to identify needs (None if projection fails)
    projection = await self._project_ingredient_inventory(req)

    # Step 2: Calculate base quantity needed
    base_quantity = self._calculate_base_quantity(req, projection)

    # Step 3: Calculate safety stock
    safety_stock_result = self._calculate_safety_stock(req)
    safety_stock_quantity = safety_stock_result.safety_stock_quantity

    # Step 4: Adjust for shelf life (may cap base + safety stock)
    total_quantity = base_quantity + safety_stock_quantity
    shelf_life_adjustment = self._adjust_for_shelf_life(
        req,
        total_quantity
    )

    # Step 5: Calculate order dates from lead time
    lead_time_plan = self._calculate_order_dates(
        req,
        shelf_life_adjustment.adjusted_quantity
    )

    # Create plan item.  final_order_quantity currently equals the
    # shelf-life-adjusted quantity; both are kept for traceability.
    item = ReplenishmentPlanItem(
        id=str(uuid.uuid4()),
        ingredient_id=req.ingredient_id,
        ingredient_name=req.ingredient_name,
        base_quantity=base_quantity,
        safety_stock_quantity=safety_stock_quantity,
        shelf_life_adjusted_quantity=shelf_life_adjustment.adjusted_quantity,
        final_order_quantity=shelf_life_adjustment.adjusted_quantity,
        order_date=lead_time_plan.order_date,
        delivery_date=lead_time_plan.delivery_date,
        required_by_date=req.required_by_date,
        lead_time_days=req.lead_time_days,
        is_urgent=lead_time_plan.is_urgent,
        urgency_reason=lead_time_plan.urgency_reason,
        waste_risk=shelf_life_adjustment.waste_risk,
        # 'unknown' when the projection step failed above
        stockout_risk=projection.stockout_risk if projection else 'unknown',
        supplier_id=req.supplier_id,
        safety_stock_calculation=self.safety_stock_calculator.export_to_dict(safety_stock_result),
        shelf_life_adjustment=self.shelf_life_manager.export_to_dict(shelf_life_adjustment),
        inventory_projection=self.inventory_projector.export_projection_to_dict(projection) if projection else None
    )

    return item
|
||||
|
||||
async def _project_ingredient_inventory(
    self,
    req: IngredientRequirement
) -> Optional[IngredientProjection]:
    """
    Build a daily demand series for the ingredient and run the inventory
    projector over the planning horizon.

    Args:
        req: Ingredient requirement

    Returns:
        Inventory projection, or None when projection fails.
    """
    try:
        # Flat daily demand derived from the average consumption rate;
        # no demand series when the rate is unknown (<= 0).
        daily_demand = []
        if req.daily_consumption_rate > 0:
            per_day = lambda offset: DailyDemand(
                ingredient_id=req.ingredient_id,
                date=date.today() + timedelta(days=offset),
                quantity=Decimal(str(req.daily_consumption_rate))
            )
            daily_demand = [per_day(offset) for offset in range(self.projection_horizon_days)]

        # No scheduled receipts for now (could add future POs here)
        return self.inventory_projector.project_inventory(
            ingredient_id=req.ingredient_id,
            ingredient_name=req.ingredient_name,
            current_stock=req.current_stock,
            unit_of_measure=req.unit_of_measure,
            daily_demand=daily_demand,
            scheduled_receipts=[]
        )

    except Exception as exc:
        logger.error(f"Failed to project inventory for {req.ingredient_name}: {exc}")
        return None
|
||||
|
||||
def _calculate_base_quantity(
    self,
    req: IngredientRequirement,
    projection: Optional[IngredientProjection]
) -> Decimal:
    """
    Determine the base order quantity for an ingredient.

    With a projection available, takes the larger of the projected coverage
    need and the stated requirement; otherwise falls back to the stated
    requirement alone.

    Args:
        req: Ingredient requirement
        projection: Inventory projection (may be None)

    Returns:
        Base quantity
    """
    if not projection:
        # No projection — the stated requirement is all we know.
        return req.required_quantity

    projected_need = self.inventory_projector.calculate_required_order_quantity(
        projection,
        target_coverage_days=self.projection_horizon_days
    )
    return max(projected_need, req.required_quantity)
|
||||
|
||||
def _calculate_safety_stock(
    self,
    req: IngredientRequirement
) -> SafetyStockResult:
    """
    Calculate safety stock for an ingredient.

    Picks the best available method: the statistical z-score formula when a
    demand standard deviation is known, a fixed-percentage heuristic when
    only an average consumption rate is known, and zero otherwise.

    Args:
        req: Ingredient requirement

    Returns:
        Safety stock result
    """
    if req.demand_std_dev > 0:
        # Use statistical method
        return self.safety_stock_calculator.calculate_safety_stock(
            demand_std_dev=req.demand_std_dev,
            lead_time_days=req.lead_time_days
        )
    elif req.daily_consumption_rate > 0:
        # Use percentage method
        return self.safety_stock_calculator.calculate_using_fixed_percentage(
            average_demand=req.daily_consumption_rate,
            lead_time_days=req.lead_time_days,
            percentage=0.20
        )
    else:
        # No safety stock.
        # BUG FIX: this branch passed reasoning='ERROR:INSUFFICIENT_DATA',
        # but SafetyStockResult has no 'reasoning' field — it declares
        # 'reasoning_data: dict' — so constructing it raised TypeError at
        # runtime.  Use the structured error payload convention that
        # SafetyStockCalculator itself emits (error code kept for i18n).
        return SafetyStockResult(
            safety_stock_quantity=Decimal('0'),
            service_level=0.0,
            z_score=0.0,
            demand_std_dev=0.0,
            lead_time_days=req.lead_time_days,
            calculation_method='none',
            confidence='low',
            reasoning_data={
                'type': 'error_insufficient_data',
                'parameters': {
                    'demand_std_dev': req.demand_std_dev,
                    'daily_consumption_rate': req.daily_consumption_rate
                }
            }
        )
|
||||
|
||||
def _adjust_for_shelf_life(
    self,
    req: IngredientRequirement,
    quantity: Decimal
) -> ShelfLifeAdjustment:
    """
    Adjust quantity for shelf life constraints.

    Non-perishable ingredients (or those without shelf-life data) pass
    through unchanged; perishables are delegated to the ShelfLifeManager.

    Args:
        req: Ingredient requirement
        quantity: Proposed quantity

    Returns:
        Shelf life adjustment
    """
    if not req.is_perishable or not req.shelf_life_days:
        # No shelf life constraint — synthesize a pass-through adjustment
        # with a nominal one-year use-by date.
        return ShelfLifeAdjustment(
            original_quantity=quantity,
            adjusted_quantity=quantity,
            adjustment_reason='Non-perishable or no shelf life data',
            waste_risk='low',
            recommended_order_date=date.today(),
            use_by_date=date.today() + timedelta(days=365),
            is_constrained=False
        )

    # NOTE(review): delivery_date is computed as required_by_date minus the
    # full lead time, which looks like the ORDER date rather than the
    # delivery date (LeadTimePlanner derives delivery as today + lead time)
    # — confirm the intended semantics with ShelfLifeManager.
    return self.shelf_life_manager.adjust_order_quantity_for_shelf_life(
        ingredient_id=req.ingredient_id,
        ingredient_name=req.ingredient_name,
        requested_quantity=quantity,
        daily_consumption_rate=req.daily_consumption_rate,
        shelf_life_days=req.shelf_life_days,
        category=req.category,
        is_perishable=req.is_perishable,
        delivery_date=req.required_by_date - timedelta(days=req.lead_time_days)
    )
|
||||
|
||||
def _calculate_order_dates(
    self,
    req: IngredientRequirement,
    quantity: Decimal
) -> LeadTimePlan:
    """
    Calculate order and delivery dates via the lead-time planner.

    Falls back to a simple today + lead-time plan if the planner returns
    nothing.

    Args:
        req: Ingredient requirement
        quantity: Order quantity

    Returns:
        Lead time plan
    """
    requirement = LeadTimeRequirement(
        ingredient_id=req.ingredient_id,
        ingredient_name=req.ingredient_name,
        required_quantity=quantity,
        required_by_date=req.required_by_date,
        supplier_id=req.supplier_id,
        lead_time_days=req.lead_time_days
    )

    plans = self.lead_time_planner.plan_requirements([requirement])
    if plans:
        return plans[0]

    # Fallback: order today, expect delivery after the nominal lead time.
    return LeadTimePlan(
        ingredient_id=req.ingredient_id,
        ingredient_name=req.ingredient_name,
        order_quantity=quantity,
        order_date=date.today(),
        delivery_date=date.today() + timedelta(days=req.lead_time_days),
        required_by_date=req.required_by_date,
        lead_time_days=req.lead_time_days,
        buffer_days=1,
        is_urgent=False,
        supplier_id=req.supplier_id
    )
|
||||
|
||||
def export_plan_to_dict(self, plan: ReplenishmentPlan) -> Dict:
    """
    Export plan to dictionary for API response.

    Dates are serialized as ISO-8601 strings and Decimal quantities as
    floats so the result is directly JSON-serializable.

    Args:
        plan: Replenishment plan

    Returns:
        Dictionary representation
    """

    def serialize_item(item) -> Dict:
        # One plan line, with Decimals flattened to floats.
        return {
            'id': item.id,
            'ingredient_id': item.ingredient_id,
            'ingredient_name': item.ingredient_name,
            'base_quantity': float(item.base_quantity),
            'safety_stock_quantity': float(item.safety_stock_quantity),
            'shelf_life_adjusted_quantity': float(item.shelf_life_adjusted_quantity),
            'final_order_quantity': float(item.final_order_quantity),
            'order_date': item.order_date.isoformat(),
            'delivery_date': item.delivery_date.isoformat(),
            'required_by_date': item.required_by_date.isoformat(),
            'lead_time_days': item.lead_time_days,
            'is_urgent': item.is_urgent,
            'urgency_reason': item.urgency_reason,
            'waste_risk': item.waste_risk,
            'stockout_risk': item.stockout_risk,
            'supplier_id': item.supplier_id,
            'safety_stock_calculation': item.safety_stock_calculation,
            'shelf_life_adjustment': item.shelf_life_adjustment,
            'inventory_projection': item.inventory_projection
        }

    return {
        'plan_id': plan.plan_id,
        'tenant_id': plan.tenant_id,
        'planning_date': plan.planning_date.isoformat(),
        'projection_horizon_days': plan.projection_horizon_days,
        'total_items': plan.total_items,
        'urgent_items': plan.urgent_items,
        'high_risk_items': plan.high_risk_items,
        'total_estimated_cost': float(plan.total_estimated_cost),
        'created_at': plan.created_at.isoformat(),
        'items': [serialize_item(item) for item in plan.items]
    }
|
||||
474
services/procurement/app/services/safety_stock_calculator.py
Normal file
474
services/procurement/app/services/safety_stock_calculator.py
Normal file
@@ -0,0 +1,474 @@
|
||||
"""
|
||||
Safety Stock Calculator
|
||||
|
||||
Calculates dynamic safety stock based on demand variability,
|
||||
lead time, and service level targets.
|
||||
"""
|
||||
|
||||
import math
|
||||
import statistics
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class SafetyStockResult:
    """Result of safety stock calculation"""
    safety_stock_quantity: Decimal  # recommended buffer quantity (rounded to 2 dp)
    service_level: float  # target service level used (0-1)
    z_score: float  # z-score corresponding to the service level
    demand_std_dev: float  # daily-demand standard deviation used
    lead_time_days: int  # supplier lead time in days
    calculation_method: str  # e.g. 'statistical_z_score', 'insufficient_data'
    confidence: str  # 'high', 'medium', 'low'
    reasoning_data: dict  # structured explanation payload ('type', 'parameters', ...)
|
||||
|
||||
|
||||
@dataclass
class DemandHistory:
    """Historical demand data for an ingredient"""
    ingredient_id: str
    daily_demands: List[float]  # Historical daily demands
    mean_demand: float  # mean of daily_demands
    std_dev: float  # standard deviation of daily_demands
    coefficient_of_variation: float  # std_dev relative to mean_demand
|
||||
|
||||
|
||||
class SafetyStockCalculator:
    """
    Calculates safety stock using statistical methods.

    Formula: Safety Stock = Z × σ × √L
    where:
    - Z = service level z-score (e.g., 1.96 for 97.5%)
    - σ = demand standard deviation
    - L = lead time in days

    This accounts for demand variability during lead time.
    """

    # Z-scores for common service levels (standard normal quantiles)
    SERVICE_LEVEL_Z_SCORES = {
        0.50: 0.00,  # 50% - no buffer (not recommended)
        0.80: 0.84,  # 80% service level
        0.85: 1.04,  # 85% service level
        0.90: 1.28,  # 90% service level
        0.95: 1.65,  # 95% service level
        0.975: 1.96,  # 97.5% service level
        0.99: 2.33,  # 99% service level
        0.995: 2.58,  # 99.5% service level
        0.999: 3.09   # 99.9% service level
    }

    def __init__(self, default_service_level: float = 0.95):
        """
        Initialize safety stock calculator.

        Args:
            default_service_level: Default target service level (0-1),
                used whenever a calculation is invoked without an explicit
                service_level argument.
        """
        self.default_service_level = default_service_level
|
||||
|
||||
def calculate_safety_stock(
    self,
    demand_std_dev: float,
    lead_time_days: int,
    service_level: Optional[float] = None
) -> SafetyStockResult:
    """
    Calculate safety stock with the standard z-score formula.

    Safety Stock = Z × σ × √L.  Invalid inputs (non-positive standard
    deviation or lead time) yield a zero-quantity result with an error
    payload instead of raising.

    Args:
        demand_std_dev: Standard deviation of daily demand
        lead_time_days: Supplier lead time in days
        service_level: Target service level (uses default if None)

    Returns:
        SafetyStockResult with calculation details
    """
    level = self.default_service_level if service_level is None else service_level
    z = self._get_z_score(level)

    # Guard: the formula is meaningless for non-positive inputs.
    if demand_std_dev <= 0 or lead_time_days <= 0:
        return SafetyStockResult(
            safety_stock_quantity=Decimal('0'),
            service_level=level,
            z_score=z,
            demand_std_dev=demand_std_dev,
            lead_time_days=lead_time_days,
            calculation_method='zero_due_to_invalid_inputs',
            confidence='low',
            reasoning_data={
                'type': 'error_lead_time_invalid',
                'parameters': {
                    'lead_time_days': lead_time_days,
                    'demand_std_dev': demand_std_dev
                }
            }
        )

    # Safety Stock = Z × σ × √L
    buffer_qty = z * demand_std_dev * math.sqrt(lead_time_days)
    confidence = self._determine_confidence(demand_std_dev, lead_time_days)

    return SafetyStockResult(
        safety_stock_quantity=Decimal(str(round(buffer_qty, 2))),
        service_level=level,
        z_score=z,
        demand_std_dev=demand_std_dev,
        lead_time_days=lead_time_days,
        calculation_method='statistical_z_score',
        confidence=confidence,
        reasoning_data={
            'type': 'statistical_z_score',
            'calculation_method': 'statistical_z_score',
            'parameters': {
                'service_level': round(level * 100, 1),
                'z_score': round(z, 2),
                'demand_std_dev': round(demand_std_dev, 2),
                'lead_time_days': lead_time_days,
                'safety_stock': round(buffer_qty, 2)
            },
            'confidence': confidence
        }
    )
|
||||
|
||||
def calculate_from_demand_history(
    self,
    daily_demands: List[float],
    lead_time_days: int,
    service_level: Optional[float] = None
) -> SafetyStockResult:
    """
    Calculate safety stock from historical demand data.

    Derives the demand standard deviation from the history and delegates to
    calculate_safety_stock.  Fewer than two data points yields a
    zero-quantity result with an error payload.

    Args:
        daily_demands: List of historical daily demands
        lead_time_days: Supplier lead time in days
        service_level: Target service level

    Returns:
        SafetyStockResult with calculation details
    """
    # stdev needs at least two observations.
    if not daily_demands or len(daily_demands) < 2:
        logger.warning("Insufficient demand history for safety stock calculation")
        return SafetyStockResult(
            safety_stock_quantity=Decimal('0'),
            service_level=service_level or self.default_service_level,
            z_score=0.0,
            demand_std_dev=0.0,
            lead_time_days=lead_time_days,
            calculation_method='insufficient_data',
            confidence='low',
            reasoning_data={
                'type': 'error_insufficient_data',
                'parameters': {
                    'data_points': len(daily_demands) if daily_demands else 0,
                    'min_required': 2
                }
            }
        )

    return self.calculate_safety_stock(
        demand_std_dev=statistics.stdev(daily_demands),
        lead_time_days=lead_time_days,
        service_level=service_level
    )
|
||||
|
||||
def calculate_with_lead_time_variability(
|
||||
self,
|
||||
demand_mean: float,
|
||||
demand_std_dev: float,
|
||||
lead_time_mean: int,
|
||||
lead_time_std_dev: int,
|
||||
service_level: Optional[float] = None
|
||||
) -> SafetyStockResult:
|
||||
"""
|
||||
Calculate safety stock considering both demand AND lead time variability.
|
||||
|
||||
More accurate formula:
|
||||
SS = Z × √(L_mean × σ_demand² + μ_demand² × σ_lead_time²)
|
||||
|
||||
Args:
|
||||
demand_mean: Mean daily demand
|
||||
demand_std_dev: Standard deviation of daily demand
|
||||
lead_time_mean: Mean lead time in days
|
||||
lead_time_std_dev: Standard deviation of lead time
|
||||
service_level: Target service level
|
||||
|
||||
Returns:
|
||||
SafetyStockResult with calculation details
|
||||
"""
|
||||
if service_level is None:
|
||||
service_level = self.default_service_level
|
||||
|
||||
z_score = self._get_z_score(service_level)
|
||||
|
||||
# Calculate combined variance
|
||||
variance = (
|
||||
lead_time_mean * (demand_std_dev ** 2) +
|
||||
(demand_mean ** 2) * (lead_time_std_dev ** 2)
|
||||
)
|
||||
|
||||
safety_stock = z_score * math.sqrt(variance)
|
||||
|
||||
confidence = 'high' if lead_time_std_dev > 0 else 'medium'
|
||||
|
||||
return SafetyStockResult(
|
||||
safety_stock_quantity=Decimal(str(round(safety_stock, 2))),
|
||||
service_level=service_level,
|
||||
z_score=z_score,
|
||||
demand_std_dev=demand_std_dev,
|
||||
lead_time_days=lead_time_mean,
|
||||
calculation_method='statistical_with_lead_time_variability',
|
||||
confidence=confidence,
|
||||
reasoning_data={
|
||||
'type': 'advanced_variability',
|
||||
'calculation_method': 'statistical_with_lead_time_variability',
|
||||
'parameters': {
|
||||
'service_level': round(service_level * 100, 1),
|
||||
'z_score': round(z_score, 2),
|
||||
'demand_mean': round(demand_mean, 2),
|
||||
'demand_std_dev': round(demand_std_dev, 2),
|
||||
'lead_time_mean': lead_time_mean,
|
||||
'lead_time_std_dev': round(lead_time_std_dev, 1),
|
||||
'safety_stock': round(safety_stock, 2)
|
||||
},
|
||||
'confidence': confidence
|
||||
}
|
||||
)
|
||||
|
||||
def calculate_using_fixed_percentage(
|
||||
self,
|
||||
average_demand: float,
|
||||
lead_time_days: int,
|
||||
percentage: float = 0.20
|
||||
) -> SafetyStockResult:
|
||||
"""
|
||||
Calculate safety stock as percentage of lead time demand.
|
||||
|
||||
Simple method: Safety Stock = % × (Average Daily Demand × Lead Time)
|
||||
|
||||
Args:
|
||||
average_demand: Average daily demand
|
||||
lead_time_days: Supplier lead time in days
|
||||
percentage: Safety stock percentage (default 20%)
|
||||
|
||||
Returns:
|
||||
SafetyStockResult with calculation details
|
||||
"""
|
||||
lead_time_demand = average_demand * lead_time_days
|
||||
safety_stock = lead_time_demand * percentage
|
||||
|
||||
return SafetyStockResult(
|
||||
safety_stock_quantity=Decimal(str(round(safety_stock, 2))),
|
||||
service_level=0.0, # Not based on service level
|
||||
z_score=0.0,
|
||||
demand_std_dev=0.0,
|
||||
lead_time_days=lead_time_days,
|
||||
calculation_method='fixed_percentage',
|
||||
confidence='low',
|
||||
reasoning_data={
|
||||
'type': 'fixed_percentage',
|
||||
'calculation_method': 'fixed_percentage',
|
||||
'parameters': {
|
||||
'percentage': round(percentage * 100, 0),
|
||||
'average_demand': round(average_demand, 2),
|
||||
'lead_time_days': lead_time_days,
|
||||
'lead_time_demand': round(lead_time_demand, 2),
|
||||
'safety_stock': round(safety_stock, 2)
|
||||
},
|
||||
'confidence': 'low'
|
||||
}
|
||||
)
|
||||
|
||||
def calculate_batch_safety_stock(
|
||||
self,
|
||||
ingredients_data: List[Dict]
|
||||
) -> Dict[str, SafetyStockResult]:
|
||||
"""
|
||||
Calculate safety stock for multiple ingredients.
|
||||
|
||||
Args:
|
||||
ingredients_data: List of dicts with ingredient data
|
||||
|
||||
Returns:
|
||||
Dictionary mapping ingredient_id to SafetyStockResult
|
||||
"""
|
||||
results = {}
|
||||
|
||||
for data in ingredients_data:
|
||||
ingredient_id = data['ingredient_id']
|
||||
|
||||
if 'daily_demands' in data:
|
||||
# Use historical data
|
||||
result = self.calculate_from_demand_history(
|
||||
daily_demands=data['daily_demands'],
|
||||
lead_time_days=data['lead_time_days'],
|
||||
service_level=data.get('service_level')
|
||||
)
|
||||
elif 'demand_std_dev' in data:
|
||||
# Use provided std dev
|
||||
result = self.calculate_safety_stock(
|
||||
demand_std_dev=data['demand_std_dev'],
|
||||
lead_time_days=data['lead_time_days'],
|
||||
service_level=data.get('service_level')
|
||||
)
|
||||
else:
|
||||
# Fallback to percentage method
|
||||
result = self.calculate_using_fixed_percentage(
|
||||
average_demand=data.get('average_demand', 0),
|
||||
lead_time_days=data['lead_time_days'],
|
||||
percentage=data.get('safety_percentage', 0.20)
|
||||
)
|
||||
|
||||
results[ingredient_id] = result
|
||||
|
||||
logger.info(f"Calculated safety stock for {len(results)} ingredients")
|
||||
|
||||
return results
|
||||
|
||||
def analyze_demand_history(
|
||||
self,
|
||||
daily_demands: List[float]
|
||||
) -> DemandHistory:
|
||||
"""
|
||||
Analyze demand history to extract statistics.
|
||||
|
||||
Args:
|
||||
daily_demands: List of historical daily demands
|
||||
|
||||
Returns:
|
||||
DemandHistory with statistics
|
||||
"""
|
||||
if not daily_demands:
|
||||
return DemandHistory(
|
||||
ingredient_id="unknown",
|
||||
daily_demands=[],
|
||||
mean_demand=0.0,
|
||||
std_dev=0.0,
|
||||
coefficient_of_variation=0.0
|
||||
)
|
||||
|
||||
mean_demand = statistics.mean(daily_demands)
|
||||
std_dev = statistics.stdev(daily_demands) if len(daily_demands) >= 2 else 0.0
|
||||
cv = (std_dev / mean_demand) if mean_demand > 0 else 0.0
|
||||
|
||||
return DemandHistory(
|
||||
ingredient_id="unknown",
|
||||
daily_demands=daily_demands,
|
||||
mean_demand=mean_demand,
|
||||
std_dev=std_dev,
|
||||
coefficient_of_variation=cv
|
||||
)
|
||||
|
||||
def _get_z_score(self, service_level: float) -> float:
|
||||
"""
|
||||
Get z-score for service level.
|
||||
|
||||
Args:
|
||||
service_level: Target service level (0-1)
|
||||
|
||||
Returns:
|
||||
Z-score
|
||||
"""
|
||||
# Find closest service level
|
||||
if service_level in self.SERVICE_LEVEL_Z_SCORES:
|
||||
return self.SERVICE_LEVEL_Z_SCORES[service_level]
|
||||
|
||||
# Interpolate or use closest
|
||||
levels = sorted(self.SERVICE_LEVEL_Z_SCORES.keys())
|
||||
|
||||
for i, level in enumerate(levels):
|
||||
if service_level <= level:
|
||||
return self.SERVICE_LEVEL_Z_SCORES[level]
|
||||
|
||||
# Use highest if beyond range
|
||||
return self.SERVICE_LEVEL_Z_SCORES[levels[-1]]
|
||||
|
||||
def _determine_confidence(
|
||||
self,
|
||||
demand_std_dev: float,
|
||||
lead_time_days: int
|
||||
) -> str:
|
||||
"""
|
||||
Determine confidence level of calculation.
|
||||
|
||||
Args:
|
||||
demand_std_dev: Demand standard deviation
|
||||
lead_time_days: Lead time in days
|
||||
|
||||
Returns:
|
||||
Confidence level
|
||||
"""
|
||||
if demand_std_dev == 0:
|
||||
return 'low' # No variability in data
|
||||
|
||||
if lead_time_days < 3:
|
||||
return 'high' # Short lead time, easier to manage
|
||||
elif lead_time_days < 7:
|
||||
return 'medium'
|
||||
else:
|
||||
return 'medium' # Long lead time, more uncertainty
|
||||
|
||||
def recommend_service_level(
|
||||
self,
|
||||
ingredient_category: str,
|
||||
is_critical: bool = False
|
||||
) -> float:
|
||||
"""
|
||||
Recommend service level based on ingredient characteristics.
|
||||
|
||||
Args:
|
||||
ingredient_category: Category of ingredient
|
||||
is_critical: Whether ingredient is business-critical
|
||||
|
||||
Returns:
|
||||
Recommended service level
|
||||
"""
|
||||
# Critical ingredients: very high service level
|
||||
if is_critical:
|
||||
return 0.99
|
||||
|
||||
# Perishables: moderate service level (to avoid waste)
|
||||
if ingredient_category.lower() in ['dairy', 'meat', 'produce', 'fresh']:
|
||||
return 0.90
|
||||
|
||||
# Standard ingredients: high service level
|
||||
return 0.95
|
||||
|
||||
def export_to_dict(self, result: SafetyStockResult) -> Dict:
|
||||
"""
|
||||
Export result to dictionary for API response.
|
||||
|
||||
Args:
|
||||
result: SafetyStockResult
|
||||
|
||||
Returns:
|
||||
Dictionary representation
|
||||
"""
|
||||
return {
|
||||
'safety_stock_quantity': float(result.safety_stock_quantity),
|
||||
'service_level': result.service_level,
|
||||
'z_score': result.z_score,
|
||||
'demand_std_dev': result.demand_std_dev,
|
||||
'lead_time_days': result.lead_time_days,
|
||||
'calculation_method': result.calculation_method,
|
||||
'confidence': result.confidence,
|
||||
'reasoning_data': result.reasoning_data
|
||||
}
|
||||
444
services/procurement/app/services/shelf_life_manager.py
Normal file
444
services/procurement/app/services/shelf_life_manager.py
Normal file
@@ -0,0 +1,444 @@
|
||||
"""
|
||||
Shelf Life Manager
|
||||
|
||||
Manages shelf life constraints for perishable ingredients to minimize waste
|
||||
and ensure food safety.
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import statistics
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class ShelfLifeConstraint:
    """Shelf life constraints for an ingredient"""
    # Identity of the ingredient these constraints apply to.
    ingredient_id: str
    ingredient_name: str
    # Days from receipt until the ingredient expires.
    shelf_life_days: int
    is_perishable: bool
    category: str  # 'fresh', 'frozen', 'dry', 'canned'
    max_order_quantity_days: Optional[int] = None  # Max days worth to order at once
|
||||
|
||||
|
||||
@dataclass
class ShelfLifeAdjustment:
    """Result of shelf life adjustment"""
    # Quantity the caller originally requested.
    original_quantity: Decimal
    # Quantity after applying shelf-life constraints (may equal original).
    adjusted_quantity: Decimal
    # Human-readable explanation of the (non-)adjustment.
    adjustment_reason: str
    waste_risk: str  # 'low', 'medium', 'high'
    # Suggested date to place the order so goods arrive fresh.
    recommended_order_date: date
    # Date by which the delivered goods must be consumed.
    use_by_date: date
    # True when the requested quantity had to be reduced.
    is_constrained: bool
|
||||
|
||||
|
||||
class ShelfLifeManager:
    """
    Manages procurement planning considering shelf life constraints.

    For perishable items:
    1. Don't order too far in advance (will expire)
    2. Don't order too much at once (will waste)
    3. Calculate optimal order timing
    4. Warn about expiration risks
    """

    # Category-specific defaults
    # - max_days_ahead: how many days before use goods may arrive
    # - max_order_days_supply: most days of supply to order in one go
    # - waste_risk_threshold: fraction of shelf life used for risk grading
    #   (NOTE: quantity capping uses the instance-level
    #   self.waste_risk_threshold instead — see adjust_order_quantity_for_shelf_life)
    CATEGORY_DEFAULTS = {
        'fresh': {
            'max_days_ahead': 2,
            'max_order_days_supply': 3,
            'waste_risk_threshold': 0.80
        },
        'dairy': {
            'max_days_ahead': 3,
            'max_order_days_supply': 5,
            'waste_risk_threshold': 0.85
        },
        'frozen': {
            'max_days_ahead': 14,
            'max_order_days_supply': 30,
            'waste_risk_threshold': 0.90
        },
        'dry': {
            'max_days_ahead': 90,
            'max_order_days_supply': 90,
            'waste_risk_threshold': 0.95
        },
        'canned': {
            'max_days_ahead': 180,
            'max_order_days_supply': 180,
            'waste_risk_threshold': 0.95
        }
    }

    def __init__(self, waste_risk_threshold: float = 0.85):
        """
        Initialize shelf life manager.

        Args:
            waste_risk_threshold: % of shelf life before considering waste risk
        """
        # Applied across all categories when capping order quantities.
        self.waste_risk_threshold = waste_risk_threshold
|
||||
|
||||
    def adjust_order_quantity_for_shelf_life(
        self,
        ingredient_id: str,
        ingredient_name: str,
        requested_quantity: Decimal,
        daily_consumption_rate: float,
        shelf_life_days: int,
        category: str = 'dry',
        is_perishable: bool = True,
        delivery_date: Optional[date] = None
    ) -> ShelfLifeAdjustment:
        """
        Adjust order quantity to prevent waste due to expiration.

        The requested quantity is capped so it can be consumed within
        shelf_life_days * self.waste_risk_threshold days at the given
        consumption rate; waste risk is graded using the category's own
        threshold from CATEGORY_DEFAULTS.

        Args:
            ingredient_id: Ingredient ID
            ingredient_name: Ingredient name
            requested_quantity: Requested order quantity
            daily_consumption_rate: Average daily usage
            shelf_life_days: Days until expiration
            category: Ingredient category
            is_perishable: Whether item is perishable
            delivery_date: Expected delivery date (defaults to today)

        Returns:
            ShelfLifeAdjustment with adjusted quantity
        """
        if not is_perishable:
            # Non-perishable, no adjustment needed; use_by_date is a nominal
            # one-year horizon rather than a real expiry.
            return ShelfLifeAdjustment(
                original_quantity=requested_quantity,
                adjusted_quantity=requested_quantity,
                adjustment_reason='Non-perishable item, no shelf life constraint',
                waste_risk='low',
                recommended_order_date=delivery_date or date.today(),
                use_by_date=delivery_date + timedelta(days=365) if delivery_date else date.today() + timedelta(days=365),
                is_constrained=False
            )

        if delivery_date is None:
            delivery_date = date.today()

        # Get category defaults
        defaults = self.CATEGORY_DEFAULTS.get(
            category.lower(),
            self.CATEGORY_DEFAULTS['dry']
        )

        # Calculate use by date
        use_by_date = delivery_date + timedelta(days=shelf_life_days)

        # Calculate how many days the requested quantity will last
        if daily_consumption_rate > 0:
            days_supply = float(requested_quantity) / daily_consumption_rate
        else:
            days_supply = 0

        # Calculate maximum safe quantity (using waste risk threshold).
        # NOTE(review): when daily_consumption_rate is 0 this cap is 0, so
        # any positive request is "constrained" down to zero — confirm this
        # is the intended handling of unknown consumption rates.
        safe_shelf_life_days = int(shelf_life_days * self.waste_risk_threshold)
        max_safe_quantity = Decimal(str(daily_consumption_rate * safe_shelf_life_days))

        # Check if adjustment needed
        is_constrained = requested_quantity > max_safe_quantity
        adjusted_quantity = requested_quantity

        if is_constrained:
            adjusted_quantity = max_safe_quantity
            adjustment_reason = (
                f"Reduced from {requested_quantity} to {adjusted_quantity} to fit within "
                f"{safe_shelf_life_days}-day safe consumption window (shelf life: {shelf_life_days} days)"
            )
            logger.warning(
                f"{ingredient_name}: Order quantity reduced due to shelf life constraint "
                f"({requested_quantity} → {adjusted_quantity})"
            )
        else:
            adjustment_reason = "Quantity within safe shelf life window"

        # Calculate waste risk (category-specific threshold, not the
        # instance-level one used for the quantity cap above)
        waste_risk = self._calculate_waste_risk(
            days_supply=days_supply,
            shelf_life_days=shelf_life_days,
            threshold=defaults['waste_risk_threshold']
        )

        return ShelfLifeAdjustment(
            original_quantity=requested_quantity,
            adjusted_quantity=adjusted_quantity,
            adjustment_reason=adjustment_reason,
            waste_risk=waste_risk,
            recommended_order_date=delivery_date - timedelta(days=defaults['max_days_ahead']),
            use_by_date=use_by_date,
            is_constrained=is_constrained
        )
|
||||
|
||||
def calculate_optimal_order_date(
|
||||
self,
|
||||
required_by_date: date,
|
||||
shelf_life_days: int,
|
||||
category: str = 'dry',
|
||||
lead_time_days: int = 0
|
||||
) -> Tuple[date, str]:
|
||||
"""
|
||||
Calculate optimal order date considering shelf life.
|
||||
|
||||
Args:
|
||||
required_by_date: When item is needed
|
||||
shelf_life_days: Shelf life in days
|
||||
category: Ingredient category
|
||||
lead_time_days: Supplier lead time
|
||||
|
||||
Returns:
|
||||
Tuple of (optimal_order_date, reasoning)
|
||||
"""
|
||||
defaults = self.CATEGORY_DEFAULTS.get(
|
||||
category.lower(),
|
||||
self.CATEGORY_DEFAULTS['dry']
|
||||
)
|
||||
|
||||
# Calculate delivery date accounting for lead time
|
||||
delivery_date = required_by_date - timedelta(days=lead_time_days)
|
||||
|
||||
# For perishables, don't deliver too far in advance
|
||||
max_advance_days = min(
|
||||
defaults['max_days_ahead'],
|
||||
int(shelf_life_days * 0.3) # Max 30% of shelf life
|
||||
)
|
||||
|
||||
# Optimal delivery: close to required date but not too early
|
||||
optimal_delivery_date = required_by_date - timedelta(days=max_advance_days)
|
||||
|
||||
# Optimal order date
|
||||
optimal_order_date = optimal_delivery_date - timedelta(days=lead_time_days)
|
||||
|
||||
reasoning = (
|
||||
f"Order placed {lead_time_days} days before delivery "
|
||||
f"(arrives {max_advance_days} days before use to maintain freshness)"
|
||||
)
|
||||
|
||||
return optimal_order_date, reasoning
|
||||
|
||||
def validate_order_timing(
|
||||
self,
|
||||
order_date: date,
|
||||
delivery_date: date,
|
||||
required_by_date: date,
|
||||
shelf_life_days: int,
|
||||
ingredient_name: str
|
||||
) -> Tuple[bool, List[str]]:
|
||||
"""
|
||||
Validate order timing against shelf life constraints.
|
||||
|
||||
Args:
|
||||
order_date: Planned order date
|
||||
delivery_date: Expected delivery date
|
||||
required_by_date: Date when item is needed
|
||||
shelf_life_days: Shelf life in days
|
||||
ingredient_name: Name of ingredient
|
||||
|
||||
Returns:
|
||||
Tuple of (is_valid, list of warnings)
|
||||
"""
|
||||
warnings = []
|
||||
|
||||
# Check if item will arrive in time
|
||||
if delivery_date > required_by_date:
|
||||
warnings.append(
|
||||
f"Delivery date {delivery_date} is after required date {required_by_date}"
|
||||
)
|
||||
|
||||
# Check if item will expire before use
|
||||
expiry_date = delivery_date + timedelta(days=shelf_life_days)
|
||||
if expiry_date < required_by_date:
|
||||
warnings.append(
|
||||
f"Item will expire on {expiry_date} before required date {required_by_date}"
|
||||
)
|
||||
|
||||
# Check if ordering too far in advance
|
||||
days_in_storage = (required_by_date - delivery_date).days
|
||||
if days_in_storage > shelf_life_days * 0.8:
|
||||
warnings.append(
|
||||
f"Item will be in storage for {days_in_storage} days "
|
||||
f"(80% of {shelf_life_days}-day shelf life)"
|
||||
)
|
||||
|
||||
is_valid = len(warnings) == 0
|
||||
|
||||
if not is_valid:
|
||||
for warning in warnings:
|
||||
logger.warning(f"{ingredient_name}: {warning}")
|
||||
|
||||
return is_valid, warnings
|
||||
|
||||
def calculate_fifo_rotation_schedule(
|
||||
self,
|
||||
current_inventory: List[Dict],
|
||||
new_order_quantity: Decimal,
|
||||
delivery_date: date,
|
||||
daily_consumption: float
|
||||
) -> List[Dict]:
|
||||
"""
|
||||
Calculate FIFO (First In First Out) rotation schedule.
|
||||
|
||||
Args:
|
||||
current_inventory: List of existing batches with expiry dates
|
||||
new_order_quantity: New order quantity
|
||||
delivery_date: New order delivery date
|
||||
daily_consumption: Daily consumption rate
|
||||
|
||||
Returns:
|
||||
List of usage schedule
|
||||
"""
|
||||
# Combine current and new inventory
|
||||
all_batches = []
|
||||
|
||||
for batch in current_inventory:
|
||||
all_batches.append({
|
||||
'quantity': batch['quantity'],
|
||||
'expiry_date': batch['expiry_date'],
|
||||
'is_existing': True
|
||||
})
|
||||
|
||||
# Add new order (estimate shelf life from existing batches)
|
||||
if current_inventory:
|
||||
avg_shelf_life_days = statistics.mean([
|
||||
(batch['expiry_date'] - date.today()).days
|
||||
for batch in current_inventory
|
||||
])
|
||||
else:
|
||||
avg_shelf_life_days = 30
|
||||
|
||||
all_batches.append({
|
||||
'quantity': new_order_quantity,
|
||||
'expiry_date': delivery_date + timedelta(days=int(avg_shelf_life_days)),
|
||||
'is_existing': False
|
||||
})
|
||||
|
||||
# Sort by expiry date (FIFO)
|
||||
all_batches.sort(key=lambda x: x['expiry_date'])
|
||||
|
||||
# Create consumption schedule
|
||||
schedule = []
|
||||
current_date = date.today()
|
||||
remaining_consumption = daily_consumption
|
||||
|
||||
for batch in all_batches:
|
||||
days_until_expiry = (batch['expiry_date'] - current_date).days
|
||||
batch_quantity = float(batch['quantity'])
|
||||
|
||||
# Calculate days to consume this batch
|
||||
days_to_consume = min(
|
||||
batch_quantity / daily_consumption,
|
||||
days_until_expiry
|
||||
)
|
||||
|
||||
quantity_consumed = days_to_consume * daily_consumption
|
||||
waste = max(0, batch_quantity - quantity_consumed)
|
||||
|
||||
schedule.append({
|
||||
'start_date': current_date,
|
||||
'end_date': current_date + timedelta(days=int(days_to_consume)),
|
||||
'quantity': batch['quantity'],
|
||||
'quantity_consumed': Decimal(str(quantity_consumed)),
|
||||
'quantity_wasted': Decimal(str(waste)),
|
||||
'expiry_date': batch['expiry_date'],
|
||||
'is_existing': batch['is_existing']
|
||||
})
|
||||
|
||||
current_date += timedelta(days=int(days_to_consume))
|
||||
|
||||
return schedule
|
||||
|
||||
def _calculate_waste_risk(
|
||||
self,
|
||||
days_supply: float,
|
||||
shelf_life_days: int,
|
||||
threshold: float
|
||||
) -> str:
|
||||
"""
|
||||
Calculate waste risk level.
|
||||
|
||||
Args:
|
||||
days_supply: Days of supply ordered
|
||||
shelf_life_days: Shelf life in days
|
||||
threshold: Waste risk threshold
|
||||
|
||||
Returns:
|
||||
Risk level: 'low', 'medium', 'high'
|
||||
"""
|
||||
if days_supply <= shelf_life_days * threshold * 0.5:
|
||||
return 'low'
|
||||
elif days_supply <= shelf_life_days * threshold:
|
||||
return 'medium'
|
||||
else:
|
||||
return 'high'
|
||||
|
||||
def get_expiration_alerts(
|
||||
self,
|
||||
inventory_batches: List[Dict],
|
||||
alert_days_threshold: int = 3
|
||||
) -> List[Dict]:
|
||||
"""
|
||||
Get alerts for batches expiring soon.
|
||||
|
||||
Args:
|
||||
inventory_batches: List of batches with expiry dates
|
||||
alert_days_threshold: Days before expiry to alert
|
||||
|
||||
Returns:
|
||||
List of expiration alerts
|
||||
"""
|
||||
alerts = []
|
||||
today = date.today()
|
||||
|
||||
for batch in inventory_batches:
|
||||
expiry_date = batch.get('expiry_date')
|
||||
if not expiry_date:
|
||||
continue
|
||||
|
||||
days_until_expiry = (expiry_date - today).days
|
||||
|
||||
if days_until_expiry <= alert_days_threshold:
|
||||
alerts.append({
|
||||
'ingredient_id': batch.get('ingredient_id'),
|
||||
'ingredient_name': batch.get('ingredient_name'),
|
||||
'quantity': batch.get('quantity'),
|
||||
'expiry_date': expiry_date,
|
||||
'days_until_expiry': days_until_expiry,
|
||||
'severity': 'critical' if days_until_expiry <= 1 else 'high'
|
||||
})
|
||||
|
||||
if alerts:
|
||||
logger.warning(f"Found {len(alerts)} batches expiring within {alert_days_threshold} days")
|
||||
|
||||
return alerts
|
||||
|
||||
def export_to_dict(self, adjustment: ShelfLifeAdjustment) -> Dict:
|
||||
"""
|
||||
Export adjustment to dictionary for API response.
|
||||
|
||||
Args:
|
||||
adjustment: ShelfLifeAdjustment
|
||||
|
||||
Returns:
|
||||
Dictionary representation
|
||||
"""
|
||||
return {
|
||||
'original_quantity': float(adjustment.original_quantity),
|
||||
'adjusted_quantity': float(adjustment.adjusted_quantity),
|
||||
'adjustment_reason': adjustment.adjustment_reason,
|
||||
'waste_risk': adjustment.waste_risk,
|
||||
'recommended_order_date': adjustment.recommended_order_date.isoformat(),
|
||||
'use_by_date': adjustment.use_by_date.isoformat(),
|
||||
'is_constrained': adjustment.is_constrained
|
||||
}
|
||||
@@ -0,0 +1,343 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/services/smart_procurement_calculator.py
|
||||
# ================================================================
|
||||
"""
|
||||
Smart Procurement Calculator
|
||||
Migrated from Orders Service
|
||||
|
||||
Implements multi-constraint procurement quantity optimization combining:
|
||||
- AI demand forecasting
|
||||
- Ingredient reorder rules (reorder_point, reorder_quantity)
|
||||
- Supplier constraints (minimum_order_quantity, minimum_order_amount)
|
||||
- Storage limits (max_stock_level)
|
||||
- Price tier optimization
|
||||
"""
|
||||
|
||||
import math
|
||||
from decimal import Decimal
|
||||
from typing import Dict, Any, List, Tuple, Optional
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SmartProcurementCalculator:
    """
    Smart procurement quantity calculator with multi-tier constraint optimization
    """

    def __init__(self, procurement_settings: Dict[str, Any]):
        """
        Initialize calculator with tenant procurement settings

        Args:
            procurement_settings: Tenant settings dict with flags:
                - use_reorder_rules: bool
                - economic_rounding: bool
                - respect_storage_limits: bool
                - use_supplier_minimums: bool
                - optimize_price_tiers: bool
        """
        # Every feature flag defaults to enabled when absent from settings.
        self.use_reorder_rules = procurement_settings.get('use_reorder_rules', True)
        self.economic_rounding = procurement_settings.get('economic_rounding', True)
        self.respect_storage_limits = procurement_settings.get('respect_storage_limits', True)
        self.use_supplier_minimums = procurement_settings.get('use_supplier_minimums', True)
        self.optimize_price_tiers = procurement_settings.get('optimize_price_tiers', True)
|
||||
|
||||
    def calculate_procurement_quantity(
        self,
        ingredient: Dict[str, Any],
        supplier: Optional[Dict[str, Any]],
        price_list_entry: Optional[Dict[str, Any]],
        ai_forecast_quantity: Decimal,
        current_stock: Decimal,
        safety_stock_percentage: Decimal = Decimal('20.0')
    ) -> Dict[str, Any]:
        """
        Calculate optimal procurement quantity using smart hybrid approach

        The base quantity is chosen by a four-tier decision (critical stock,
        reorder point, AI forecast, no order), then constraints are applied
        strictly in this order: economic rounding, supplier minimum quantity,
        price tiers, storage capacity, and finally the supplier's minimum
        order value. The ordering matters — e.g. the storage cap can undo a
        price-tier bump, and is deliberately applied after it.

        Args:
            ingredient: Ingredient data with reorder_point, reorder_quantity, max_stock_level
            supplier: Supplier data with minimum_order_amount
            price_list_entry: Price list with minimum_order_quantity, tier_pricing
            ai_forecast_quantity: AI-predicted demand quantity
            current_stock: Current stock level
            safety_stock_percentage: Safety stock buffer percentage

        Returns:
            Dict with:
            - order_quantity: Final calculated quantity to order
            - calculation_method: Method used (e.g., 'REORDER_POINT_TRIGGERED')
            - ai_suggested_quantity: Original AI forecast
            - adjusted_quantity: Final quantity after constraints
            - adjustment_reason: Human-readable explanation
            - warnings: List of warnings/notes
            - supplier_minimum_applied: bool
            - storage_limit_applied: bool
            - reorder_rule_applied: bool
            - price_tier_applied: Dict or None
        """
        warnings = []
        result = {
            'ai_suggested_quantity': ai_forecast_quantity,
            'supplier_minimum_applied': False,
            'storage_limit_applied': False,
            'reorder_rule_applied': False,
            'price_tier_applied': None
        }

        # Extract ingredient parameters.
        # Missing/None max_stock_level becomes Decimal('Infinity'): an
        # explicit "no storage cap" sentinel checked again further down.
        reorder_point = Decimal(str(ingredient.get('reorder_point', 0)))
        reorder_quantity = Decimal(str(ingredient.get('reorder_quantity', 0)))
        low_stock_threshold = Decimal(str(ingredient.get('low_stock_threshold', 0)))
        max_stock_level = Decimal(str(ingredient.get('max_stock_level') or 'Infinity'))

        # Extract supplier/price list parameters
        supplier_min_qty = Decimal('0')
        supplier_min_amount = Decimal('0')
        tier_pricing = []

        if price_list_entry:
            supplier_min_qty = Decimal(str(price_list_entry.get('minimum_order_quantity', 0)))
            tier_pricing = price_list_entry.get('tier_pricing') or []

        if supplier:
            supplier_min_amount = Decimal(str(supplier.get('minimum_order_amount', 0)))

        # Calculate AI-based net requirement with safety stock
        safety_stock = ai_forecast_quantity * (safety_stock_percentage / Decimal('100'))
        total_needed = ai_forecast_quantity + safety_stock
        ai_net_requirement = max(Decimal('0'), total_needed - current_stock)

        # TIER 1: Critical Safety Check (Emergency Override)
        if self.use_reorder_rules and current_stock <= low_stock_threshold:
            base_order = max(reorder_quantity, ai_net_requirement)
            result['calculation_method'] = 'CRITICAL_STOCK_EMERGENCY'
            result['reorder_rule_applied'] = True
            warnings.append(f"CRITICAL: Stock ({current_stock}) below threshold ({low_stock_threshold})")
            order_qty = base_order

        # TIER 2: Reorder Point Triggered
        elif self.use_reorder_rules and current_stock <= reorder_point:
            base_order = max(reorder_quantity, ai_net_requirement)
            result['calculation_method'] = 'REORDER_POINT_TRIGGERED'
            result['reorder_rule_applied'] = True
            warnings.append(f"Reorder point triggered: stock ({current_stock}) ≤ reorder point ({reorder_point})")
            order_qty = base_order

        # TIER 3: Forecast-Driven (Above reorder point, no immediate need)
        elif ai_net_requirement > 0:
            order_qty = ai_net_requirement
            result['calculation_method'] = 'FORECAST_DRIVEN_PROACTIVE'
            warnings.append(f"AI forecast suggests ordering {ai_net_requirement} units")

        # TIER 4: No Order Needed — early return, no constraints applied.
        else:
            result['order_quantity'] = Decimal('0')
            result['adjusted_quantity'] = Decimal('0')
            result['calculation_method'] = 'SUFFICIENT_STOCK'
            result['adjustment_reason'] = f"Current stock ({current_stock}) is sufficient. No order needed."
            result['warnings'] = warnings
            return result

        # Apply Economic Rounding (reorder_quantity multiples)
        if self.economic_rounding and reorder_quantity > 0:
            multiples = math.ceil(float(order_qty / reorder_quantity))
            rounded_qty = Decimal(multiples) * reorder_quantity
            if rounded_qty > order_qty:
                warnings.append(f"Rounded to {multiples}× reorder quantity ({reorder_quantity}) = {rounded_qty}")
                order_qty = rounded_qty

        # Apply Supplier Minimum Quantity Constraint
        if self.use_supplier_minimums and supplier_min_qty > 0:
            if order_qty < supplier_min_qty:
                warnings.append(f"Increased from {order_qty} to supplier minimum ({supplier_min_qty})")
                order_qty = supplier_min_qty
                result['supplier_minimum_applied'] = True
            else:
                # Round to multiples of minimum_order_quantity (packaging constraint)
                multiples = math.ceil(float(order_qty / supplier_min_qty))
                rounded_qty = Decimal(multiples) * supplier_min_qty
                if rounded_qty > order_qty:
                    warnings.append(f"Rounded to {multiples}× supplier packaging ({supplier_min_qty}) = {rounded_qty}")
                    result['supplier_minimum_applied'] = True
                    order_qty = rounded_qty

        # Apply Price Tier Optimization (may raise order_qty to reach a
        # cheaper volume tier; storage check below can still cap it)
        if self.optimize_price_tiers and tier_pricing and price_list_entry:
            unit_price = Decimal(str(price_list_entry.get('unit_price', 0)))
            tier_result = self._optimize_price_tier(
                order_qty,
                unit_price,
                tier_pricing,
                current_stock,
                max_stock_level
            )

            if tier_result['tier_applied']:
                order_qty = tier_result['optimized_quantity']
                result['price_tier_applied'] = tier_result['tier_info']
                warnings.append(tier_result['message'])

        # Apply Storage Capacity Constraint (skipped for the Infinity sentinel)
        if self.respect_storage_limits and max_stock_level != Decimal('Infinity'):
            if (current_stock + order_qty) > max_stock_level:
                capped_qty = max(Decimal('0'), max_stock_level - current_stock)
                warnings.append(f"Capped from {order_qty} to {capped_qty} due to storage limit ({max_stock_level})")
                order_qty = capped_qty
                result['storage_limit_applied'] = True
                result['calculation_method'] += '_STORAGE_LIMITED'

        # Check supplier minimum_order_amount (total order value constraint).
        # This only annotates the result; the quantity is not changed here.
        if self.use_supplier_minimums and supplier_min_amount > 0 and price_list_entry:
            unit_price = Decimal(str(price_list_entry.get('unit_price', 0)))
            order_value = order_qty * unit_price

            if order_value < supplier_min_amount:
                warnings.append(
                    f"⚠️ Order value €{order_value:.2f} < supplier minimum €{supplier_min_amount:.2f}. "
                    "This item needs to be combined with other products in the same PO."
                )
                result['calculation_method'] += '_NEEDS_CONSOLIDATION'

        # Build final result
        result['order_quantity'] = order_qty
        result['adjusted_quantity'] = order_qty
        result['adjustment_reason'] = self._build_adjustment_reason(
            ai_forecast_quantity,
            ai_net_requirement,
            order_qty,
            warnings,
            result
        )
        result['warnings'] = warnings

        return result
|
||||
|
||||
def _optimize_price_tier(
|
||||
self,
|
||||
current_qty: Decimal,
|
||||
base_unit_price: Decimal,
|
||||
tier_pricing: List[Dict[str, Any]],
|
||||
current_stock: Decimal,
|
||||
max_stock_level: Decimal
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Optimize order quantity to capture volume discount tiers if beneficial
|
||||
|
||||
Args:
|
||||
current_qty: Current calculated order quantity
|
||||
base_unit_price: Base unit price without tiers
|
||||
tier_pricing: List of tier dicts with 'quantity' and 'price'
|
||||
current_stock: Current stock level
|
||||
max_stock_level: Maximum storage capacity
|
||||
|
||||
Returns:
|
||||
Dict with tier_applied (bool), optimized_quantity, tier_info, message
|
||||
"""
|
||||
if not tier_pricing:
|
||||
return {'tier_applied': False, 'optimized_quantity': current_qty}
|
||||
|
||||
# Sort tiers by quantity
|
||||
sorted_tiers = sorted(tier_pricing, key=lambda x: x['quantity'])
|
||||
|
||||
best_tier = None
|
||||
best_savings = Decimal('0')
|
||||
|
||||
for tier in sorted_tiers:
|
||||
tier_qty = Decimal(str(tier['quantity']))
|
||||
tier_price = Decimal(str(tier['price']))
|
||||
|
||||
# Skip if tier quantity is below current quantity (already captured)
|
||||
if tier_qty <= current_qty:
|
||||
continue
|
||||
|
||||
# Skip if tier would exceed storage capacity
|
||||
if self.respect_storage_limits and (current_stock + tier_qty) > max_stock_level:
|
||||
continue
|
||||
|
||||
# Skip if tier is more than 50% above current quantity (too much excess)
|
||||
if tier_qty > current_qty * Decimal('1.5'):
|
||||
continue
|
||||
|
||||
# Calculate savings
|
||||
current_cost = current_qty * base_unit_price
|
||||
tier_cost = tier_qty * tier_price
|
||||
savings = current_cost - tier_cost
|
||||
|
||||
if savings > best_savings:
|
||||
best_savings = savings
|
||||
best_tier = {
|
||||
'quantity': tier_qty,
|
||||
'price': tier_price,
|
||||
'savings': savings
|
||||
}
|
||||
|
||||
if best_tier:
|
||||
return {
|
||||
'tier_applied': True,
|
||||
'optimized_quantity': best_tier['quantity'],
|
||||
'tier_info': best_tier,
|
||||
'message': (
|
||||
f"Upgraded to {best_tier['quantity']} units "
|
||||
f"@ €{best_tier['price']}/unit "
|
||||
f"(saves €{best_tier['savings']:.2f})"
|
||||
)
|
||||
}
|
||||
|
||||
return {'tier_applied': False, 'optimized_quantity': current_qty}
|
||||
|
||||
def _build_adjustment_reason(
|
||||
self,
|
||||
ai_forecast: Decimal,
|
||||
ai_net_requirement: Decimal,
|
||||
final_quantity: Decimal,
|
||||
warnings: List[str],
|
||||
result: Dict[str, Any]
|
||||
) -> str:
|
||||
"""
|
||||
Build human-readable explanation of quantity adjustments
|
||||
|
||||
Args:
|
||||
ai_forecast: Original AI forecast
|
||||
ai_net_requirement: AI forecast + safety stock - current stock
|
||||
final_quantity: Final order quantity after all adjustments
|
||||
warnings: List of warning messages
|
||||
result: Calculation result dict
|
||||
|
||||
Returns:
|
||||
Human-readable adjustment explanation
|
||||
"""
|
||||
parts = []
|
||||
|
||||
# Start with calculation method
|
||||
method = result.get('calculation_method', 'UNKNOWN')
|
||||
parts.append(f"Method: {method.replace('_', ' ').title()}")
|
||||
|
||||
# AI forecast base
|
||||
parts.append(f"AI Forecast: {ai_forecast} units, Net Requirement: {ai_net_requirement} units")
|
||||
|
||||
# Adjustments applied
|
||||
adjustments = []
|
||||
if result.get('reorder_rule_applied'):
|
||||
adjustments.append("reorder rules")
|
||||
if result.get('supplier_minimum_applied'):
|
||||
adjustments.append("supplier minimums")
|
||||
if result.get('storage_limit_applied'):
|
||||
adjustments.append("storage limits")
|
||||
if result.get('price_tier_applied'):
|
||||
adjustments.append("price tier optimization")
|
||||
|
||||
if adjustments:
|
||||
parts.append(f"Adjustments: {', '.join(adjustments)}")
|
||||
|
||||
# Final quantity
|
||||
parts.append(f"Final Quantity: {final_quantity} units")
|
||||
|
||||
# Key warnings
|
||||
if warnings:
|
||||
key_warnings = [w for w in warnings if '⚠️' in w or 'CRITICAL' in w or 'saves €' in w]
|
||||
if key_warnings:
|
||||
parts.append(f"Notes: {'; '.join(key_warnings)}")
|
||||
|
||||
return " | ".join(parts)
|
||||
538
services/procurement/app/services/supplier_selector.py
Normal file
538
services/procurement/app/services/supplier_selector.py
Normal file
@@ -0,0 +1,538 @@
|
||||
"""
|
||||
Supplier Selector
|
||||
|
||||
Intelligently selects suppliers based on multi-criteria optimization including
|
||||
price, lead time, quality, reliability, and risk diversification.
|
||||
"""
|
||||
|
||||
from decimal import Decimal
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from dataclasses import dataclass
|
||||
from datetime import date
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class SupplierOption:
    """Supplier option for an ingredient.

    Carries the pricing, lead-time, and performance inputs used by
    SupplierSelector scoring. The three performance scores are fractions
    in [0, 1].
    """
    supplier_id: str
    supplier_name: str
    unit_price: Decimal
    lead_time_days: int
    # Optional commercial constraints; None means "no constraint".
    min_order_quantity: Optional[Decimal] = None
    max_capacity: Optional[Decimal] = None
    quality_score: float = 0.85  # 0-1
    reliability_score: float = 0.90  # 0-1
    on_time_delivery_rate: float = 0.95  # 0-1
    current_allocation_percentage: float = 0.0  # Current % of total orders
|
||||
|
||||
|
||||
@dataclass
class SupplierAllocation:
    """Allocation of quantity to a supplier.

    One slice of a requirement assigned to a single supplier, including
    the score that justified it.
    """
    supplier_id: str
    supplier_name: str
    allocated_quantity: Decimal
    # Fraction of the total requirement (0-1), not a 0-100 percent value.
    allocation_percentage: float
    allocation_type: str  # 'primary', 'backup', 'diversification'
    unit_price: Decimal
    total_cost: Decimal  # unit_price * allocated_quantity
    lead_time_days: int
    supplier_score: float
    # Per-criterion scores keyed by e.g. 'price_score', 'total_score'.
    score_breakdown: Dict[str, float]
    allocation_reason: str
|
||||
|
||||
|
||||
@dataclass
class SupplierSelectionResult:
    """Complete supplier selection result for one ingredient."""
    ingredient_id: str
    ingredient_name: str
    required_quantity: Decimal
    allocations: List[SupplierAllocation]
    total_cost: Decimal
    # Lead times weighted by each allocation's fraction of the requirement.
    weighted_lead_time: float
    risk_score: float  # Lower is better
    # True whenever more than one supplier received an allocation.
    diversification_applied: bool
    # One of 'single_source', 'dual_source', 'multi_source'.
    selection_strategy: str
|
||||
|
||||
|
||||
class SupplierSelector:
    """
    Selects optimal suppliers using multi-criteria decision analysis.

    Scoring Factors:
    1. Price (lower is better)
    2. Lead time (shorter is better)
    3. Quality score (higher is better)
    4. Reliability (higher is better)
    5. Diversification (balance across suppliers)

    Strategies:
    - Single source: Best overall supplier
    - Dual source: Primary + backup
    - Multi-source: Split across 2-3 suppliers for large orders
    """

    def __init__(
        self,
        price_weight: float = 0.40,
        lead_time_weight: float = 0.20,
        quality_weight: float = 0.20,
        reliability_weight: float = 0.20,
        diversification_threshold: Decimal = Decimal('1000'),
        max_single_supplier_percentage: float = 0.70
    ):
        """
        Initialize supplier selector.

        Args:
            price_weight: Weight for price (0-1)
            lead_time_weight: Weight for lead time (0-1)
            quality_weight: Weight for quality (0-1)
            reliability_weight: Weight for reliability (0-1)
            diversification_threshold: Quantity above which to diversify
            max_single_supplier_percentage: Max % to single supplier
        """
        self.price_weight = price_weight
        self.lead_time_weight = lead_time_weight
        self.quality_weight = quality_weight
        self.reliability_weight = reliability_weight
        self.diversification_threshold = diversification_threshold
        self.max_single_supplier_percentage = max_single_supplier_percentage

        # Validate weights sum to 1; if they are merely off, renormalize
        # rather than reject so misconfigured callers still get sane ratios.
        total_weight = (
            price_weight + lead_time_weight + quality_weight + reliability_weight
        )
        if abs(total_weight - 1.0) > 0.01:
            logger.warning(
                f"Supplier selection weights don't sum to 1.0 (sum={total_weight}), normalizing"
            )
            # NOTE(review): if all four weights are 0 this divides by zero —
            # confirm callers never pass an all-zero configuration.
            self.price_weight /= total_weight
            self.lead_time_weight /= total_weight
            self.quality_weight /= total_weight
            self.reliability_weight /= total_weight

    def select_suppliers(
        self,
        ingredient_id: str,
        ingredient_name: str,
        required_quantity: Decimal,
        supplier_options: List[SupplierOption]
    ) -> SupplierSelectionResult:
        """
        Select optimal supplier(s) for an ingredient.

        Args:
            ingredient_id: Ingredient ID
            ingredient_name: Ingredient name
            required_quantity: Quantity needed
            supplier_options: List of available suppliers

        Returns:
            SupplierSelectionResult with allocations

        Raises:
            ValueError: If supplier_options is empty.
        """
        if not supplier_options:
            raise ValueError(f"No supplier options available for {ingredient_name}")

        logger.info(
            f"Selecting suppliers for {ingredient_name}: "
            f"{required_quantity} units from {len(supplier_options)} options"
        )

        # Score all suppliers
        scored_suppliers = self._score_suppliers(supplier_options)

        # Determine selection strategy
        strategy = self._determine_strategy(required_quantity, supplier_options)

        # Select suppliers based on strategy
        if strategy == 'single_source':
            allocations = self._select_single_source(
                required_quantity,
                scored_suppliers
            )
        elif strategy == 'dual_source':
            allocations = self._select_dual_source(
                required_quantity,
                scored_suppliers
            )
        else:  # multi_source
            allocations = self._select_multi_source(
                required_quantity,
                scored_suppliers
            )

        # Calculate result metrics
        total_cost = sum(alloc.total_cost for alloc in allocations)
        # Lead time weighted by each allocation's share of the requirement.
        weighted_lead_time = sum(
            alloc.lead_time_days * alloc.allocation_percentage
            for alloc in allocations
        )
        risk_score = self._calculate_risk_score(allocations)
        diversification_applied = len(allocations) > 1

        result = SupplierSelectionResult(
            ingredient_id=ingredient_id,
            ingredient_name=ingredient_name,
            required_quantity=required_quantity,
            allocations=allocations,
            total_cost=total_cost,
            weighted_lead_time=weighted_lead_time,
            risk_score=risk_score,
            diversification_applied=diversification_applied,
            selection_strategy=strategy
        )

        # NOTE(review): this log formats cost with '$' while other messages
        # in the service use '€' — confirm the intended currency symbol.
        logger.info(
            f"{ingredient_name}: Selected {len(allocations)} supplier(s) "
            f"(strategy={strategy}, total_cost=${total_cost:.2f})"
        )

        return result

    def _score_suppliers(
        self,
        suppliers: List[SupplierOption]
    ) -> List[Tuple[SupplierOption, float, Dict[str, float]]]:
        """
        Score all suppliers using weighted criteria.

        Price and lead time are min-max normalized across the candidate set,
        so scores are relative to this pool, not absolute.

        Args:
            suppliers: List of supplier options

        Returns:
            List of (supplier, score, score_breakdown) tuples, sorted by
            total score descending.
        """
        if not suppliers:
            return []

        # Normalize factors for comparison
        prices = [s.unit_price for s in suppliers]
        lead_times = [s.lead_time_days for s in suppliers]

        min_price = min(prices)
        max_price = max(prices)
        min_lead_time = min(lead_times)
        max_lead_time = max(lead_times)

        scored = []

        for supplier in suppliers:
            # Price score (normalized, lower is better); 1.0 when all
            # candidates share the same price.
            if max_price > min_price:
                price_score = 1.0 - float((supplier.unit_price - min_price) / (max_price - min_price))
            else:
                price_score = 1.0

            # Lead time score (normalized, shorter is better)
            if max_lead_time > min_lead_time:
                lead_time_score = 1.0 - (supplier.lead_time_days - min_lead_time) / (max_lead_time - min_lead_time)
            else:
                lead_time_score = 1.0

            # Quality and reliability scores (already 0-1)
            quality_score = supplier.quality_score
            reliability_score = supplier.reliability_score

            # Calculate weighted total score
            total_score = (
                self.price_weight * price_score +
                self.lead_time_weight * lead_time_score +
                self.quality_weight * quality_score +
                self.reliability_weight * reliability_score
            )

            score_breakdown = {
                'price_score': price_score,
                'lead_time_score': lead_time_score,
                'quality_score': quality_score,
                'reliability_score': reliability_score,
                'total_score': total_score
            }

            scored.append((supplier, total_score, score_breakdown))

        # Sort by score (descending)
        scored.sort(key=lambda x: x[1], reverse=True)

        return scored

    def _determine_strategy(
        self,
        required_quantity: Decimal,
        suppliers: List[SupplierOption]
    ) -> str:
        """
        Determine selection strategy based on quantity and options.

        Args:
            required_quantity: Quantity needed
            suppliers: Available suppliers

        Returns:
            Strategy: 'single_source', 'dual_source', or 'multi_source'
        """
        if len(suppliers) == 1:
            return 'single_source'

        # Large orders should be diversified
        if required_quantity >= self.diversification_threshold:
            return 'multi_source' if len(suppliers) >= 3 else 'dual_source'

        # Small orders: single source unless quality/reliability concerns
        avg_reliability = sum(s.reliability_score for s in suppliers) / len(suppliers)
        if avg_reliability < 0.85:
            return 'dual_source'  # Use backup for unreliable suppliers

        return 'single_source'

    def _select_single_source(
        self,
        required_quantity: Decimal,
        scored_suppliers: List[Tuple[SupplierOption, float, Dict[str, float]]]
    ) -> List[SupplierAllocation]:
        """
        Select single best supplier.

        Args:
            required_quantity: Quantity needed
            scored_suppliers: Scored suppliers (sorted best-first)

        Returns:
            List with single allocation (or a dual-source split when the
            best supplier lacks capacity)
        """
        best_supplier, score, score_breakdown = scored_suppliers[0]

        # Check capacity
        if best_supplier.max_capacity and required_quantity > best_supplier.max_capacity:
            logger.warning(
                f"{best_supplier.supplier_name}: Required quantity {required_quantity} "
                f"exceeds capacity {best_supplier.max_capacity}, will need to split"
            )
            # Fall back to dual source
            return self._select_dual_source(required_quantity, scored_suppliers)

        allocation = SupplierAllocation(
            supplier_id=best_supplier.supplier_id,
            supplier_name=best_supplier.supplier_name,
            allocated_quantity=required_quantity,
            allocation_percentage=1.0,
            allocation_type='primary',
            unit_price=best_supplier.unit_price,
            total_cost=best_supplier.unit_price * required_quantity,
            lead_time_days=best_supplier.lead_time_days,
            supplier_score=score,
            score_breakdown=score_breakdown,
            allocation_reason='Best overall score (single source strategy)'
        )

        return [allocation]

    def _select_dual_source(
        self,
        required_quantity: Decimal,
        scored_suppliers: List[Tuple[SupplierOption, float, Dict[str, float]]]
    ) -> List[SupplierAllocation]:
        """
        Select primary supplier + backup.

        Args:
            required_quantity: Quantity needed
            scored_suppliers: Scored suppliers (sorted best-first)

        Returns:
            List with two allocations
        """
        if len(scored_suppliers) < 2:
            return self._select_single_source(required_quantity, scored_suppliers)

        primary_supplier, primary_score, primary_breakdown = scored_suppliers[0]
        backup_supplier, backup_score, backup_breakdown = scored_suppliers[1]

        # Primary gets max_single_supplier_percentage (default 70%),
        # backup gets the remainder.
        primary_percentage = self.max_single_supplier_percentage
        backup_percentage = 1.0 - primary_percentage

        primary_qty = required_quantity * Decimal(str(primary_percentage))
        backup_qty = required_quantity * Decimal(str(backup_percentage))

        # Check capacities
        if primary_supplier.max_capacity and primary_qty > primary_supplier.max_capacity:
            # Rebalance: cap the primary and push the overflow to the backup.
            # NOTE(review): the backup's max_capacity is NOT re-checked after
            # this shift — confirm upstream guards against backup overflow.
            primary_qty = primary_supplier.max_capacity
            backup_qty = required_quantity - primary_qty
            primary_percentage = float(primary_qty / required_quantity)
            backup_percentage = float(backup_qty / required_quantity)

        allocations = [
            SupplierAllocation(
                supplier_id=primary_supplier.supplier_id,
                supplier_name=primary_supplier.supplier_name,
                allocated_quantity=primary_qty,
                allocation_percentage=primary_percentage,
                allocation_type='primary',
                unit_price=primary_supplier.unit_price,
                total_cost=primary_supplier.unit_price * primary_qty,
                lead_time_days=primary_supplier.lead_time_days,
                supplier_score=primary_score,
                score_breakdown=primary_breakdown,
                allocation_reason=f'Primary supplier ({primary_percentage*100:.0f}% allocation)'
            ),
            SupplierAllocation(
                supplier_id=backup_supplier.supplier_id,
                supplier_name=backup_supplier.supplier_name,
                allocated_quantity=backup_qty,
                allocation_percentage=backup_percentage,
                allocation_type='backup',
                unit_price=backup_supplier.unit_price,
                total_cost=backup_supplier.unit_price * backup_qty,
                lead_time_days=backup_supplier.lead_time_days,
                supplier_score=backup_score,
                score_breakdown=backup_breakdown,
                allocation_reason=f'Backup supplier ({backup_percentage*100:.0f}% allocation for risk mitigation)'
            )
        ]

        return allocations

    def _select_multi_source(
        self,
        required_quantity: Decimal,
        scored_suppliers: List[Tuple[SupplierOption, float, Dict[str, float]]]
    ) -> List[SupplierAllocation]:
        """
        Split across multiple suppliers for large orders.

        Args:
            required_quantity: Quantity needed
            scored_suppliers: Scored suppliers (sorted best-first)

        Returns:
            List with multiple allocations
        """
        if len(scored_suppliers) < 3:
            return self._select_dual_source(required_quantity, scored_suppliers)

        # Use top 3 suppliers
        top_3 = scored_suppliers[:3]

        # Allocate proportionally to scores
        total_score = sum(score for _, score, _ in top_3)

        allocations = []
        remaining_qty = required_quantity

        for i, (supplier, score, score_breakdown) in enumerate(top_3):
            if i == len(top_3) - 1:
                # Last supplier gets remainder
                allocated_qty = remaining_qty
            else:
                # Allocate based on score proportion
                proportion = score / total_score
                allocated_qty = required_quantity * Decimal(str(proportion))

            # Check capacity
            # NOTE(review): if this cap fires on the LAST supplier, the
            # remaining shortfall is dropped silently and the allocations
            # sum to less than required_quantity — confirm this is handled
            # (or at least surfaced) by callers.
            if supplier.max_capacity and allocated_qty > supplier.max_capacity:
                allocated_qty = supplier.max_capacity

            allocation_percentage = float(allocated_qty / required_quantity)

            allocation = SupplierAllocation(
                supplier_id=supplier.supplier_id,
                supplier_name=supplier.supplier_name,
                allocated_quantity=allocated_qty,
                allocation_percentage=allocation_percentage,
                allocation_type='diversification',
                unit_price=supplier.unit_price,
                total_cost=supplier.unit_price * allocated_qty,
                lead_time_days=supplier.lead_time_days,
                supplier_score=score,
                score_breakdown=score_breakdown,
                allocation_reason=f'Multi-source diversification ({allocation_percentage*100:.0f}%)'
            )

            allocations.append(allocation)
            remaining_qty -= allocated_qty

            if remaining_qty <= 0:
                break

        return allocations

    def _calculate_risk_score(
        self,
        allocations: List[SupplierAllocation]
    ) -> float:
        """
        Calculate overall risk score (lower is better).

        Combines three weighted components: diversification (fewer
        suppliers = riskier), concentration (largest single share), and
        reliability (approximated from supplier scores).

        Args:
            allocations: List of allocations

        Returns:
            Risk score (0-1)
        """
        if not allocations:
            return 1.0

        # Single source = higher risk
        diversification_risk = 1.0 / len(allocations)

        # Concentration risk (how much in single supplier)
        max_allocation = max(alloc.allocation_percentage for alloc in allocations)
        concentration_risk = max_allocation

        # Reliability risk (average of supplier reliability)
        # Note: We don't have reliability in SupplierAllocation, estimate from score
        avg_supplier_score = sum(alloc.supplier_score for alloc in allocations) / len(allocations)
        reliability_risk = 1.0 - avg_supplier_score

        # Combined risk (weighted)
        risk_score = (
            0.4 * diversification_risk +
            0.3 * concentration_risk +
            0.3 * reliability_risk
        )

        return risk_score

    def export_result_to_dict(self, result: SupplierSelectionResult) -> Dict:
        """
        Export result to dictionary for API response.

        Decimal fields are converted to float for JSON serialization.

        Args:
            result: Supplier selection result

        Returns:
            Dictionary representation
        """
        return {
            'ingredient_id': result.ingredient_id,
            'ingredient_name': result.ingredient_name,
            'required_quantity': float(result.required_quantity),
            'total_cost': float(result.total_cost),
            'weighted_lead_time': result.weighted_lead_time,
            'risk_score': result.risk_score,
            'diversification_applied': result.diversification_applied,
            'selection_strategy': result.selection_strategy,
            'allocations': [
                {
                    'supplier_id': alloc.supplier_id,
                    'supplier_name': alloc.supplier_name,
                    'allocated_quantity': float(alloc.allocated_quantity),
                    'allocation_percentage': alloc.allocation_percentage,
                    'allocation_type': alloc.allocation_type,
                    'unit_price': float(alloc.unit_price),
                    'total_cost': float(alloc.total_cost),
                    'lead_time_days': alloc.lead_time_days,
                    'supplier_score': alloc.supplier_score,
                    'score_breakdown': alloc.score_breakdown,
                    'allocation_reason': alloc.allocation_reason
                }
                for alloc in result.allocations
            ]
        }
|
||||
9
services/procurement/app/utils/__init__.py
Normal file
9
services/procurement/app/utils/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
# services/procurement/app/utils/__init__.py
"""
Utility modules for procurement service
"""

# Note: Redis utilities are now provided by shared.redis_utils
# Import from shared.redis_utils instead of local cache module

# No local utilities are re-exported yet; the public API is explicitly empty.
__all__ = []
|
||||
150
services/procurement/migrations/env.py
Normal file
150
services/procurement/migrations/env.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Alembic environment configuration for procurement service"""
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
from alembic import context
|
||||
|
||||
# Determine the project root (where the shared directory is located)
|
||||
current_file_dir = os.path.dirname(os.path.abspath(__file__)) # migrations directory
|
||||
service_dir = os.path.dirname(current_file_dir) # procurement service directory
|
||||
project_root = os.path.dirname(os.path.dirname(service_dir)) # project root
|
||||
|
||||
# Add project root to Python path first
|
||||
if project_root not in sys.path:
|
||||
sys.path.insert(0, project_root)
|
||||
|
||||
# Add shared directory to Python path
|
||||
shared_path = os.path.join(project_root, "shared")
|
||||
if shared_path not in sys.path:
|
||||
sys.path.insert(0, shared_path)
|
||||
|
||||
# Add service directory to Python path
|
||||
if service_dir not in sys.path:
|
||||
sys.path.insert(0, service_dir)
|
||||
|
||||
try:
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
# Import all models to ensure they are registered with Base.metadata
|
||||
from app.models import * # noqa: F401, F403
|
||||
from app.models.replenishment import * # noqa: F401, F403
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error in migrations env.py: {e}")
|
||||
print(f"Current Python path: {sys.path}")
|
||||
raise
|
||||
|
||||
# this is the Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Determine service name from file path
|
||||
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
|
||||
service_name_upper = service_name.upper().replace('-', '_')
|
||||
|
||||
# Set database URL from environment variables with multiple fallback strategies
|
||||
database_url = (
|
||||
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
|
||||
os.getenv('DATABASE_URL') # Generic fallback
|
||||
)
|
||||
|
||||
# If DATABASE_URL is not set, construct from individual components
|
||||
if not database_url:
|
||||
# Try generic PostgreSQL environment variables first
|
||||
postgres_host = os.getenv('POSTGRES_HOST')
|
||||
postgres_port = os.getenv('POSTGRES_PORT', '5432')
|
||||
postgres_db = os.getenv('POSTGRES_DB')
|
||||
postgres_user = os.getenv('POSTGRES_USER')
|
||||
postgres_password = os.getenv('POSTGRES_PASSWORD')
|
||||
|
||||
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
|
||||
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
|
||||
else:
|
||||
# Try service-specific environment variables
|
||||
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
|
||||
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
|
||||
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
|
||||
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
|
||||
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
|
||||
|
||||
if db_password:
|
||||
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
|
||||
else:
|
||||
# Final fallback: try to get from settings object
|
||||
try:
|
||||
database_url = getattr(settings, 'DATABASE_URL', None)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not database_url:
|
||||
error_msg = f"ERROR: No database URL configured for {service_name} service"
|
||||
print(error_msg)
|
||||
raise Exception(error_msg)
|
||||
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Set target metadata
|
||||
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures Alembic with only a database URL (no live connection) so
    migration statements are rendered as literal SQL.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given (synchronous) connection."""
    configure_opts = {
        "connection": connection,
        "target_metadata": target_metadata,
        # Detect column type and server-default changes during autogenerate.
        "compare_type": True,
        "compare_server_default": True,
    }
    context.configure(**configure_opts)
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async support.

    Builds a short-lived async engine from the Alembic config (NullPool,
    so no connections linger) and disposes of it after migrations finish.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = async_engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with engine.connect() as conn:
        # Alembic's migration machinery is synchronous; bridge via run_sync.
        await conn.run_sync(do_run_migrations)

    await engine.dispose()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Synchronous entry point that drives the async migration runner.
    """
    asyncio.run(run_async_migrations())
|
||||
|
||||
|
||||
# Entry point: Alembic decides offline (SQL rendering) vs online (live DB)
# mode from the command invocation.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
26
services/procurement/migrations/script.py.mako
Normal file
26
services/procurement/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,617 @@
|
||||
"""unified initial procurement schema with all fields from all migrations
|
||||
|
||||
Revision ID: 001_unified_initial_schema
|
||||
Revises:
|
||||
Create Date: 2025-11-27 12:00:00.000000+00:00
|
||||
|
||||
Complete procurement service schema including:
|
||||
- Procurement plans and requirements
|
||||
- Purchase orders and items (with reasoning_data for i18n JTBD dashboard)
|
||||
- Deliveries and delivery items
|
||||
- Supplier invoices
|
||||
- Replenishment planning
|
||||
- Inventory projections
|
||||
- Supplier allocations and selection history
|
||||
- Audit logs
|
||||
- Internal transfer fields
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_unified_initial_schema'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create PostgreSQL enum types first
|
||||
# PurchaseOrderStatus enum
|
||||
purchaseorderstatus_enum = postgresql.ENUM(
|
||||
'draft', 'pending_approval', 'approved', 'sent_to_supplier',
|
||||
'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed',
|
||||
name='purchaseorderstatus',
|
||||
create_type=False
|
||||
)
|
||||
purchaseorderstatus_enum.create(op.get_bind(), checkfirst=True)
|
||||
|
||||
# DeliveryStatus enum
|
||||
deliverystatus_enum = postgresql.ENUM(
|
||||
'scheduled', 'in_transit', 'out_for_delivery', 'delivered',
|
||||
'partially_delivered', 'failed_delivery', 'returned',
|
||||
name='deliverystatus',
|
||||
create_type=False
|
||||
)
|
||||
deliverystatus_enum.create(op.get_bind(), checkfirst=True)
|
||||
|
||||
# InvoiceStatus enum
|
||||
invoicestatus_enum = postgresql.ENUM(
|
||||
'pending', 'approved', 'paid', 'overdue', 'disputed', 'cancelled',
|
||||
name='invoicestatus',
|
||||
create_type=False
|
||||
)
|
||||
invoicestatus_enum.create(op.get_bind(), checkfirst=True)
|
||||
|
||||
# ========================================================================
|
||||
# PROCUREMENT PLANNING TABLES
|
||||
# ========================================================================
|
||||
|
||||
# Create procurement_plans table
|
||||
op.create_table('procurement_plans',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('plan_number', sa.String(length=50), nullable=False),
|
||||
sa.Column('plan_date', sa.Date(), nullable=False),
|
||||
sa.Column('plan_period_start', sa.Date(), nullable=False),
|
||||
sa.Column('plan_period_end', sa.Date(), nullable=False),
|
||||
sa.Column('planning_horizon_days', sa.Integer(), nullable=False, server_default='14'),
|
||||
sa.Column('status', sa.String(length=50), nullable=False, server_default='draft'),
|
||||
sa.Column('plan_type', sa.String(length=50), nullable=False, server_default='regular'),
|
||||
sa.Column('priority', sa.String(length=20), nullable=False, server_default='normal'),
|
||||
sa.Column('business_model', sa.String(length=50), nullable=True),
|
||||
sa.Column('procurement_strategy', sa.String(length=50), nullable=False, server_default='just_in_time'),
|
||||
sa.Column('total_requirements', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('total_estimated_cost', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('total_approved_cost', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('cost_variance', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('total_demand_orders', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('total_demand_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('total_production_requirements', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('safety_stock_buffer', sa.Numeric(precision=5, scale=2), nullable=False, server_default='20.00'),
|
||||
sa.Column('primary_suppliers_count', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('backup_suppliers_count', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('supplier_diversification_score', sa.Numeric(precision=3, scale=1), nullable=True),
|
||||
sa.Column('supply_risk_level', sa.String(length=20), nullable=False, server_default='low'),
|
||||
sa.Column('demand_forecast_confidence', sa.Numeric(precision=3, scale=1), nullable=True),
|
||||
sa.Column('seasonality_adjustment', sa.Numeric(precision=5, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('execution_started_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('execution_completed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('fulfillment_rate', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('on_time_delivery_rate', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('cost_accuracy', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('quality_score', sa.Numeric(precision=3, scale=1), nullable=True),
|
||||
sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('production_schedules', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('inventory_snapshots', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('forecast_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('stakeholder_notifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('approval_workflow', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('special_requirements', sa.Text(), nullable=True),
|
||||
sa.Column('seasonal_adjustments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('emergency_provisions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('erp_reference', sa.String(length=100), nullable=True),
|
||||
sa.Column('supplier_portal_reference', sa.String(length=100), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('updated_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('plan_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_procurement_plans_plan_date'), 'procurement_plans', ['plan_date'], unique=False)
|
||||
op.create_index(op.f('ix_procurement_plans_plan_number'), 'procurement_plans', ['plan_number'], unique=True)
|
||||
op.create_index(op.f('ix_procurement_plans_status'), 'procurement_plans', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_procurement_plans_tenant_id'), 'procurement_plans', ['tenant_id'], unique=False)
|
||||
|
||||
# Create procurement_requirements table
|
||||
op.create_table('procurement_requirements',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('plan_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('requirement_number', sa.String(length=50), nullable=False),
|
||||
sa.Column('product_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('product_name', sa.String(length=200), nullable=False),
|
||||
sa.Column('product_sku', sa.String(length=100), nullable=True),
|
||||
sa.Column('product_category', sa.String(length=100), nullable=True),
|
||||
sa.Column('product_type', sa.String(length=50), nullable=False, server_default='ingredient'),
|
||||
sa.Column('is_locally_produced', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('recipe_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('parent_requirement_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('bom_explosion_level', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('required_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('unit_of_measure', sa.String(length=50), nullable=False),
|
||||
sa.Column('safety_stock_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('total_quantity_needed', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('current_stock_level', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('reserved_stock', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('available_stock', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('net_requirement', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('order_demand', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('production_demand', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('forecast_demand', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('buffer_demand', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.00'),
|
||||
sa.Column('preferred_supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('backup_supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('supplier_name', sa.String(length=200), nullable=True),
|
||||
sa.Column('supplier_lead_time_days', sa.Integer(), nullable=True),
|
||||
sa.Column('minimum_order_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
|
||||
sa.Column('estimated_unit_cost', sa.Numeric(precision=10, scale=4), nullable=True),
|
||||
sa.Column('estimated_total_cost', sa.Numeric(precision=12, scale=2), nullable=True),
|
||||
sa.Column('last_purchase_cost', sa.Numeric(precision=10, scale=4), nullable=True),
|
||||
sa.Column('cost_variance', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('required_by_date', sa.Date(), nullable=False),
|
||||
sa.Column('lead_time_buffer_days', sa.Integer(), nullable=False, server_default='1'),
|
||||
sa.Column('suggested_order_date', sa.Date(), nullable=False),
|
||||
sa.Column('latest_order_date', sa.Date(), nullable=False),
|
||||
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
|
||||
sa.Column('status', sa.String(length=50), nullable=False, server_default='pending'),
|
||||
sa.Column('priority', sa.String(length=20), nullable=False, server_default='normal'),
|
||||
sa.Column('risk_level', sa.String(length=20), nullable=False, server_default='low'),
|
||||
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('purchase_order_number', sa.String(length=50), nullable=True),
|
||||
sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('ordered_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('expected_delivery_date', sa.Date(), nullable=True),
|
||||
sa.Column('actual_delivery_date', sa.Date(), nullable=True),
|
||||
sa.Column('received_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('delivery_status', sa.String(length=50), nullable=False, server_default='pending'),
|
||||
sa.Column('fulfillment_rate', sa.Numeric(precision=5, scale=2), nullable=True),
|
||||
sa.Column('on_time_delivery', sa.Boolean(), nullable=True),
|
||||
sa.Column('quality_rating', sa.Numeric(precision=3, scale=1), nullable=True),
|
||||
sa.Column('source_orders', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('source_production_batches', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('demand_analysis', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('quality_specifications', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('special_requirements', sa.Text(), nullable=True),
|
||||
sa.Column('storage_requirements', sa.String(length=200), nullable=True),
|
||||
sa.Column('calculation_method', sa.String(length=100), nullable=True),
|
||||
sa.Column('ai_suggested_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
|
||||
sa.Column('adjusted_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
|
||||
sa.Column('adjustment_reason', sa.Text(), nullable=True),
|
||||
sa.Column('price_tier_applied', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('supplier_minimum_applied', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('storage_limit_applied', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('reorder_rule_applied', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('approved_quantity', sa.Numeric(precision=12, scale=3), nullable=True),
|
||||
sa.Column('approved_cost', sa.Numeric(precision=12, scale=2), nullable=True),
|
||||
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('procurement_notes', sa.Text(), nullable=True),
|
||||
sa.Column('supplier_communication', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.Column('requirement_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.ForeignKeyConstraint(['plan_id'], ['procurement_plans.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_procurement_requirements_plan_id'), 'procurement_requirements', ['plan_id'], unique=False)
|
||||
op.create_index(op.f('ix_procurement_requirements_product_id'), 'procurement_requirements', ['product_id'], unique=False)
|
||||
op.create_index(op.f('ix_procurement_requirements_requirement_number'), 'procurement_requirements', ['requirement_number'], unique=False)
|
||||
op.create_index(op.f('ix_procurement_requirements_status'), 'procurement_requirements', ['status'], unique=False)
|
||||
|
||||
# ========================================================================
|
||||
# PURCHASE ORDER TABLES
|
||||
# ========================================================================
|
||||
|
||||
# Create purchase_orders table (with reasoning_data for i18n and internal transfer fields)
|
||||
op.create_table('purchase_orders',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('po_number', sa.String(length=50), nullable=False),
|
||||
sa.Column('reference_number', sa.String(length=100), nullable=True),
|
||||
sa.Column('procurement_plan_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('status', purchaseorderstatus_enum, nullable=False, server_default='draft'),
|
||||
sa.Column('priority', sa.String(length=20), nullable=False, server_default='normal'),
|
||||
sa.Column('order_date', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.Column('required_delivery_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('estimated_delivery_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('expected_delivery_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('subtotal', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('tax_amount', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('shipping_cost', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('currency', sa.String(length=3), nullable=False, server_default='EUR'),
|
||||
sa.Column('delivery_address', sa.Text(), nullable=True),
|
||||
sa.Column('delivery_instructions', sa.Text(), nullable=True),
|
||||
sa.Column('delivery_contact', sa.String(length=200), nullable=True),
|
||||
sa.Column('delivery_phone', sa.String(length=30), nullable=True),
|
||||
sa.Column('requires_approval', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('auto_approved', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('auto_approval_rule_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('rejection_reason', sa.Text(), nullable=True),
|
||||
sa.Column('sent_to_supplier_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('supplier_confirmation_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('supplier_reference', sa.String(length=100), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('internal_notes', sa.Text(), nullable=True),
|
||||
sa.Column('terms_and_conditions', sa.Text(), nullable=True),
|
||||
# JTBD Dashboard: Structured reasoning for i18n support
|
||||
sa.Column('reasoning_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
# Internal transfer fields
|
||||
sa.Column('is_internal', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('source_tenant_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('destination_tenant_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('transfer_type', sa.String(length=50), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('updated_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.ForeignKeyConstraint(['procurement_plan_id'], ['procurement_plans.id']),
|
||||
# Note: supplier_id references suppliers service - no FK constraint in microservices
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_purchase_orders_po_number'), 'purchase_orders', ['po_number'], unique=True)
|
||||
op.create_index(op.f('ix_purchase_orders_procurement_plan_id'), 'purchase_orders', ['procurement_plan_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_orders_status'), 'purchase_orders', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_orders_supplier_id'), 'purchase_orders', ['supplier_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_orders_tenant_id'), 'purchase_orders', ['tenant_id'], unique=False)
|
||||
op.create_index('ix_purchase_orders_tenant_status', 'purchase_orders', ['tenant_id', 'status'], unique=False)
|
||||
op.create_index('ix_purchase_orders_tenant_plan', 'purchase_orders', ['tenant_id', 'procurement_plan_id'], unique=False)
|
||||
op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'], unique=False)
|
||||
op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'], unique=False)
|
||||
# Internal transfer indexes
|
||||
op.create_index('ix_purchase_orders_is_internal', 'purchase_orders', ['is_internal'])
|
||||
op.create_index('ix_purchase_orders_source_tenant', 'purchase_orders', ['source_tenant_id'])
|
||||
op.create_index('ix_purchase_orders_destination_tenant', 'purchase_orders', ['destination_tenant_id'])
|
||||
op.create_index('ix_po_internal_transfers', 'purchase_orders', ['tenant_id', 'is_internal', 'source_tenant_id'])
|
||||
|
||||
# Create purchase_order_items table (with supplier_price_list_id)
|
||||
op.create_table('purchase_order_items',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('procurement_requirement_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_price_list_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('product_code', sa.String(length=100), nullable=True),
|
||||
sa.Column('product_name', sa.String(length=200), nullable=False),
|
||||
sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('unit_of_measure', sa.String(length=20), nullable=False),
|
||||
sa.Column('unit_price', sa.Numeric(precision=10, scale=4), nullable=False),
|
||||
sa.Column('line_total', sa.Numeric(precision=12, scale=2), nullable=False),
|
||||
sa.Column('received_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('remaining_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('quality_requirements', sa.Text(), nullable=True),
|
||||
sa.Column('item_notes', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['procurement_requirement_id'], ['procurement_requirements.id']),
|
||||
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_purchase_order_items_inventory_product_id'), 'purchase_order_items', ['inventory_product_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_order_items_procurement_requirement_id'), 'purchase_order_items', ['procurement_requirement_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_order_items_purchase_order_id'), 'purchase_order_items', ['purchase_order_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_order_items_supplier_price_list_id'), 'purchase_order_items', ['supplier_price_list_id'], unique=False)
|
||||
op.create_index(op.f('ix_purchase_order_items_tenant_id'), 'purchase_order_items', ['tenant_id'], unique=False)
|
||||
op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'], unique=False)
|
||||
op.create_index('ix_po_items_inventory_product', 'purchase_order_items', ['inventory_product_id'], unique=False)
|
||||
|
||||
# ========================================================================
|
||||
# DELIVERY TABLES
|
||||
# ========================================================================
|
||||
|
||||
# Create deliveries table
|
||||
op.create_table('deliveries',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('delivery_number', sa.String(length=50), nullable=False),
|
||||
sa.Column('supplier_delivery_note', sa.String(length=10), nullable=True),
|
||||
sa.Column('status', deliverystatus_enum, nullable=False, server_default='scheduled'),
|
||||
sa.Column('scheduled_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('estimated_arrival', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('actual_arrival', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('delivery_address', sa.Text(), nullable=True),
|
||||
sa.Column('delivery_contact', sa.String(200), nullable=True),
|
||||
sa.Column('delivery_phone', sa.String(30), nullable=True),
|
||||
sa.Column('carrier_name', sa.String(200), nullable=True),
|
||||
sa.Column('tracking_number', sa.String(100), nullable=True),
|
||||
sa.Column('inspection_passed', sa.Boolean(), nullable=True),
|
||||
sa.Column('inspection_notes', sa.Text(), nullable=True),
|
||||
sa.Column('quality_issues', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('received_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('received_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('photos', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ondelete='CASCADE'),
|
||||
# ... Note: supplier_id references suppliers service - no FK constraint in microservices
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_deliveries_delivery_number'), 'deliveries', ['delivery_number'], unique=True)
|
||||
op.create_index(op.f('ix_deliveries_purchase_order_id'), 'deliveries', ['purchase_order_id'], unique=False)
|
||||
op.create_index(op.f('ix_deliveries_status'), 'deliveries', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_deliveries_supplier_id'), 'deliveries', ['supplier_id'], unique=False)
|
||||
op.create_index(op.f('ix_deliveries_tenant_id'), 'deliveries', ['tenant_id'], unique=False)
|
||||
op.create_index('ix_deliveries_scheduled_date', 'deliveries', ['scheduled_date'], unique=False)
|
||||
op.create_index('ix_deliveries_tenant_status', 'deliveries', ['tenant_id', 'status'], unique=False)
|
||||
|
||||
# Create delivery_items table
|
||||
op.create_table('delivery_items',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('delivery_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('purchase_order_item_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('delivered_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('accepted_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
|
||||
sa.Column('rejected_quantity', sa.Numeric(precision=12, scale=3), nullable=False, server_default='0.000'),
|
||||
sa.Column('batch_lot_number', sa.String(length=100), nullable=True),
|
||||
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('quality_grade', sa.String(length=20), nullable=True),
|
||||
sa.Column('quality_issues', sa.Text(), nullable=True),
|
||||
sa.Column('rejection_reason', sa.Text(), nullable=True),
|
||||
sa.Column('item_notes', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['purchase_order_item_id'], ['purchase_order_items.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_delivery_items_delivery_id'), 'delivery_items', ['delivery_id'], unique=False)
|
||||
op.create_index(op.f('ix_delivery_items_inventory_product_id'), 'delivery_items', ['inventory_product_id'], unique=False)
|
||||
op.create_index(op.f('ix_delivery_items_purchase_order_item_id'), 'delivery_items', ['purchase_order_item_id'], unique=False)
|
||||
op.create_index(op.f('ix_delivery_items_tenant_id'), 'delivery_items', ['tenant_id'], unique=False)
|
||||
op.create_index('ix_delivery_items_tenant_delivery', 'delivery_items', ['tenant_id', 'delivery_id'], unique=False)
|
||||
op.create_index('ix_delivery_items_inventory_product', 'delivery_items', ['inventory_product_id'], unique=False)
|
||||
|
||||
# ========================================================================
|
||||
# INVOICE TABLES
|
||||
# ========================================================================
|
||||
|
||||
# Create supplier_invoices table
|
||||
op.create_table('supplier_invoices',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('invoice_number', sa.String(length=50), nullable=False),
|
||||
sa.Column('supplier_invoice_number', sa.String(length=100), nullable=False),
|
||||
sa.Column('status', invoicestatus_enum, nullable=False, server_default='pending'),
|
||||
sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('due_date', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('received_date', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.Column('subtotal', sa.Numeric(precision=12, scale=2), nullable=False),
|
||||
sa.Column('tax_amount', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('shipping_cost', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False),
|
||||
sa.Column('currency', sa.String(length=3), nullable=False, server_default='EUR'),
|
||||
sa.Column('paid_amount', sa.Numeric(precision=12, scale=2), nullable=False, server_default='0.00'),
|
||||
sa.Column('payment_date', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('payment_reference', sa.String(length=100), nullable=True),
|
||||
sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('rejection_reason', sa.Text(), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('invoice_document_url', sa.String(length=500), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
|
||||
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'], ondelete='SET NULL'),
|
||||
# Note: supplier_id references suppliers service - no FK constraint in microservices
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_supplier_invoices_invoice_number'), 'supplier_invoices', ['invoice_number'], unique=True)
|
||||
op.create_index(op.f('ix_supplier_invoices_purchase_order_id'), 'supplier_invoices', ['purchase_order_id'], unique=False)
|
||||
op.create_index(op.f('ix_supplier_invoices_status'), 'supplier_invoices', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_supplier_invoices_supplier_id'), 'supplier_invoices', ['supplier_id'], unique=False)
|
||||
op.create_index(op.f('ix_supplier_invoices_tenant_id'), 'supplier_invoices', ['tenant_id'], unique=False)
|
||||
op.create_index('ix_invoices_due_date', 'supplier_invoices', ['due_date'], unique=False)
|
||||
op.create_index('ix_invoices_tenant_status', 'supplier_invoices', ['tenant_id', 'status'], unique=False)
|
||||
op.create_index('ix_invoices_tenant_supplier', 'supplier_invoices', ['tenant_id', 'supplier_id'], unique=False)
|
||||
|
||||
# ========================================================================
|
||||
# REPLENISHMENT PLANNING TABLES
|
||||
# ========================================================================
|
||||
|
||||
# Create replenishment_plans table
|
||||
op.create_table('replenishment_plans',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('planning_date', sa.Date(), nullable=False),
|
||||
sa.Column('projection_horizon_days', sa.Integer(), nullable=False, server_default='7'),
|
||||
sa.Column('forecast_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('production_schedule_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('total_items', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('urgent_items', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('high_risk_items', sa.Integer(), nullable=False, server_default='0'),
|
||||
sa.Column('total_estimated_cost', sa.Numeric(12, 2), nullable=False, server_default='0'),
|
||||
sa.Column('status', sa.String(50), nullable=False, server_default='draft'),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('executed_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('ix_replenishment_plans_tenant_id', 'replenishment_plans', ['tenant_id'])
|
||||
op.create_index('ix_replenishment_plans_planning_date', 'replenishment_plans', ['planning_date'])
|
||||
op.create_index('ix_replenishment_plans_status', 'replenishment_plans', ['status'])
|
||||
|
||||
# Create replenishment_plan_items table
|
||||
op.create_table('replenishment_plan_items',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('replenishment_plan_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_name', sa.String(200), nullable=False),
|
||||
sa.Column('unit_of_measure', sa.String(20), nullable=False),
|
||||
sa.Column('base_quantity', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('safety_stock_quantity', sa.Numeric(12, 3), nullable=False, server_default='0'),
|
||||
sa.Column('shelf_life_adjusted_quantity', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('final_order_quantity', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('order_date', sa.Date(), nullable=False),
|
||||
sa.Column('delivery_date', sa.Date(), nullable=False),
|
||||
sa.Column('required_by_date', sa.Date(), nullable=False),
|
||||
sa.Column('lead_time_days', sa.Integer(), nullable=False),
|
||||
sa.Column('is_urgent', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('urgency_reason', sa.Text(), nullable=True),
|
||||
sa.Column('waste_risk', sa.String(20), nullable=False, server_default='low'),
|
||||
sa.Column('stockout_risk', sa.String(20), nullable=False, server_default='low'),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('safety_stock_calculation', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('shelf_life_adjustment', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('inventory_projection', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.ForeignKeyConstraint(['replenishment_plan_id'], ['replenishment_plans.id'], ondelete='CASCADE')
|
||||
)
|
||||
op.create_index('ix_replenishment_plan_items_plan_id', 'replenishment_plan_items', ['replenishment_plan_id'])
|
||||
op.create_index('ix_replenishment_plan_items_ingredient_id', 'replenishment_plan_items', ['ingredient_id'])
|
||||
op.create_index('ix_replenishment_plan_items_order_date', 'replenishment_plan_items', ['order_date'])
|
||||
op.create_index('ix_replenishment_plan_items_is_urgent', 'replenishment_plan_items', ['is_urgent'])
|
||||
|
||||
# Create inventory_projections table
|
||||
op.create_table('inventory_projections',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_name', sa.String(200), nullable=False),
|
||||
sa.Column('projection_date', sa.Date(), nullable=False),
|
||||
sa.Column('starting_stock', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('forecasted_consumption', sa.Numeric(12, 3), nullable=False, server_default='0'),
|
||||
sa.Column('scheduled_receipts', sa.Numeric(12, 3), nullable=False, server_default='0'),
|
||||
sa.Column('projected_ending_stock', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('is_stockout', sa.Boolean(), nullable=False, server_default='false'),
|
||||
sa.Column('coverage_gap', sa.Numeric(12, 3), nullable=False, server_default='0'),
|
||||
sa.Column('replenishment_plan_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('ix_inventory_projections_tenant_id', 'inventory_projections', ['tenant_id'])
|
||||
op.create_index('ix_inventory_projections_ingredient_id', 'inventory_projections', ['ingredient_id'])
|
||||
op.create_index('ix_inventory_projections_projection_date', 'inventory_projections', ['projection_date'])
|
||||
op.create_index('ix_inventory_projections_is_stockout', 'inventory_projections', ['is_stockout'])
|
||||
op.create_index('ix_inventory_projections_unique', 'inventory_projections', ['tenant_id', 'ingredient_id', 'projection_date'], unique=True)
|
||||
|
||||
# Create supplier_allocations table
|
||||
op.create_table('supplier_allocations',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('replenishment_plan_item_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('requirement_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_name', sa.String(200), nullable=False),
|
||||
sa.Column('allocation_type', sa.String(20), nullable=False),
|
||||
sa.Column('allocated_quantity', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('allocation_percentage', sa.Numeric(5, 4), nullable=False),
|
||||
sa.Column('unit_price', sa.Numeric(12, 2), nullable=False),
|
||||
sa.Column('total_cost', sa.Numeric(12, 2), nullable=False),
|
||||
sa.Column('lead_time_days', sa.Integer(), nullable=False),
|
||||
sa.Column('supplier_score', sa.Numeric(5, 2), nullable=False),
|
||||
sa.Column('score_breakdown', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('allocation_reason', sa.Text(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.ForeignKeyConstraint(['replenishment_plan_item_id'], ['replenishment_plan_items.id'], ondelete='CASCADE')
|
||||
)
|
||||
op.create_index('ix_supplier_allocations_plan_item_id', 'supplier_allocations', ['replenishment_plan_item_id'])
|
||||
op.create_index('ix_supplier_allocations_requirement_id', 'supplier_allocations', ['requirement_id'])
|
||||
op.create_index('ix_supplier_allocations_supplier_id', 'supplier_allocations', ['supplier_id'])
|
||||
|
||||
# Create supplier_selection_history table
|
||||
op.create_table('supplier_selection_history',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('ingredient_name', sa.String(200), nullable=False),
|
||||
sa.Column('selected_supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('selected_supplier_name', sa.String(200), nullable=False),
|
||||
sa.Column('selection_date', sa.Date(), nullable=False),
|
||||
sa.Column('quantity', sa.Numeric(12, 3), nullable=False),
|
||||
sa.Column('unit_price', sa.Numeric(12, 2), nullable=False),
|
||||
sa.Column('total_cost', sa.Numeric(12, 2), nullable=False),
|
||||
sa.Column('lead_time_days', sa.Integer(), nullable=False),
|
||||
sa.Column('quality_score', sa.Numeric(5, 2), nullable=True),
|
||||
sa.Column('delivery_performance', sa.Numeric(5, 2), nullable=True),
|
||||
sa.Column('selection_strategy', sa.String(50), nullable=False),
|
||||
sa.Column('was_primary_choice', sa.Boolean(), nullable=False, server_default='true'),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('ix_supplier_selection_history_tenant_id', 'supplier_selection_history', ['tenant_id'])
|
||||
op.create_index('ix_supplier_selection_history_ingredient_id', 'supplier_selection_history', ['ingredient_id'])
|
||||
op.create_index('ix_supplier_selection_history_supplier_id', 'supplier_selection_history', ['selected_supplier_id'])
|
||||
op.create_index('ix_supplier_selection_history_selection_date', 'supplier_selection_history', ['selection_date'])
|
||||
|
||||
# ========================================================================
|
||||
# AUDIT LOG TABLE
|
||||
# ========================================================================
|
||||
|
||||
# Create audit_logs table
|
||||
op.create_table('audit_logs',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('action', sa.String(length=100), nullable=False),
|
||||
sa.Column('resource_type', sa.String(length=100), nullable=False),
|
||||
sa.Column('resource_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('severity', sa.String(length=20), nullable=False),
|
||||
sa.Column('service_name', sa.String(length=100), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('ip_address', sa.String(length=45), nullable=True),
|
||||
sa.Column('user_agent', sa.Text(), nullable=True),
|
||||
sa.Column('endpoint', sa.String(length=255), nullable=True),
|
||||
sa.Column('method', sa.String(length=10), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
|
||||
op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
|
||||
op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
|
||||
op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
|
||||
op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
|
||||
op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Revert the migration: drop every procurement table, then the enums."""
    # Children are dropped before their parents so FK constraints never
    # block a drop; this is the exact reverse of the creation order.
    tables_newest_first = (
        'audit_logs',
        'supplier_selection_history',
        'supplier_allocations',
        'inventory_projections',
        'replenishment_plan_items',
        'replenishment_plans',
        'supplier_invoices',
        'delivery_items',
        'deliveries',
        'purchase_order_items',
        'purchase_orders',
        'procurement_requirements',
        'procurement_plans',
    )
    for table_name in tables_newest_first:
        op.drop_table(table_name)

    # Enum types can only be removed once no column references them.
    for statement in (
        "DROP TYPE IF EXISTS purchaseorderstatus",
        "DROP TYPE IF EXISTS deliverystatus",
        "DROP TYPE IF EXISTS invoicestatus",
    ):
        op.execute(statement)
|
||||
58
services/procurement/requirements.txt
Normal file
58
services/procurement/requirements.txt
Normal file
@@ -0,0 +1,58 @@
|
||||
# Procurement Service Dependencies
|
||||
# FastAPI and web framework
|
||||
fastapi==0.119.0
|
||||
uvicorn[standard]==0.32.1
|
||||
pydantic==2.12.3
|
||||
pydantic-settings==2.7.1
|
||||
|
||||
# Database
|
||||
sqlalchemy==2.0.44
|
||||
asyncpg==0.30.0
|
||||
alembic==1.17.0
|
||||
psycopg2-binary==2.9.10
|
||||
|
||||
# HTTP clients
|
||||
httpx==0.28.1
|
||||
|
||||
# Redis for caching
|
||||
redis==6.4.0
|
||||
|
||||
# Message queuing
|
||||
aio-pika==9.4.3
|
||||
|
||||
# Scheduling
|
||||
APScheduler==3.10.4
|
||||
|
||||
# Logging and monitoring
|
||||
structlog==25.4.0
|
||||
psutil==5.9.8
|
||||
opentelemetry-api==1.39.1
|
||||
opentelemetry-sdk==1.39.1
|
||||
opentelemetry-instrumentation-fastapi==0.60b1
|
||||
opentelemetry-exporter-otlp-proto-grpc==1.39.1
|
||||
opentelemetry-exporter-otlp-proto-http==1.39.1
|
||||
opentelemetry-instrumentation-httpx==0.60b1
|
||||
opentelemetry-instrumentation-redis==0.60b1
|
||||
opentelemetry-instrumentation-sqlalchemy==0.60b1
|
||||
|
||||
# Date and time utilities
|
||||
python-dateutil==2.9.0.post0
|
||||
pytz==2024.2
|
||||
|
||||
# Data processing for ML insights
|
||||
pandas==2.2.3
|
||||
numpy==2.2.1
|
||||
scikit-learn==1.6.1
|
||||
scipy==1.15.1
|
||||
|
||||
# Validation and utilities
|
||||
email-validator==2.2.0
|
||||
|
||||
# Authentication
|
||||
python-jose[cryptography]==3.3.0
|
||||
cryptography==44.0.0
|
||||
|
||||
# Development dependencies
|
||||
python-multipart==0.0.6
|
||||
pytest==8.3.4
|
||||
pytest-asyncio==0.25.2
|
||||
@@ -0,0 +1,481 @@
|
||||
"""
|
||||
Tests for Supplier Performance Predictor
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from datetime import datetime, timedelta
|
||||
from app.ml.supplier_performance_predictor import SupplierPerformancePredictor
|
||||
|
||||
|
||||
@pytest.fixture
def sample_order_history_good_supplier():
    """Generate a deterministic year of weekly orders for a reliable supplier.

    Reliability profile: ~95% on-time (occasional 1-day delay), ~2% quality
    issues, quantity accuracy within +/-2% of the ordered amount.

    Returns:
        pd.DataFrame: one row per order with delivery dates, quantities,
        quality flag/score, and order value.
    """
    # Seed the generator so score-threshold assertions in dependent tests
    # (e.g. reliability_score >= 85) cannot flake between runs.
    rng = np.random.default_rng(42)
    dates = pd.date_range(start='2024-01-01', end='2024-12-31', freq='W')

    orders = []
    for i, date in enumerate(dates):
        expected_delivery = date + timedelta(days=3)

        # Good supplier: 95% on-time, occasional 1-day delay
        if rng.random() < 0.95:
            actual_delivery = expected_delivery
        else:
            actual_delivery = expected_delivery + timedelta(days=1)

        # Good quality: ~2% issue rate
        quality_issues = rng.random() > 0.98
        quality_score = rng.uniform(90, 100) if not quality_issues else rng.uniform(70, 85)

        # Good quantity accuracy: within +/-2%
        quantity_accuracy = rng.uniform(0.98, 1.02)

        orders.append({
            'order_id': f'order-{i}',
            'order_date': date,
            'expected_delivery_date': expected_delivery,
            'actual_delivery_date': actual_delivery,
            'order_quantity': 100,
            'received_quantity': int(100 * quantity_accuracy),
            'quality_issues': quality_issues,
            'quality_score': quality_score,
            'order_value': 500.0
        })

    return pd.DataFrame(orders)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_order_history_poor_supplier():
    """Generate a deterministic year of weekly orders for an unreliable supplier.

    Reliability profile: only ~60% on-time (delays of 2-5 days otherwise),
    ~20% quality-issue rate, and ~25% short deliveries (75-95% of the
    ordered quantity).

    Returns:
        pd.DataFrame: one row per order with delivery dates, quantities,
        quality flag/score, and order value.
    """
    # Seed the generator so threshold assertions in dependent tests
    # (reliability_score < 75, delay probability > 0.3, ...) cannot flake.
    rng = np.random.default_rng(1337)
    dates = pd.date_range(start='2024-01-01', end='2024-12-31', freq='W')

    orders = []
    for i, date in enumerate(dates):
        expected_delivery = date + timedelta(days=3)

        # Poor supplier: 60% on-time, frequent delays of 2-5 days
        if rng.random() < 0.60:
            actual_delivery = expected_delivery
        else:
            actual_delivery = expected_delivery + timedelta(days=int(rng.integers(2, 6)))

        # Poor quality: ~20% issue rate
        quality_issues = rng.random() > 0.80
        quality_score = rng.uniform(85, 100) if not quality_issues else rng.uniform(50, 75)

        # Poor quantity accuracy: frequent short deliveries
        if rng.random() < 0.25:
            quantity_accuracy = rng.uniform(0.75, 0.95)  # Short delivery
        else:
            quantity_accuracy = rng.uniform(0.95, 1.05)

        orders.append({
            'order_id': f'order-{i}',
            'order_date': date,
            'expected_delivery_date': expected_delivery,
            'actual_delivery_date': actual_delivery,
            'order_quantity': 100,
            'received_quantity': int(100 * quantity_accuracy),
            'quality_issues': quality_issues,
            'quality_score': quality_score,
            'order_value': 500.0
        })

    return pd.DataFrame(orders)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_analyze_good_supplier(sample_order_history_good_supplier):
    """A reliable supplier scores high and is classified as low risk."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='good-supplier',
        order_history=sample_order_history_good_supplier,
        min_orders=10
    )

    # The result payload carries every top-level section.
    for section in ('tenant_id', 'supplier_id', 'reliability_score',
                    'metrics', 'predictions', 'risk_assessment', 'insights'):
        assert section in results

    # Every order contributed, and the headline rates are present.
    metrics = results['metrics']
    assert metrics['total_orders'] == len(sample_order_history_good_supplier)
    for rate in ('on_time_rate', 'quality_issue_rate', 'avg_quantity_accuracy'):
        assert rate in metrics

    # A dependable supplier earns a high reliability score ...
    reliability_score = results['reliability_score']
    assert reliability_score >= 85, f"Expected high reliability, got {reliability_score}"

    # ... and a correspondingly low risk classification.
    risk_assessment = results['risk_assessment']
    assert risk_assessment['risk_level'] in ['low', 'medium']
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_analyze_poor_supplier(sample_order_history_poor_supplier):
    """An unreliable supplier scores low, carries risk factors and insights."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='poor-supplier',
        order_history=sample_order_history_poor_supplier,
        min_orders=10
    )

    # A weak track record lands below the "good" band.
    reliability_score = results['reliability_score']
    assert reliability_score < 75, f"Expected low reliability, got {reliability_score}"

    # Risk is elevated, with at least one concrete factor behind it.
    risk_assessment = results['risk_assessment']
    assert risk_assessment['risk_level'] in ['medium', 'high', 'critical']
    assert len(risk_assessment['risk_factors']) > 0

    # Insights fire, and at least one is an alert or a prediction.
    insights = results['insights']
    assert len(insights) > 0
    alert_insights = [i for i in insights if i['type'] in ['alert', 'prediction']]
    assert len(alert_insights) > 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_performance_metrics_calculation(sample_order_history_good_supplier):
    """All expected metrics are produced and stay inside their bounds."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='test-supplier',
        order_history=sample_order_history_good_supplier
    )

    metrics = results['metrics']

    # Every metric downstream consumers rely on must be present.
    for metric in ('total_orders', 'on_time_orders', 'delayed_orders',
                   'on_time_rate', 'avg_delivery_delay_days',
                   'avg_quantity_accuracy', 'short_deliveries',
                   'short_delivery_rate', 'quality_issues',
                   'quality_issue_rate', 'avg_quality_score',
                   'delivery_consistency', 'quantity_consistency'):
        assert metric in metrics, f"Missing metric: {metric}"

    # Rate/score metrics stay inside their natural ranges.
    upper_bounds = {
        'on_time_rate': 100,
        'avg_quantity_accuracy': 200,  # Allow up to 200% over-delivery
        'quality_issue_rate': 100,
        'avg_quality_score': 100,
    }
    for name, upper in upper_bounds.items():
        assert 0 <= metrics[name] <= upper
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_reliability_score_calculation():
    """Reliability scoring spans known perfect and poor metric profiles."""
    predictor = SupplierPerformancePredictor()

    # A flawless track record should land at (or near) the ceiling.
    perfect_metrics = dict(
        on_time_rate=100.0,
        avg_quantity_accuracy=100.0,
        avg_quality_score=100.0,
        delivery_consistency=100.0,
        quantity_consistency=100.0,
        quality_issue_rate=0.0,
        short_delivery_rate=0.0,
    )
    perfect_score = predictor._calculate_reliability_score(perfect_metrics)
    assert perfect_score >= 95, f"Expected perfect score ~100, got {perfect_score}"

    # Weak delivery plus penalty-triggering issue rates should sink the score.
    poor_metrics = dict(
        on_time_rate=50.0,
        avg_quantity_accuracy=85.0,
        avg_quality_score=70.0,
        delivery_consistency=50.0,
        quantity_consistency=60.0,
        quality_issue_rate=20.0,   # Should apply penalty
        short_delivery_rate=25.0,  # Should apply penalty
    )
    poor_score = predictor._calculate_reliability_score(poor_metrics)
    assert poor_score < 70, f"Expected poor score <70, got {poor_score}"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_delay_probability_prediction(sample_order_history_poor_supplier):
    """Delay probability is a valid, confidence-backed probability."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='test-supplier',
        order_history=sample_order_history_poor_supplier
    )

    predictions = results['predictions']

    # Probability is present and lies in [0, 1] ...
    assert 'next_order_delay_probability' in predictions
    delay_probability = predictions['next_order_delay_probability']
    assert 0 <= delay_probability <= 1.0
    # ... and an unreliable history pushes it well above baseline.
    assert delay_probability > 0.3

    # A 0-100 confidence accompanies the prediction.
    assert 'confidence' in predictions
    assert 0 <= predictions['confidence'] <= 100
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_risk_assessment(sample_order_history_poor_supplier):
    """Risk assessment is structurally complete and bounded."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='test-supplier',
        order_history=sample_order_history_poor_supplier
    )

    risk_assessment = results['risk_assessment']

    # All four assessment fields are produced.
    for field in ('risk_level', 'risk_score', 'risk_factors', 'recommendation'):
        assert field in risk_assessment

    # Level comes from the closed set, score from the 0-100 scale.
    assert risk_assessment['risk_level'] in ['low', 'medium', 'high', 'critical']
    assert 0 <= risk_assessment['risk_score'] <= 100

    # A poor supplier must surface concrete risk factors ...
    assert len(risk_assessment['risk_factors']) > 0

    # ... and a non-empty textual recommendation.
    recommendation = risk_assessment['recommendation']
    assert isinstance(recommendation, str)
    assert len(recommendation) > 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_insight_generation_low_reliability(sample_order_history_poor_supplier):
    """A low-reliability supplier yields actionable reliability insights."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='poor-supplier',
        order_history=sample_order_history_poor_supplier
    )

    insights = results['insights']
    assert len(insights) > 0

    # When a reliability-themed insight exists, it must be a high-priority,
    # actionable alert/recommendation with concrete follow-up actions.
    reliability_insights = [i for i in insights
                            if 'reliability' in i.get('title', '').lower()]
    if reliability_insights:
        insight = reliability_insights[0]
        assert insight['type'] in ['alert', 'recommendation']
        assert insight['priority'] in ['high', 'critical']
        assert 'actionable' in insight
        assert insight['actionable'] is True
        assert 'recommendation_actions' in insight
        assert len(insight['recommendation_actions']) > 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_insight_generation_high_delay_risk(sample_order_history_poor_supplier):
    """A delay-themed insight, when emitted, carries its supporting fields."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='poor-supplier',
        order_history=sample_order_history_poor_supplier
    )

    # Filter for delay-risk predictions among the generated insights.
    delay_insights = [i for i in results['insights']
                      if 'delay' in i.get('title', '').lower()]

    if delay_insights:
        insight = delay_insights[0]
        # Confidence, metrics payload, and recommended actions must ride along.
        for field in ('confidence', 'metrics_json', 'recommendation_actions'):
            assert field in insight
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_insight_generation_excellent_supplier(sample_order_history_good_supplier):
    """An excellent supplier earns a positively-tagged insight."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='excellent-supplier',
        order_history=sample_order_history_good_supplier
    )

    # Filter for excellence-themed insights.
    positive_insights = [i for i in results['insights']
                         if 'excellent' in i.get('title', '').lower()]

    if positive_insights:
        insight = positive_insights[0]
        # Positive performance is reported as a plain insight, not an alert.
        assert insight['type'] == 'insight'
        assert insight['impact_type'] == 'positive_performance'
|
||||
|
||||
|
||||
def test_compare_suppliers():
    """Comparison ranks suppliers by reliability and surfaces extremes."""
    predictor = SupplierPerformancePredictor()

    # Three mock analyses spanning the low/medium/high risk spectrum.
    suppliers_analysis = [
        {
            'supplier_id': sid,
            'reliability_score': score,
            'risk_assessment': {'risk_level': level, 'risk_score': risk},
        }
        for sid, score, level, risk in (
            ('supplier-1', 95, 'low', 10),
            ('supplier-2', 60, 'high', 75),
            ('supplier-3', 80, 'medium', 40),
        )
    ]

    comparison = predictor.compare_suppliers(suppliers_analysis)

    # Summary structure is complete.
    for key in ('suppliers_compared', 'top_supplier', 'top_supplier_score',
                'bottom_supplier', 'bottom_supplier_score',
                'ranked_suppliers', 'recommendations'):
        assert key in comparison

    # Best and worst are identified along with their scores.
    assert comparison['suppliers_compared'] == 3
    assert comparison['top_supplier'] == 'supplier-1'
    assert comparison['top_supplier_score'] == 95
    assert comparison['bottom_supplier'] == 'supplier-2'
    assert comparison['bottom_supplier_score'] == 60

    # Ranking runs best-first, worst-last.
    ranked = comparison['ranked_suppliers']
    assert ranked[0]['supplier_id'] == 'supplier-1'
    assert ranked[-1]['supplier_id'] == 'supplier-2'

    # At least one recommendation is produced.
    assert len(comparison['recommendations']) > 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_insufficient_data_handling():
    """Too few orders yields the graceful "insufficient data" response."""
    predictor = SupplierPerformancePredictor()

    # Five perfect orders -- below the min_orders=10 threshold.
    base = datetime(2024, 1, 1)
    small_history = pd.DataFrame([
        {
            'order_date': base + timedelta(days=day),
            'expected_delivery_date': base + timedelta(days=day + 3),
            'actual_delivery_date': base + timedelta(days=day + 3),
            'order_quantity': 100,
            'received_quantity': 100,
            'quality_issues': False,
            'quality_score': 95.0,
            'order_value': 500.0,
        }
        for day in range(5)
    ])

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='new-supplier',
        order_history=small_history,
        min_orders=10
    )

    # The predictor must decline to score thin data instead of guessing.
    assert results['orders_analyzed'] == 0
    assert results['reliability_score'] is None
    assert results['risk_assessment']['risk_level'] == 'unknown'
    assert 'Insufficient' in results['risk_assessment']['risk_factors'][0]
|
||||
|
||||
|
||||
def test_get_supplier_reliability_score():
    """Cached reliability scores round-trip through the accessor."""
    predictor = SupplierPerformancePredictor()

    # Unknown suppliers have no cached score.
    assert predictor.get_supplier_reliability_score('supplier-1') is None

    # Once cached, the accessor returns exactly the stored value.
    predictor.reliability_scores['supplier-1'] = 85
    assert predictor.get_supplier_reliability_score('supplier-1') == 85
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_metrics_no_nan_values(sample_order_history_good_supplier):
    """No computed metric may ever be NaN."""
    predictor = SupplierPerformancePredictor()

    results = await predictor.analyze_supplier_performance(
        tenant_id='test-tenant',
        supplier_id='test-supplier',
        order_history=sample_order_history_good_supplier
    )

    # A NaN metric would silently poison any downstream aggregation.
    for key, value in results['metrics'].items():
        if isinstance(value, float):
            assert not np.isnan(value), f"Metric {key} is NaN"
|
||||
Reference in New Issue
Block a user