Initial commit - production deployment
services/procurement/app/api/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""Procurement Service API"""

from .procurement_plans import router as procurement_plans_router
from .purchase_orders import router as purchase_orders_router
from .replenishment import router as replenishment_router

__all__ = [
    "procurement_plans_router",
    "purchase_orders_router",
    "replenishment_router"
]
services/procurement/app/api/analytics.py (new file, 82 lines)
@@ -0,0 +1,82 @@
# services/procurement/app/api/analytics.py
"""
Procurement Analytics API - Reporting, statistics, and insights
Professional+ tier subscription required
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.services.procurement_service import ProcurementService
from shared.routing import RouteBuilder
from shared.auth.access_control import analytics_tier_required
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.core.config import settings
from sqlalchemy.ext.asyncio import AsyncSession

route_builder = RouteBuilder('procurement')
router = APIRouter(tags=["procurement-analytics"])
logger = structlog.get_logger()


def get_procurement_service(db: AsyncSession = Depends(get_db)) -> ProcurementService:
    """Dependency injection for ProcurementService"""
    return ProcurementService(db, settings)


@router.get(
    route_builder.build_analytics_route("procurement")
)
@analytics_tier_required
async def get_procurement_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    procurement_service: ProcurementService = Depends(get_procurement_service)
):
    """Get procurement analytics dashboard for a tenant (Professional+ tier required)"""
    try:
        # Call the service method to get actual analytics data
        analytics_data = await procurement_service.get_procurement_analytics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        logger.info("Retrieved procurement analytics", tenant_id=tenant_id)
        return analytics_data

    except Exception as e:
        logger.error("Failed to get procurement analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get procurement analytics: {str(e)}")


@router.get(
    route_builder.build_analytics_route("procurement/trends")
)
@analytics_tier_required
async def get_procurement_trends(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, description="Number of days to retrieve trends for", ge=1, le=90),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    procurement_service: ProcurementService = Depends(get_procurement_service)
):
    """Get procurement time-series trends for charts (Professional+ tier required)"""
    try:
        # Call the service method to get trends data
        trends_data = await procurement_service.get_procurement_trends(
            tenant_id=tenant_id,
            days=days
        )

        logger.info("Retrieved procurement trends", tenant_id=tenant_id, days=days)
        return trends_data

    except Exception as e:
        logger.error("Failed to get procurement trends", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get procurement trends: {str(e)}")
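Aside (not part of the commit): the analytics endpoints above resolve ProcurementService and the current user through FastAPI dependencies, so both can be swapped out in tests with `dependency_overrides`. The sketch below assumes the application object lives at `app.main:app` and that a stub service is acceptable; it only shows the override wiring, since exercising a request would also require satisfying the `analytics_tier_required` decorator, which is not a dependency and is not shown here.

    from fastapi.testclient import TestClient

    from app.main import app  # assumption: location of the FastAPI app
    from app.api.analytics import get_procurement_service
    from shared.auth.decorators import get_current_user_dep


    class StubProcurementService:
        """Minimal stand-in returning canned analytics data."""

        async def get_procurement_analytics(self, tenant_id, start_date=None, end_date=None):
            return {"total_orders": 0}


    # Replace the real service and user dependencies for the duration of the test.
    app.dependency_overrides[get_procurement_service] = lambda: StubProcurementService()
    app.dependency_overrides[get_current_user_dep] = lambda: {"user_id": "test-user"}

    client = TestClient(app)
    # The concrete URL comes from RouteBuilder.build_analytics_route and is not
    # hard-coded here; requests would also need the tier check to pass.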
services/procurement/app/api/expected_deliveries.py (new file, 202 lines)
@@ -0,0 +1,202 @@
"""
Expected Deliveries API for Procurement Service
Public endpoint for expected delivery tracking
"""

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from sqlalchemy.orm import selectinload
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional, List
from decimal import Decimal

from app.core.database import get_db
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder

logger = structlog.get_logger()
route_builder = RouteBuilder('procurement')
router = APIRouter(tags=["expected-deliveries"])


@router.get(
    route_builder.build_base_route("expected-deliveries")
)
async def get_expected_deliveries(
    tenant_id: str,
    days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
    include_overdue: bool = Query(True, description="Include overdue deliveries"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get expected deliveries for delivery tracking system.

    Args:
        tenant_id: Tenant UUID to query
        days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
        include_overdue: Include deliveries past expected date (default True)

    Returns:
        {
            "deliveries": [
                {
                    "po_id": "uuid",
                    "po_number": "PO-2025-123",
                    "supplier_id": "uuid",
                    "supplier_name": "Molinos San José",
                    "supplier_phone": "+34 915 234 567",
                    "expected_delivery_date": "2025-12-02T10:00:00Z",
                    "delivery_window_hours": 4,
                    "status": "sent_to_supplier",
                    "line_items": [...],
                    "total_amount": 540.00,
                    "currency": "EUR"
                }
            ],
            "total_count": 8
        }
    """
    try:
        # Parse tenant_id
        tenant_uuid = uuid.UUID(tenant_id)

        # Calculate date range
        now = datetime.now(timezone.utc)
        end_date = now + timedelta(days=days_ahead)

        logger.info(
            "Fetching expected deliveries",
            tenant_id=tenant_id,
            days_ahead=days_ahead,
            include_overdue=include_overdue
        )

        # Build query for purchase orders with expected delivery dates
        query = select(PurchaseOrder).options(
            selectinload(PurchaseOrder.items)
        ).where(
            PurchaseOrder.tenant_id == tenant_uuid,
            PurchaseOrder.expected_delivery_date.isnot(None),
            PurchaseOrder.status.in_([
                PurchaseOrderStatus.approved,
                PurchaseOrderStatus.sent_to_supplier,
                PurchaseOrderStatus.confirmed
            ])
        )

        # Add date filters
        if include_overdue:
            # Include deliveries from last 48 hours (recent overdue) until end_date
            # This ensures we only show truly recent overdue deliveries, not ancient history
            start_date = now - timedelta(hours=48)
            query = query.where(
                PurchaseOrder.expected_delivery_date >= start_date,
                PurchaseOrder.expected_delivery_date <= end_date
            )
        else:
            # Only future deliveries within range
            query = query.where(
                PurchaseOrder.expected_delivery_date >= now,
                PurchaseOrder.expected_delivery_date <= end_date
            )

        # Order by delivery date
        query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

        # Execute query
        result = await db.execute(query)
        purchase_orders = result.scalars().all()

        # Format deliveries for response
        deliveries = []

        for po in purchase_orders:
            # Get supplier info from supplier service (for now, use supplier_id)
            # In production, you'd fetch from supplier service or join if same DB
            supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
            supplier_phone = None

            # Try to get supplier details from notes or metadata
            # This is a simplified approach - in production you'd query supplier service
            if po.notes:
                if "Molinos San José" in po.notes:
                    supplier_name = "Molinos San José S.L."
                    supplier_phone = "+34 915 234 567"
                elif "Lácteos del Valle" in po.notes:
                    supplier_name = "Lácteos del Valle S.A."
                    supplier_phone = "+34 913 456 789"
                elif "Chocolates Valor" in po.notes:
                    supplier_name = "Chocolates Valor"
                    supplier_phone = "+34 965 510 062"
                elif "Suministros Hostelería" in po.notes:
                    supplier_name = "Suministros Hostelería"
                    supplier_phone = "+34 911 234 567"
                elif "Miel Artesana" in po.notes:
                    supplier_name = "Miel Artesana"
                    supplier_phone = "+34 918 765 432"

            # Format line items (limit to first 5)
            line_items = []
            for item in po.items[:5]:
                line_items.append({
                    "product_name": item.product_name,
                    "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                    "unit": item.unit_of_measure or "unit"
                })

            # Default delivery window is 4 hours
            delivery_window_hours = 4

            # Ensure expected delivery date is timezone-aware and in UTC format
            expected_delivery_utc = po.expected_delivery_date
            if expected_delivery_utc and expected_delivery_utc.tzinfo is None:
                # If naive datetime, assume it's UTC (this shouldn't happen with proper DB setup)
                expected_delivery_utc = expected_delivery_utc.replace(tzinfo=timezone.utc)
            elif expected_delivery_utc and expected_delivery_utc.tzinfo is not None:
                # Convert to UTC if it's in another timezone
                expected_delivery_utc = expected_delivery_utc.astimezone(timezone.utc)

            delivery_dict = {
                "po_id": str(po.id),
                "po_number": po.po_number,
                "supplier_id": str(po.supplier_id),
                "supplier_name": supplier_name,
                "supplier_phone": supplier_phone,
                "expected_delivery_date": expected_delivery_utc.isoformat() if expected_delivery_utc else None,
                "delivery_window_hours": delivery_window_hours,
                "status": po.status.value,
                "line_items": line_items,
                "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                "currency": po.currency
            }

            deliveries.append(delivery_dict)

        logger.info(
            "Expected deliveries retrieved",
            tenant_id=tenant_id,
            count=len(deliveries)
        )

        return {
            "deliveries": deliveries,
            "total_count": len(deliveries)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")

    except Exception as e:
        logger.error(
            "Error fetching expected deliveries",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        raise HTTPException(status_code=500, detail="Internal server error")
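Aside (not part of the commit): the handler above normalizes `expected_delivery_date` to timezone-aware UTC inline. The same logic, factored into a small stdlib-only helper with a worked example, behaves as follows (naive values are assumed to be UTC, aware values are converted, None passes through); this is illustrative only.

    from datetime import datetime, timezone, timedelta
    from typing import Optional


    def to_utc(dt: Optional[datetime]) -> Optional[datetime]:
        """Return dt as a timezone-aware UTC datetime, treating naive values as UTC."""
        if dt is None:
            return None
        if dt.tzinfo is None:
            return dt.replace(tzinfo=timezone.utc)
        return dt.astimezone(timezone.utc)


    # A naive timestamp and a CET (+01:00) timestamp both end up expressed in UTC.
    naive = datetime(2025, 12, 2, 10, 0)
    cet = datetime(2025, 12, 2, 10, 0, tzinfo=timezone(timedelta(hours=1)))
    assert to_utc(naive).isoformat() == "2025-12-02T10:00:00+00:00"
    assert to_utc(cet).isoformat() == "2025-12-02T09:00:00+00:00"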
services/procurement/app/api/internal_delivery.py (new file, 188 lines)
@@ -0,0 +1,188 @@
"""
Internal Delivery Tracking API for Procurement Service
Service-to-service endpoint for expected delivery tracking by orchestrator
"""

from fastapi import APIRouter, Depends, HTTPException, Header, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from sqlalchemy.orm import selectinload
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional, List
from decimal import Decimal

from app.core.database import get_db
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
from app.core.config import settings

logger = structlog.get_logger()
router = APIRouter(prefix="/internal", tags=["internal"])


@router.get("/expected-deliveries")
async def get_expected_deliveries(
    tenant_id: str = Query(..., description="Tenant UUID"),
    days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
    include_overdue: bool = Query(True, description="Include overdue deliveries"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get expected deliveries for delivery tracking system.

    Called by orchestrator's DeliveryTrackingService to monitor upcoming deliveries
    and generate delivery alerts (arriving_soon, overdue, receipt_incomplete).

    Args:
        tenant_id: Tenant UUID to query
        days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
        include_overdue: Include deliveries past expected date (default True)

    Returns:
        {
            "deliveries": [
                {
                    "po_id": "uuid",
                    "po_number": "PO-2025-123",
                    "supplier_id": "uuid",
                    "supplier_name": "Molinos San José",
                    "supplier_phone": "+34 915 234 567",
                    "expected_delivery_date": "2025-12-02T10:00:00Z",
                    "delivery_window_hours": 4,
                    "status": "sent_to_supplier",
                    "line_items": [...],
                    "total_amount": 540.00,
                    "currency": "EUR"
                }
            ],
            "total_count": 8
        }
    """
    try:
        # Parse tenant_id
        tenant_uuid = uuid.UUID(tenant_id)

        # Calculate date range
        now = datetime.now(timezone.utc)
        end_date = now + timedelta(days=days_ahead)

        logger.info(
            "Fetching expected deliveries",
            tenant_id=tenant_id,
            days_ahead=days_ahead,
            include_overdue=include_overdue
        )

        # Build query for purchase orders with expected delivery dates
        query = select(PurchaseOrder).options(
            selectinload(PurchaseOrder.items)
        ).where(
            PurchaseOrder.tenant_id == tenant_uuid,
            PurchaseOrder.expected_delivery_date.isnot(None),
            PurchaseOrder.status.in_([
                PurchaseOrderStatus.approved,
                PurchaseOrderStatus.sent_to_supplier,
                PurchaseOrderStatus.confirmed
            ])
        )

        # Add date filters
        if include_overdue:
            # Include any delivery from past until end_date
            query = query.where(
                PurchaseOrder.expected_delivery_date <= end_date
            )
        else:
            # Only future deliveries within range
            query = query.where(
                PurchaseOrder.expected_delivery_date >= now,
                PurchaseOrder.expected_delivery_date <= end_date
            )

        # Order by delivery date
        query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

        # Execute query
        result = await db.execute(query)
        purchase_orders = result.scalars().all()

        # Format deliveries for response
        deliveries = []

        for po in purchase_orders:
            # Get supplier info from supplier service (for now, use supplier_id)
            # In production, you'd fetch from supplier service or join if same DB
            supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
            supplier_phone = None

            # Try to get supplier details from notes or metadata
            # This is a simplified approach - in production you'd query supplier service
            if po.notes:
                if "Molinos San José" in po.notes:
                    supplier_name = "Molinos San José S.L."
                    supplier_phone = "+34 915 234 567"
                elif "Lácteos del Valle" in po.notes:
                    supplier_name = "Lácteos del Valle S.A."
                    supplier_phone = "+34 913 456 789"
                elif "Chocolates Valor" in po.notes:
                    supplier_name = "Chocolates Valor"
                    supplier_phone = "+34 965 510 062"
                elif "Suministros Hostelería" in po.notes:
                    supplier_name = "Suministros Hostelería"
                    supplier_phone = "+34 911 234 567"
                elif "Miel Artesana" in po.notes:
                    supplier_name = "Miel Artesana"
                    supplier_phone = "+34 918 765 432"

            # Format line items (limit to first 5)
            line_items = []
            for item in po.items[:5]:
                line_items.append({
                    "product_name": item.product_name,
                    "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                    "unit": item.unit_of_measure or "unit"
                })

            # Default delivery window is 4 hours
            delivery_window_hours = 4

            delivery_dict = {
                "po_id": str(po.id),
                "po_number": po.po_number,
                "supplier_id": str(po.supplier_id),
                "supplier_name": supplier_name,
                "supplier_phone": supplier_phone,
                "expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
                "delivery_window_hours": delivery_window_hours,
                "status": po.status.value,
                "line_items": line_items,
                "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                "currency": po.currency
            }

            deliveries.append(delivery_dict)

        logger.info(
            "Expected deliveries retrieved",
            tenant_id=tenant_id,
            count=len(deliveries)
        )

        return {
            "deliveries": deliveries,
            "total_count": len(deliveries)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")

    except Exception as e:
        logger.error(
            "Error fetching expected deliveries",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        raise HTTPException(status_code=500, detail="Internal server error")
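Aside (not part of the commit): the docstring above says the orchestrator's DeliveryTrackingService consumes this payload to raise arriving_soon and overdue alerts. A hedged sketch of how such a classification could be done over one delivery dict follows; the thresholds and the "on_schedule" fallback are assumptions for illustration, not the orchestrator's actual rules.

    from datetime import datetime, timezone
    from typing import Optional


    def classify_delivery(delivery: dict, now: Optional[datetime] = None) -> str:
        """Classify a delivery dict from /internal/expected-deliveries (illustrative)."""
        now = now or datetime.now(timezone.utc)
        expected = datetime.fromisoformat(delivery["expected_delivery_date"])
        if expected.tzinfo is None:
            # Mirror the public endpoint's convention: naive timestamps are UTC.
            expected = expected.replace(tzinfo=timezone.utc)
        window_hours = delivery.get("delivery_window_hours", 4)
        hours_until = (expected - now).total_seconds() / 3600
        if hours_until < -window_hours:
            return "overdue"
        if 0 <= hours_until <= window_hours:
            return "arriving_soon"
        return "on_schedule"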
services/procurement/app/api/internal_delivery_tracking.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""
Internal API for triggering delivery tracking alerts.
Used by demo session cloning to generate realistic late delivery alerts.

Moved from orchestrator service to procurement service (domain ownership).

URL Pattern: /api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger
This follows the tenant-scoped pattern so gateway can proxy correctly.
"""

from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog

logger = structlog.get_logger()

router = APIRouter()


# New URL pattern: tenant-scoped so gateway proxies to procurement service correctly
@router.post("/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger")
async def trigger_delivery_tracking(
    tenant_id: UUID = Path(..., description="Tenant ID to check deliveries for"),
    request: Request = None
) -> dict:
    """
    Trigger delivery tracking for a specific tenant (internal use only).

    This endpoint is called by the demo session cloning process after POs are seeded
    to generate realistic delivery alerts (arriving soon, overdue, etc.).

    Security: Protected by x-internal-service header check.

    Args:
        tenant_id: Tenant UUID to check deliveries for
        request: FastAPI request object

    Returns:
        {
            "success": true,
            "tenant_id": "uuid",
            "alerts_generated": 3,
            "breakdown": {
                "arriving_soon": 1,
                "overdue": 1,
                "receipt_incomplete": 1
            }
        }
    """
    try:
        # Verify internal service header
        if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # Get delivery tracking service from app state
        delivery_tracking_service = getattr(request.app.state, 'delivery_tracking_service', None)

        if not delivery_tracking_service:
            logger.error("Delivery tracking service not initialized")
            raise HTTPException(
                status_code=500,
                detail="Delivery tracking service not available"
            )

        # Trigger delivery tracking for this tenant
        logger.info("Triggering delivery tracking", tenant_id=str(tenant_id))
        result = await delivery_tracking_service.check_expected_deliveries(tenant_id)

        logger.info(
            "Delivery tracking completed",
            tenant_id=str(tenant_id),
            alerts_generated=result.get("total_alerts", 0)
        )

        return {
            "success": True,
            "tenant_id": str(tenant_id),
            "alerts_generated": result.get("total_alerts", 0),
            "breakdown": {
                "arriving_soon": result.get("arriving_soon", 0),
                "overdue": result.get("overdue", 0),
                "receipt_incomplete": result.get("receipt_incomplete", 0)
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering delivery tracking",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger delivery tracking: {str(e)}"
        )
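Aside (not part of the commit): a minimal sketch of how the demo cloning flow might call the trigger endpoint above. The path and the "demo-session" header value come from the file; the host, port, and use of httpx are placeholders and assumptions, not values from this repository.

    import httpx


    async def trigger_demo_delivery_tracking(tenant_id: str) -> dict:
        """Call the internal delivery-tracking trigger with the required header."""
        url = (
            "http://procurement-service:8000"  # placeholder host, not from this repo
            f"/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger"
        )
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(url, headers={"x-internal-service": "demo-session"})
            response.raise_for_status()
            return response.json()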
services/procurement/app/api/internal_demo.py (new file, 701 lines; listing truncated below)
@@ -0,0 +1,701 @@
"""
Internal Demo Cloning API for Procurement Service
Service-to-service endpoint for cloning procurement and purchase order data
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional, Dict, Any
import os
import json
from pathlib import Path

from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (
    create_po_reasoning_low_stock,
    create_po_reasoning_supplier_contract
)
from app.core.config import settings
from shared.clients.suppliers_client import SuppliersServiceClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


async def _emit_po_approval_alerts_for_demo(
    virtual_tenant_id: uuid.UUID,
    pending_pos: list[PurchaseOrder]
) -> int:
    """
    Emit alerts for pending approval POs during demo cloning.
    Creates clients internally to avoid dependency injection issues.
    Returns the number of alerts successfully emitted.
    """
    if not pending_pos:
        return 0

    alerts_emitted = 0

    try:
        # Initialize clients locally for this operation
        from shared.clients.suppliers_client import SuppliersServiceClient
        from shared.messaging import RabbitMQClient

        # Use the existing settings instead of creating a new config
        # This avoids issues with property-based configuration
        suppliers_client = SuppliersServiceClient(settings, "procurement-service")
        rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement-service")

        # Connect to RabbitMQ
        await rabbitmq_client.connect()

        logger.info(
            "Emitting PO approval alerts for demo",
            pending_po_count=len(pending_pos),
            virtual_tenant_id=str(virtual_tenant_id)
        )

        # Emit alerts for each pending PO
        for po in pending_pos:
            try:
                # Get supplier details
                supplier_details = await suppliers_client.get_supplier_by_id(
                    tenant_id=str(virtual_tenant_id),
                    supplier_id=str(po.supplier_id)
                )

                # Skip if supplier not found
                if not supplier_details:
                    logger.warning(
                        "Supplier not found for PO, skipping alert",
                        po_id=str(po.id),
                        supplier_id=str(po.supplier_id)
                    )
                    continue

                # Calculate urgency fields
                now = datetime.utcnow()
                hours_until_consequence = None
                deadline = None

                if po.required_delivery_date:
                    supplier_lead_time_days = supplier_details.get('standard_lead_time', 7)
                    approval_deadline = po.required_delivery_date - timedelta(days=supplier_lead_time_days)
                    deadline = approval_deadline
                    hours_until_consequence = (approval_deadline - now).total_seconds() / 3600

                # Prepare alert payload
                alert_data = {
                    'id': str(uuid.uuid4()),
                    'tenant_id': str(virtual_tenant_id),
                    'service': 'procurement',
                    'type': 'po_approval_needed',
                    'alert_type': 'po_approval_needed',
                    'type_class': 'action_needed',
                    'severity': 'high' if po.priority == 'critical' else 'medium',
                    'title': '',
                    'message': '',
                    'timestamp': datetime.utcnow().isoformat(),
                    'metadata': {
                        'po_id': str(po.id),
                        'po_number': po.po_number,
                        'supplier_id': str(po.supplier_id),
                        'supplier_name': supplier_details.get('name', ''),
                        'total_amount': float(po.total_amount),
                        'currency': po.currency,
                        'priority': po.priority,
                        'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                        'created_at': po.created_at.isoformat(),
                        'financial_impact': float(po.total_amount),
                        'urgency_score': 85,
                        'deadline': deadline.isoformat() if deadline else None,
                        'hours_until_consequence': round(hours_until_consequence, 1) if hours_until_consequence else None,
                        'reasoning_data': po.reasoning_data or {}
                    },
                    'message_params': {
                        'po_number': po.po_number,
                        'supplier_name': supplier_details.get('name', ''),
                        'total_amount': float(po.total_amount),
                        'currency': po.currency,
                        'priority': po.priority,
                        'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                        'items_count': 0,
                        'created_at': po.created_at.isoformat()
                    },
                    'actions': ['approve_po', 'reject_po', 'modify_po'],
                    'item_type': 'alert'
                }

                # Publish to RabbitMQ
                await rabbitmq_client.publish_event(
                    exchange_name='alerts.exchange',
                    routing_key=f'alert.{alert_data["severity"]}.procurement',
                    event_data=alert_data
                )

                alerts_emitted += 1
                logger.debug(
                    "PO approval alert emitted",
                    po_id=str(po.id),
                    po_number=po.po_number
                )

            except Exception as po_error:
                logger.warning(
                    "Failed to emit alert for PO",
                    po_id=str(po.id),
                    po_number=po.po_number,
                    error=str(po_error)
                )
                # Continue with other POs

        # Close RabbitMQ connection
        await rabbitmq_client.disconnect()

        logger.info(
            "PO approval alerts emission completed",
            alerts_emitted=alerts_emitted,
            total_pending=len(pending_pos)
        )

        return alerts_emitted

    except Exception as e:
        logger.error(
            "Failed to emit PO approval alerts",
            error=str(e),
            virtual_tenant_id=str(virtual_tenant_id),
            exc_info=True
        )
        # Don't fail the cloning process - ensure we try to disconnect if connected
        try:
            if 'rabbitmq_client' in locals():
                await rabbitmq_client.disconnect()
        except:
            pass  # Suppress cleanup errors
        return alerts_emitted


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone procurement service data for a virtual demo tenant

    Loads seed data from JSON files and creates:
    - Purchase orders with line items
    - Procurement plans with requirements (if in seed data)
    - Replenishment plans with items (if in seed data)
    - Adjusts dates to recent timeframe

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting procurement data cloning from seed files",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "procurement_plans": 0,
            "procurement_requirements": 0,
            "purchase_orders": 0,
            "purchase_order_items": 0,
            "replenishment_plans": 0,
            "replenishment_items": 0
        }

        def parse_date_field(date_value, session_time, field_name="date"):
            """Parse date field, handling both ISO strings and BASE_TS markers"""
            if not date_value:
                return None

            # Check if it's a BASE_TS marker
            if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
                try:
                    return resolve_time_marker(date_value, session_time)
                except ValueError as e:
                    logger.warning(
                        f"Invalid BASE_TS marker in {field_name}",
                        marker=date_value,
                        error=str(e)
                    )
                    return None

            # Handle regular ISO date strings
            try:
                return adjust_date_for_demo(
                    datetime.fromisoformat(date_value.replace('Z', '+00:00')),
                    session_time
                )
            except (ValueError, AttributeError) as e:
                logger.warning(
                    f"Invalid date format in {field_name}",
                    date_value=date_value,
                    error=str(e)
                )
                return None

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "07-procurement.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "07-procurement.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "07-procurement.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded procurement seed data",
            purchase_orders=len(seed_data.get('purchase_orders', [])),
            purchase_order_items=len(seed_data.get('purchase_order_items', [])),
            procurement_plans=len(seed_data.get('procurement_plans', []))
        )

        # Load Purchase Orders from seed data
        order_id_map = {}
        for po_data in seed_data.get('purchase_orders', []):
            # Transform IDs using XOR
            from shared.utils.demo_id_transformer import transform_id
            try:
                logger.debug("Processing purchase order", po_id=po_data.get('id'), po_number=po_data.get('po_number'))
                po_uuid = uuid.UUID(po_data['id'])
                transformed_id = transform_id(po_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse purchase order UUID",
                             po_id=po_data.get('id'),
                             po_number=po_data.get('po_number'),
                             error=str(e))
                continue

            order_id_map[uuid.UUID(po_data['id'])] = transformed_id

            # Adjust dates relative to session creation time
            # FIX: Use current UTC time for future dates (expected delivery)
            current_time = datetime.now(timezone.utc)

            logger.debug("Parsing dates for PO",
                         po_number=po_data.get('po_number'),
                         order_date_raw=po_data.get('order_date') or po_data.get('order_date_offset_days'),
                         required_delivery_raw=po_data.get('required_delivery_date') or po_data.get('required_delivery_date_offset_days'))

            # Handle both direct dates and offset-based dates
            if 'order_date_offset_days' in po_data:
                adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
            else:
                adjusted_order_date = parse_date_field(po_data.get('order_date'), session_time, "order_date") or session_time

            if 'required_delivery_date_offset_days' in po_data:
                adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
            else:
                adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), session_time, "required_delivery_date")

            if 'estimated_delivery_date_offset_days' in po_data:
                adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
            else:
                adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), session_time, "estimated_delivery_date")

            # Calculate expected delivery date (use estimated delivery if not specified separately)
            # FIX: Use current UTC time for future delivery dates
            if 'expected_delivery_date_offset_days' in po_data:
                adjusted_expected_delivery = current_time + timedelta(days=po_data['expected_delivery_date_offset_days'])
            else:
                adjusted_expected_delivery = adjusted_estimated_delivery  # Fallback to estimated delivery

            logger.debug("Dates parsed successfully",
                         po_number=po_data.get('po_number'),
                         order_date=adjusted_order_date,
                         required_delivery=adjusted_required_delivery)

            # Generate a system user UUID for audit fields (demo purposes)
            system_user_id = uuid.uuid4()

            # Use status directly from JSON - JSON files should contain valid enum values
            # Valid values: draft, pending_approval, approved, sent_to_supplier, confirmed,
            #               partially_received, completed, cancelled, disputed
            raw_status = po_data.get('status', 'draft')

            # Validate that the status is a valid enum value
            valid_statuses = {'draft', 'pending_approval', 'approved', 'sent_to_supplier',
                              'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed'}

            if raw_status not in valid_statuses:
                logger.warning(
                    "Invalid status value in seed data, using default 'draft'",
                    invalid_status=raw_status,
                    po_number=po_data.get('po_number'),
                    valid_options=sorted(valid_statuses)
                )
                raw_status = 'draft'

            # Transform supplier_id to match transformed supplier IDs in suppliers service
            raw_supplier_id = po_data.get('supplier_id')
            transformed_supplier_id = transform_id(raw_supplier_id, virtual_uuid) if raw_supplier_id else None

            # Create new PurchaseOrder
            new_order = PurchaseOrder(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                po_number=f"{session_id[:8]}-{po_data.get('po_number', f'PO-{uuid.uuid4().hex[:8].upper()}')}",
                supplier_id=str(transformed_supplier_id) if transformed_supplier_id else None,
                order_date=adjusted_order_date,
                required_delivery_date=adjusted_required_delivery,
                estimated_delivery_date=adjusted_estimated_delivery,
                expected_delivery_date=adjusted_expected_delivery,
                status=raw_status,
                priority=po_data.get('priority', 'normal').lower() if po_data.get('priority') else 'normal',
                subtotal=po_data.get('subtotal', 0.0),
                tax_amount=po_data.get('tax_amount', 0.0),
                shipping_cost=po_data.get('shipping_cost', 0.0),
                discount_amount=po_data.get('discount_amount', 0.0),
                total_amount=po_data.get('total_amount', 0.0),
                currency=po_data.get('currency', 'EUR'),
                delivery_address=po_data.get('delivery_address'),
                delivery_instructions=po_data.get('delivery_instructions'),
                delivery_contact=po_data.get('delivery_contact'),
                delivery_phone=po_data.get('delivery_phone'),
                requires_approval=po_data.get('requires_approval', False),
                auto_approved=po_data.get('auto_approved', False),
                auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
                rejection_reason=po_data.get('rejection_reason'),
                sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), session_time, "sent_to_supplier_at"),
                supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), session_time, "supplier_confirmation_date"),
                supplier_reference=po_data.get('supplier_reference'),
                notes=po_data.get('notes'),
                internal_notes=po_data.get('internal_notes'),
                terms_and_conditions=po_data.get('terms_and_conditions'),
                reasoning_data=po_data.get('reasoning_data'),
                created_at=session_time,
                updated_at=session_time,
                created_by=system_user_id,
                updated_by=system_user_id
            )

            # Add expected_delivery_date if the model supports it
            if hasattr(PurchaseOrder, 'expected_delivery_date'):
                if 'expected_delivery_date_offset_days' in po_data:
                    # Handle offset-based expected delivery dates
                    expected_delivery = adjusted_order_date + timedelta(
                        days=po_data['expected_delivery_date_offset_days']
                    )
                else:
                    expected_delivery = adjusted_estimated_delivery
                new_order.expected_delivery_date = expected_delivery

            db.add(new_order)
            stats["purchase_orders"] += 1

        # Load Purchase Order Items from seed data
        for po_item_data in seed_data.get('purchase_order_items', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                item_uuid = uuid.UUID(po_item_data['id'])
                transformed_id = transform_id(po_item_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse purchase order item UUID",
                             item_id=po_item_data['id'],
                             error=str(e))
                continue

            # Map purchase_order_id if it exists in our map
            po_id_value = po_item_data.get('purchase_order_id')
            if po_id_value:
                po_id_value = order_id_map.get(uuid.UUID(po_id_value), uuid.UUID(po_id_value))

            new_item = PurchaseOrderItem(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                purchase_order_id=str(po_id_value) if po_id_value else None,
                inventory_product_id=po_item_data.get('inventory_product_id'),
                product_name=po_item_data.get('product_name'),
                product_code=po_item_data.get('product_code'),  # Use product_code directly from JSON
                ordered_quantity=po_item_data.get('ordered_quantity', 0.0),
                unit_of_measure=po_item_data.get('unit_of_measure'),
                unit_price=po_item_data.get('unit_price', 0.0),
                line_total=po_item_data.get('line_total', 0.0),
                received_quantity=po_item_data.get('received_quantity', 0.0),
                remaining_quantity=po_item_data.get('remaining_quantity', po_item_data.get('ordered_quantity', 0.0)),
                quality_requirements=po_item_data.get('quality_requirements'),
                item_notes=po_item_data.get('item_notes'),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_item)
            stats["purchase_order_items"] += 1

        # Load Procurement Plans from seed data (if any)
        for plan_data in seed_data.get('procurement_plans', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                plan_uuid = uuid.UUID(plan_data['id'])
                transformed_id = transform_id(plan_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse procurement plan UUID",
                             plan_id=plan_data['id'],
                             error=str(e))
                continue

            # Adjust dates
            adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), session_time, "plan_date")

            new_plan = ProcurementPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
                plan_period_start=parse_date_field(plan_data.get('plan_period_start'), session_time, "plan_period_start"),
                plan_period_end=parse_date_field(plan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=plan_data.get('planning_horizon_days'),
                status=plan_data.get('status', 'draft'),
                plan_type=plan_data.get('plan_type'),
                priority=plan_data.get('priority', 'normal'),
                business_model=plan_data.get('business_model'),
                procurement_strategy=plan_data.get('procurement_strategy'),
                total_requirements=plan_data.get('total_requirements', 0),
                total_estimated_cost=plan_data.get('total_estimated_cost', 0.0),
                total_approved_cost=plan_data.get('total_approved_cost', 0.0),
                cost_variance=plan_data.get('cost_variance', 0.0),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_plan)
            stats["procurement_plans"] += 1

        # Load Replenishment Plans from seed data (if any)
        for replan_data in seed_data.get('replenishment_plans', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                replan_uuid = uuid.UUID(replan_data['id'])
                transformed_id = transform_id(replan_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse replenishment plan UUID",
                             replan_id=replan_data['id'],
                             error=str(e))
                continue

            # Adjust dates
            adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), session_time, "plan_date")

            new_replan = ReplenishmentPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
                plan_period_start=parse_date_field(replan_data.get('plan_period_start'), session_time, "plan_period_start"),
                plan_period_end=parse_date_field(replan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=replan_data.get('planning_horizon_days'),
                status=replan_data.get('status', 'draft'),
                plan_type=replan_data.get('plan_type'),
                priority=replan_data.get('priority', 'normal'),
                business_model=replan_data.get('business_model'),
                total_items=replan_data.get('total_items', 0),
                total_estimated_cost=replan_data.get('total_estimated_cost', 0.0),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_replan)
            stats["replenishment_plans"] += 1

        # Commit all loaded data
        await db.commit()

        # Emit alerts for pending approval POs (CRITICAL for demo dashboard)
        alerts_emitted = 0
        try:
            # Get all pending approval POs that were just created
            pending_approval_pos = await db.execute(
                select(PurchaseOrder).where(
                    PurchaseOrder.tenant_id == virtual_uuid,
                    PurchaseOrder.status == 'pending_approval'
                )
            )
            pending_pos = pending_approval_pos.scalars().all()

            logger.info(
                "Found pending approval POs for alert emission",
                count=len(pending_pos),
                virtual_tenant_id=virtual_tenant_id
            )

            # Emit alerts using refactored function
            if pending_pos:
                alerts_emitted = await _emit_po_approval_alerts_for_demo(
                    virtual_tenant_id=virtual_uuid,
                    pending_pos=pending_pos
                )

        except Exception as e:
            logger.error(
                "Failed to emit PO approval alerts during demo cloning",
                error=str(e),
                virtual_tenant_id=virtual_tenant_id
            )
            # Don't fail the entire cloning process if alert emission fails

        # Calculate total records
        total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
                         stats["purchase_orders"] + stats["purchase_order_items"] +
                         stats["replenishment_plans"] + stats["replenishment_items"])
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Procurement data loading from seed files completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "procurement",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats,
            "alerts_emitted": alerts_emitted
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to load procurement seed data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "procurement",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }


@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint
    Used by orchestrator to verify service availability
    """
    return {
        "service": "procurement",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete all procurement data for a virtual demo tenant"""
    logger.info("Deleting procurement data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records
        po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
        po_item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
        plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
        replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))

        # Delete in order
        await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
        await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
        await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.plan_id.in_(
            select(ProcurementPlan.id).where(ProcurementPlan.tenant_id == virtual_uuid)
        )))
        await db.execute(delete(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
        await db.execute(delete(ReplenishmentPlanItem).where(ReplenishmentPlanItem.replenishment_plan_id.in_(
            select(ReplenishmentPlan.id).where(ReplenishmentPlan.tenant_id == virtual_uuid)
        )))
        await db.execute(delete(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Procurement data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "procurement",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "purchase_orders": po_count,
                "purchase_order_items": po_item_count,
                "procurement_plans": plan_count,
                "replenishment_plans": replan_count,
                "total": po_count + po_item_count + plan_count + replan_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
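Aside (not part of the commit): the cloning code above maps every seed UUID through shared.utils.demo_id_transformer.transform_id, which a comment describes as an XOR transform. That module is not included in this diff; the sketch below shows one way such a deterministic, per-tenant mapping can work and is an assumption about its behaviour, not its actual implementation.

    import uuid


    def xor_transform_id(source_id: str, virtual_tenant_id: uuid.UUID) -> uuid.UUID:
        """Derive a per-tenant UUID by XORing the 128-bit values of both UUIDs (illustrative)."""
        return uuid.UUID(int=uuid.UUID(source_id).int ^ virtual_tenant_id.int)


    # The same seed ID always yields the same virtual ID for a given tenant, so
    # foreign keys (e.g. purchase_order_id on cloned line items) stay consistent.
    seed = "11111111-2222-3333-4444-555555555555"
    tenant = uuid.uuid4()
    assert xor_transform_id(seed, tenant) == xor_transform_id(seed, tenant)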
175
services/procurement/app/api/internal_transfer.py
Normal file
175
services/procurement/app/api/internal_transfer.py
Normal file
@@ -0,0 +1,175 @@
|
||||
"""
|
||||
Internal Transfer API Endpoints
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Body
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.services.internal_transfer_service import InternalTransferService
|
||||
from app.repositories.purchase_order_repository import PurchaseOrderRepository
|
||||
from app.core.database import get_db
|
||||
from shared.auth.tenant_access import verify_tenant_permission_dep
|
||||
from shared.clients import get_recipes_client, get_production_client, get_inventory_client
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# Pydantic models for request validation
|
||||
class InternalTransferItem(BaseModel):
|
||||
product_id: str
|
||||
product_name: Optional[str] = None
|
||||
quantity: float
|
||||
unit_of_measure: str = 'units'
|
||||
|
||||
|
||||
class InternalTransferRequest(BaseModel):
|
||||
parent_tenant_id: str
|
||||
items: List[InternalTransferItem]
|
||||
delivery_date: str
|
||||
notes: Optional[str] = None
|
||||
|
||||
|
||||
class ApprovalRequest(BaseModel):
|
||||
pass # Empty for now, might add approval notes later
|
||||
|
||||
|
||||
def get_internal_transfer_service(db: AsyncSession = Depends(get_db)) -> InternalTransferService:
|
||||
"""Dependency to get internal transfer service"""
|
||||
purchase_order_repository = PurchaseOrderRepository(db)
|
||||
recipe_client = get_recipes_client(config=settings, service_name="procurement-service")
|
||||
production_client = get_production_client(config=settings, service_name="procurement-service")
|
||||
inventory_client = get_inventory_client(config=settings, service_name="procurement-service")
|
||||
|
||||
return InternalTransferService(
|
||||
purchase_order_repository=purchase_order_repository,
|
||||
recipe_client=recipe_client,
|
||||
production_client=production_client,
|
||||
inventory_client=inventory_client
|
||||
)
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/procurement/internal-transfers", response_model=None)
|
||||
async def create_internal_purchase_order(
|
||||
tenant_id: str,
|
||||
transfer_request: InternalTransferRequest,
|
||||
internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Create an internal purchase order from child to parent tenant
|
||||
|
||||
**Enterprise Tier Feature**: Internal transfers require Enterprise subscription.
|
||||
"""
|
||||
try:
|
||||
# Validate subscription tier for internal transfers
|
||||
from shared.subscription.plans import PlanFeatures
|
||||
from shared.clients import get_tenant_client
|
||||
|
||||
tenant_client = get_tenant_client(config=settings, service_name="procurement-service")
|
||||
subscription = await tenant_client.get_tenant_subscription(tenant_id)
|
||||
|
||||
if not subscription:
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="No active subscription found. Internal transfers require Enterprise tier."
|
||||
)
|
||||
|
||||
# Check if tier supports internal transfers
|
||||
if not PlanFeatures.validate_internal_transfers(subscription.get("plan", "starter")):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail=f"Internal transfers require Enterprise tier. Current tier: {subscription.get('plan', 'starter')}"
|
||||
)
|
||||
|
||||
# Parse delivery_date
|
||||
from datetime import datetime
|
||||
delivery_date = datetime.fromisoformat(transfer_request.delivery_date.split('T')[0]).date()
|
||||
|
||||
# Convert Pydantic items to dict
|
||||
items = [item.model_dump() for item in transfer_request.items]
|
||||
|
||||
# Create the internal purchase order
|
||||
result = await internal_transfer_service.create_internal_purchase_order(
|
||||
child_tenant_id=tenant_id,
|
||||
parent_tenant_id=transfer_request.parent_tenant_id,
|
||||
items=items,
|
||||
delivery_date=delivery_date,
|
||||
requested_by_user_id="temp_user_id", # Would come from auth context
|
||||
notes=transfer_request.notes
|
||||
)
|
||||
|
||||
return result
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to create internal purchase order: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/procurement/internal-transfers/{po_id}/approve", response_model=None)
|
||||
async def approve_internal_transfer(
|
||||
tenant_id: str,
|
||||
po_id: str,
|
||||
approval_request: Optional[ApprovalRequest] = None,
|
||||
internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Approve an internal transfer request
|
||||
"""
|
||||
try:
|
||||
approved_by_user_id = "temp_user_id" # Would come from auth context
|
||||
|
||||
result = await internal_transfer_service.approve_internal_transfer(
|
||||
po_id=po_id,
|
||||
approved_by_user_id=approved_by_user_id
|
||||
)
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to approve internal transfer: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/procurement/internal-transfers/pending", response_model=None)
|
||||
async def get_pending_internal_transfers(
|
||||
tenant_id: str,
|
||||
internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get pending internal transfers for a tenant
|
||||
"""
|
||||
try:
|
||||
result = await internal_transfer_service.get_pending_internal_transfers(tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get pending internal transfers: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/procurement/internal-transfers/history", response_model=None)
|
||||
async def get_internal_transfer_history(
|
||||
tenant_id: str,
|
||||
parent_tenant_id: Optional[str] = None,
|
||||
child_tenant_id: Optional[str] = None,
|
||||
start_date: Optional[date] = None,
|
||||
end_date: Optional[date] = None,
|
||||
internal_transfer_service: InternalTransferService = Depends(get_internal_transfer_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get internal transfer history with optional filtering
|
||||
"""
|
||||
try:
|
||||
result = await internal_transfer_service.get_internal_transfer_history(
|
||||
tenant_id=tenant_id,
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
start_date=start_date,
|
||||
end_date=end_date
|
||||
)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get internal transfer history: {str(e)}")
|
||||
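# --------------------------------------------------------------
# Editor's note: a minimal client sketch for the internal-transfer
# creation endpoint above. The host, bearer token, tenant UUIDs and
# the per-item fields are placeholders/assumptions for illustration;
# only parent_tenant_id, items, delivery_date and notes are fields the
# handler actually reads from InternalTransferRequest. The path shown
# is the decorator's path; any gateway prefix (e.g. /api/v1) is omitted.
# --------------------------------------------------------------
import asyncio
import httpx


async def create_internal_transfer_example() -> None:
    """Sketch: POST /tenants/{tenant_id}/procurement/internal-transfers."""
    child_tenant_id = "11111111-1111-1111-1111-111111111111"  # hypothetical child tenant
    payload = {
        "parent_tenant_id": "22222222-2222-2222-2222-222222222222",  # hypothetical parent
        "delivery_date": "2024-07-01T00:00:00",  # ISO string; the handler keeps only the date part
        "notes": "Weekly flour replenishment from the central bakery",
        "items": [
            # Item fields here are illustrative; use whatever InternalTransferRequest's item schema defines.
            {"inventory_product_id": "33333333-3333-3333-3333-333333333333", "quantity": 25, "unit": "kg"}
        ],
    }
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:  # assumed host
        resp = await client.post(
            f"/tenants/{child_tenant_id}/procurement/internal-transfers",
            json=payload,
            headers={"Authorization": "Bearer <token>"},  # placeholder credential
        )
        resp.raise_for_status()
        print(resp.json())


if __name__ == "__main__":
    asyncio.run(create_internal_transfer_example())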
629
services/procurement/app/api/ml_insights.py
Normal file
@@ -0,0 +1,629 @@
|
||||
"""
|
||||
ML Insights API Endpoints for Procurement Service
|
||||
|
||||
Provides endpoints to trigger ML insight generation for:
|
||||
- Supplier performance analysis
|
||||
- Price forecasting and timing recommendations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
import pandas as pd
|
||||
|
||||
from app.core.database import get_db
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/v1/tenants/{tenant_id}/procurement/ml/insights",
|
||||
tags=["ML Insights"]
|
||||
)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST/RESPONSE SCHEMAS - SUPPLIER ANALYSIS
|
||||
# ================================================================
|
||||
|
||||
class SupplierAnalysisRequest(BaseModel):
|
||||
"""Request schema for supplier performance analysis"""
|
||||
supplier_ids: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Specific supplier IDs to analyze. If None, analyzes all suppliers"
|
||||
)
|
||||
lookback_days: int = Field(
|
||||
180,
|
||||
description="Days of historical orders to analyze",
|
||||
ge=30,
|
||||
le=730
|
||||
)
|
||||
min_orders: int = Field(
|
||||
10,
|
||||
description="Minimum orders required for analysis",
|
||||
ge=5,
|
||||
le=100
|
||||
)
|
||||
|
||||
|
||||
class SupplierAnalysisResponse(BaseModel):
|
||||
"""Response schema for supplier performance analysis"""
|
||||
success: bool
|
||||
message: str
|
||||
tenant_id: str
|
||||
suppliers_analyzed: int
|
||||
total_insights_generated: int
|
||||
total_insights_posted: int
|
||||
high_risk_suppliers: int
|
||||
insights_by_supplier: dict
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST/RESPONSE SCHEMAS - PRICE FORECASTING
|
||||
# ================================================================
|
||||
|
||||
class PriceForecastRequest(BaseModel):
|
||||
"""Request schema for price forecasting"""
|
||||
ingredient_ids: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Specific ingredient IDs to forecast. If None, forecasts all ingredients"
|
||||
)
|
||||
lookback_days: int = Field(
|
||||
180,
|
||||
description="Days of historical price data to analyze",
|
||||
ge=90,
|
||||
le=730
|
||||
)
|
||||
forecast_horizon_days: int = Field(
|
||||
30,
|
||||
description="Days to forecast ahead",
|
||||
ge=7,
|
||||
le=90
|
||||
)
|
||||
|
||||
|
||||
class PriceForecastResponse(BaseModel):
|
||||
"""Response schema for price forecasting"""
|
||||
success: bool
|
||||
message: str
|
||||
tenant_id: str
|
||||
ingredients_forecasted: int
|
||||
total_insights_generated: int
|
||||
total_insights_posted: int
|
||||
buy_now_recommendations: int
|
||||
bulk_opportunities: int
|
||||
insights_by_ingredient: dict
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
# ================================================================
|
||||
# API ENDPOINTS - SUPPLIER ANALYSIS
|
||||
# ================================================================
|
||||
|
||||
@router.post("/analyze-suppliers", response_model=SupplierAnalysisResponse)
|
||||
async def trigger_supplier_analysis(
|
||||
tenant_id: str,
|
||||
request_data: SupplierAnalysisRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Trigger supplier performance analysis.
|
||||
|
||||
This endpoint:
|
||||
1. Fetches historical purchase order data for specified suppliers
|
||||
2. Runs the SupplierInsightsOrchestrator to analyze reliability
|
||||
3. Generates insights about supplier performance and risk
|
||||
4. Posts insights to AI Insights Service
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Analysis parameters
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
SupplierAnalysisResponse with analysis results
|
||||
"""
|
||||
logger.info(
|
||||
"ML insights supplier analysis requested",
|
||||
tenant_id=tenant_id,
|
||||
supplier_ids=request_data.supplier_ids,
|
||||
lookback_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
try:
|
||||
# Import ML orchestrator and clients
|
||||
from app.ml.supplier_insights_orchestrator import SupplierInsightsOrchestrator
|
||||
from app.models.purchase_order import PurchaseOrder
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
from app.core.config import settings
|
||||
from sqlalchemy import select
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and clients
|
||||
orchestrator = SupplierInsightsOrchestrator(event_publisher=event_publisher)
|
||||
suppliers_client = SuppliersServiceClient(settings)
|
||||
|
||||
# Get suppliers to analyze from suppliers service via API
|
||||
if request_data.supplier_ids:
|
||||
# Fetch specific suppliers
|
||||
suppliers = []
|
||||
for supplier_id in request_data.supplier_ids:
|
||||
supplier = await suppliers_client.get_supplier_by_id(
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id
|
||||
)
|
||||
if supplier:
|
||||
suppliers.append(supplier)
|
||||
else:
|
||||
# Fetch all active suppliers (limit to 10)
|
||||
all_suppliers = await suppliers_client.get_all_suppliers(
|
||||
tenant_id=tenant_id,
|
||||
is_active=True
|
||||
)
|
||||
suppliers = (all_suppliers or [])[:10] # Limit to prevent timeout
|
||||
|
||||
if not suppliers:
|
||||
return SupplierAnalysisResponse(
|
||||
success=False,
|
||||
message="No suppliers found for analysis",
|
||||
tenant_id=tenant_id,
|
||||
suppliers_analyzed=0,
|
||||
total_insights_generated=0,
|
||||
total_insights_posted=0,
|
||||
high_risk_suppliers=0,
|
||||
insights_by_supplier={},
|
||||
errors=["No suppliers found"]
|
||||
)
|
||||
|
||||
# Calculate date range for order history
|
||||
end_date = datetime.utcnow()
|
||||
start_date = end_date - timedelta(days=request_data.lookback_days)
|
||||
|
||||
# Process each supplier
|
||||
total_insights_generated = 0
|
||||
total_insights_posted = 0
|
||||
high_risk_suppliers = 0
|
||||
insights_by_supplier = {}
|
||||
errors = []
|
||||
|
||||
for supplier in suppliers:
|
||||
try:
|
||||
supplier_id = str(supplier['id'])
|
||||
supplier_name = supplier.get('name', 'Unknown')
|
||||
logger.info(f"Analyzing supplier {supplier_name} ({supplier_id})")
|
||||
|
||||
# Get purchase orders for this supplier from local database
|
||||
po_query = select(PurchaseOrder).where(
|
||||
PurchaseOrder.tenant_id == UUID(tenant_id),
|
||||
PurchaseOrder.supplier_id == UUID(supplier_id),
|
||||
PurchaseOrder.order_date >= start_date,
|
||||
PurchaseOrder.order_date <= end_date
|
||||
)
|
||||
|
||||
po_result = await db.execute(po_query)
|
||||
purchase_orders = po_result.scalars().all()
|
||||
|
||||
if len(purchase_orders) < request_data.min_orders:
|
||||
logger.warning(
|
||||
f"Insufficient orders for supplier {supplier_id}: "
|
||||
f"{len(purchase_orders)} < {request_data.min_orders} required"
|
||||
)
|
||||
continue
|
||||
|
||||
# Create order history DataFrame
|
||||
order_data = []
|
||||
for po in purchase_orders:
|
||||
# Calculate delivery performance
|
||||
if po.delivery_date and po.expected_delivery_date:
|
||||
days_late = (po.delivery_date - po.expected_delivery_date).days
|
||||
on_time = days_late <= 0
|
||||
else:
|
||||
days_late = 0
|
||||
on_time = True
|
||||
|
||||
# Approximate quality score from order status (heuristic: completed = 100, anything else = 80)
|
||||
quality_score = 100 if po.status == 'completed' else 80
|
||||
|
||||
order_data.append({
|
||||
'order_date': po.order_date,
|
||||
'expected_delivery_date': po.expected_delivery_date,
|
||||
'delivery_date': po.delivery_date,
|
||||
'days_late': days_late,
|
||||
'on_time': on_time,
|
||||
'quality_score': quality_score,
|
||||
'total_amount': float(po.total_amount) if po.total_amount else 0
|
||||
})
|
||||
|
||||
order_history = pd.DataFrame(order_data)
|
||||
|
||||
# Run supplier analysis
|
||||
results = await orchestrator.analyze_and_post_supplier_insights(
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id,
|
||||
order_history=order_history,
|
||||
min_orders=request_data.min_orders
|
||||
)
|
||||
|
||||
# Track results
|
||||
total_insights_generated += results['insights_generated']
|
||||
total_insights_posted += results['insights_posted']
|
||||
|
||||
reliability_score = results.get('reliability_score', 100)
|
||||
if reliability_score < 70:
|
||||
high_risk_suppliers += 1
|
||||
|
||||
insights_by_supplier[supplier_id] = {
|
||||
'supplier_name': supplier_name,
|
||||
'insights_posted': results['insights_posted'],
|
||||
'reliability_score': reliability_score,
|
||||
'orders_analyzed': results['orders_analyzed']
|
||||
}
|
||||
|
||||
logger.info(
|
||||
f"Supplier {supplier_id} analysis complete",
|
||||
insights_posted=results['insights_posted'],
|
||||
reliability_score=reliability_score
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error analyzing supplier {supplier_id}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
errors.append(error_msg)
|
||||
|
||||
# Close orchestrator
|
||||
await orchestrator.close()
|
||||
|
||||
# Build response
|
||||
response = SupplierAnalysisResponse(
|
||||
success=total_insights_posted > 0,
|
||||
message=f"Successfully analyzed {len(insights_by_supplier)} suppliers, generated {total_insights_posted} insights",
|
||||
tenant_id=tenant_id,
|
||||
suppliers_analyzed=len(insights_by_supplier),
|
||||
total_insights_generated=total_insights_generated,
|
||||
total_insights_posted=total_insights_posted,
|
||||
high_risk_suppliers=high_risk_suppliers,
|
||||
insights_by_supplier=insights_by_supplier,
|
||||
errors=errors
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"ML insights supplier analysis complete",
|
||||
tenant_id=tenant_id,
|
||||
total_insights=total_insights_posted,
|
||||
high_risk_suppliers=high_risk_suppliers
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"ML insights supplier analysis failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Supplier analysis failed: {str(e)}"
|
||||
)
|
||||
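# --------------------------------------------------------------
# Editor's note: the reliability scoring itself lives in
# SupplierInsightsOrchestrator, which is not part of this file. The
# sketch below only illustrates, under assumed weightings, how a score
# could be derived from the order_history columns built above
# (on_time, days_late, quality_score); it is not the orchestrator's
# actual logic.
# --------------------------------------------------------------
import pandas as pd


def sketch_reliability_score(order_history: pd.DataFrame) -> float:
    """Toy reliability score in [0, 100] computed from the per-order columns above."""
    on_time_rate = float(order_history["on_time"].mean())            # share of on-time orders
    avg_quality = float(order_history["quality_score"].mean())       # 0-100 heuristic per order
    avg_days_late = float(order_history["days_late"].clip(lower=0).mean())
    # Assumed weighting: punctuality 60%, quality 40%, minus a small lateness penalty.
    score = 60.0 * on_time_rate + 0.4 * avg_quality - 2.0 * avg_days_late
    return max(0.0, min(100.0, score))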
|
||||
|
||||
# ================================================================
|
||||
# API ENDPOINTS - PRICE FORECASTING
|
||||
# ================================================================
|
||||
|
||||
@router.post("/forecast-prices", response_model=PriceForecastResponse)
|
||||
async def trigger_price_forecasting(
|
||||
tenant_id: str,
|
||||
request_data: PriceForecastRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Trigger price forecasting for procurement ingredients.
|
||||
|
||||
This endpoint:
|
||||
1. Fetches historical price data for specified ingredients
|
||||
2. Runs the PriceInsightsOrchestrator to forecast future prices
|
||||
3. Generates insights about optimal purchase timing
|
||||
4. Posts insights to AI Insights Service
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Forecasting parameters
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
PriceForecastResponse with forecasting results
|
||||
"""
|
||||
logger.info(
|
||||
"ML insights price forecasting requested",
|
||||
tenant_id=tenant_id,
|
||||
ingredient_ids=request_data.ingredient_ids,
|
||||
lookback_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
try:
|
||||
# Import ML orchestrator and clients
|
||||
from app.ml.price_insights_orchestrator import PriceInsightsOrchestrator
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from app.models.purchase_order import PurchaseOrderItem
|
||||
from app.core.config import settings
|
||||
from sqlalchemy import select
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and inventory client
|
||||
orchestrator = PriceInsightsOrchestrator(event_publisher=event_publisher)
|
||||
inventory_client = InventoryServiceClient(settings)
|
||||
|
||||
# Get ingredients to forecast from inventory service via API
|
||||
if request_data.ingredient_ids:
|
||||
# Fetch specific ingredients
|
||||
ingredients = []
|
||||
for ingredient_id in request_data.ingredient_ids:
|
||||
ingredient = await inventory_client.get_ingredient_by_id(
|
||||
ingredient_id=ingredient_id,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
if ingredient:
|
||||
ingredients.append(ingredient)
|
||||
else:
|
||||
# Fetch all ingredients for tenant (limit to 10)
|
||||
all_ingredients = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
|
||||
ingredients = all_ingredients[:10] if all_ingredients else [] # Limit to prevent timeout
|
||||
|
||||
if not ingredients:
|
||||
return PriceForecastResponse(
|
||||
success=False,
|
||||
message="No ingredients found for forecasting",
|
||||
tenant_id=tenant_id,
|
||||
ingredients_forecasted=0,
|
||||
total_insights_generated=0,
|
||||
total_insights_posted=0,
|
||||
buy_now_recommendations=0,
|
||||
bulk_opportunities=0,
|
||||
insights_by_ingredient={},
|
||||
errors=["No ingredients found"]
|
||||
)
|
||||
|
||||
# Calculate date range for price history
|
||||
end_date = datetime.utcnow()
|
||||
start_date = end_date - timedelta(days=request_data.lookback_days)
|
||||
|
||||
# Process each ingredient
|
||||
total_insights_generated = 0
|
||||
total_insights_posted = 0
|
||||
buy_now_recommendations = 0
|
||||
bulk_opportunities = 0
|
||||
insights_by_ingredient = {}
|
||||
errors = []
|
||||
|
||||
for ingredient in ingredients:
|
||||
try:
|
||||
ingredient_id = str(ingredient['id'])
|
||||
ingredient_name = ingredient.get('name', 'Unknown Ingredient')
|
||||
logger.info(f"Forecasting prices for {ingredient_name} ({ingredient_id})")
|
||||
|
||||
# Get price history from purchase order items
|
||||
poi_query = select(PurchaseOrderItem).where(
|
||||
PurchaseOrderItem.inventory_product_id == UUID(ingredient_id)
|
||||
).join(
|
||||
PurchaseOrderItem.purchase_order
|
||||
).where(
|
||||
PurchaseOrderItem.purchase_order.has(
|
||||
tenant_id=UUID(tenant_id)
|
||||
)
|
||||
)
|
||||
|
||||
poi_result = await db.execute(poi_query)
|
||||
purchase_items = poi_result.scalars().all()
|
||||
|
||||
if len(purchase_items) < 30:
|
||||
logger.warning(
|
||||
f"Insufficient price history for ingredient {ingredient_id}: "
|
||||
f"{len(purchase_items)} items"
|
||||
)
|
||||
continue
|
||||
|
||||
# Create price history DataFrame
|
||||
price_data = []
|
||||
for item in purchase_items:
|
||||
if item.unit_price and item.quantity:
|
||||
price_data.append({
|
||||
'date': item.purchase_order.order_date,
|
||||
'price': float(item.unit_price),
|
||||
'quantity': float(item.quantity),
|
||||
'supplier_id': str(item.purchase_order.supplier_id)
|
||||
})
|
||||
|
||||
price_history = pd.DataFrame(price_data)
|
||||
price_history = price_history.sort_values('date')
|
||||
|
||||
# Run price forecasting
|
||||
results = await orchestrator.forecast_and_post_insights(
|
||||
tenant_id=tenant_id,
|
||||
ingredient_id=ingredient_id,
|
||||
price_history=price_history,
|
||||
forecast_horizon_days=request_data.forecast_horizon_days,
|
||||
min_history_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
# Track results
|
||||
total_insights_generated += results['insights_generated']
|
||||
total_insights_posted += results['insights_posted']
|
||||
|
||||
recommendation = results.get('recommendation', {})
|
||||
if recommendation.get('action') == 'buy_now':
|
||||
buy_now_recommendations += 1
|
||||
|
||||
bulk_opp = results.get('bulk_opportunity', {})
|
||||
if bulk_opp.get('has_bulk_opportunity'):
|
||||
bulk_opportunities += 1
|
||||
|
||||
insights_by_ingredient[ingredient_id] = {
|
||||
'ingredient_name': ingredient_name,
|
||||
'insights_posted': results['insights_posted'],
|
||||
'recommendation': recommendation.get('action'),
|
||||
'has_bulk_opportunity': bulk_opp.get('has_bulk_opportunity', False)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
f"Ingredient {ingredient_id} forecasting complete",
|
||||
insights_posted=results['insights_posted'],
|
||||
recommendation=recommendation.get('action')
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error forecasting ingredient {ingredient_id}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
errors.append(error_msg)
|
||||
|
||||
# Close orchestrator
|
||||
await orchestrator.close()
|
||||
|
||||
# Build response
|
||||
response = PriceForecastResponse(
|
||||
success=total_insights_posted > 0,
|
||||
message=f"Successfully forecasted {len(insights_by_ingredient)} ingredients, generated {total_insights_posted} insights",
|
||||
tenant_id=tenant_id,
|
||||
ingredients_forecasted=len(insights_by_ingredient),
|
||||
total_insights_generated=total_insights_generated,
|
||||
total_insights_posted=total_insights_posted,
|
||||
buy_now_recommendations=buy_now_recommendations,
|
||||
bulk_opportunities=bulk_opportunities,
|
||||
insights_by_ingredient=insights_by_ingredient,
|
||||
errors=errors
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"ML insights price forecasting complete",
|
||||
tenant_id=tenant_id,
|
||||
total_insights=total_insights_posted,
|
||||
buy_now_recommendations=buy_now_recommendations,
|
||||
bulk_opportunities=bulk_opportunities
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"ML insights price forecasting failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Price forecasting failed: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/health")
|
||||
async def ml_insights_health():
|
||||
"""Health check for ML insights endpoints"""
|
||||
return {
|
||||
"status": "healthy",
|
||||
"service": "procurement-ml-insights",
|
||||
"endpoints": [
|
||||
"POST /ml/insights/analyze-suppliers",
|
||||
"POST /ml/insights/forecast-prices",
|
||||
"POST /internal/ml/generate-price-insights"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL API ENDPOINT - Called by demo session service
|
||||
# ================================================================
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
|
||||
# Create a separate router for internal endpoints to avoid the tenant prefix
|
||||
internal_router = APIRouter(
|
||||
tags=["ML Insights - Internal"]
|
||||
)
|
||||
|
||||
|
||||
@internal_router.post("/api/v1/tenants/{tenant_id}/procurement/internal/ml/generate-price-insights")
|
||||
async def generate_price_insights_internal(
|
||||
tenant_id: str,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Internal endpoint to trigger price insights generation for demo sessions.
|
||||
|
||||
This endpoint is called by the demo-session service after cloning data.
|
||||
It uses the same ML logic as the public endpoint but with optimized defaults.
|
||||
|
||||
Security: Protected by x-internal-service header check.
|
||||
|
||||
Args:
|
||||
tenant_id: The tenant UUID
|
||||
request: FastAPI request object
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
{
|
||||
"insights_posted": int,
|
||||
"tenant_id": str,
|
||||
"status": str
|
||||
}
|
||||
"""
|
||||
# Verify internal service header
|
||||
if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
|
||||
logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="This endpoint is for internal service use only"
|
||||
)
|
||||
|
||||
logger.info("Internal price insights generation triggered", tenant_id=tenant_id)
|
||||
|
||||
try:
|
||||
# Use the existing price forecasting logic with sensible defaults
|
||||
request_data = PriceForecastRequest(
|
||||
ingredient_ids=None, # Analyze all ingredients
|
||||
lookback_days=180, # 6 months of history
|
||||
forecast_horizon_days=30 # Forecast 30 days ahead
|
||||
)
|
||||
|
||||
# Call the existing price forecasting endpoint logic
|
||||
result = await trigger_price_forecasting(
|
||||
tenant_id=tenant_id,
|
||||
request_data=request_data,
|
||||
request=request,
|
||||
db=db
|
||||
)
|
||||
|
||||
# Return simplified response for internal use
|
||||
return {
|
||||
"insights_posted": result.total_insights_posted,
|
||||
"tenant_id": tenant_id,
|
||||
"status": "success" if result.success else "failed",
|
||||
"message": result.message,
|
||||
"ingredients_analyzed": result.ingredients_forecasted,
|
||||
"buy_now_recommendations": result.buy_now_recommendations
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Internal price insights generation failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Internal price insights generation failed: {str(e)}"
|
||||
)
|
||||
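# --------------------------------------------------------------
# Editor's note: example request bodies for the two ML endpoints above,
# using only fields declared in SupplierAnalysisRequest and
# PriceForecastRequest. The host/port and tenant UUID are placeholders;
# add whatever auth headers the gateway in front of this service requires.
# --------------------------------------------------------------
import httpx

TENANT_ID = "11111111-1111-1111-1111-111111111111"  # placeholder
ML_BASE = f"http://localhost:8000/api/v1/tenants/{TENANT_ID}/procurement/ml/insights"  # assumed host

supplier_analysis_body = {
    "supplier_ids": None,       # None = analyze all active suppliers (endpoint caps this at 10)
    "lookback_days": 180,
    "min_orders": 10,
}

price_forecast_body = {
    "ingredient_ids": None,     # None = forecast all ingredients (endpoint caps this at 10)
    "lookback_days": 180,
    "forecast_horizon_days": 30,
}


def run_ml_insight_examples() -> None:
    """Trigger both ML insight jobs and print their summary messages."""
    with httpx.Client(timeout=120) as client:
        suppliers = client.post(f"{ML_BASE}/analyze-suppliers", json=supplier_analysis_body).json()
        prices = client.post(f"{ML_BASE}/forecast-prices", json=price_forecast_body).json()
    print(suppliers["message"])
    print(prices["message"])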
346
services/procurement/app/api/procurement_plans.py
Normal file
@@ -0,0 +1,346 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/api/procurement_plans.py
|
||||
# ================================================================
|
||||
"""
|
||||
Procurement Plans API - Endpoints for procurement planning
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import List, Optional
|
||||
from datetime import date
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.services.procurement_service import ProcurementService
|
||||
from app.schemas.procurement_schemas import (
|
||||
ProcurementPlanResponse,
|
||||
GeneratePlanRequest,
|
||||
GeneratePlanResponse,
|
||||
AutoGenerateProcurementRequest,
|
||||
AutoGenerateProcurementResponse,
|
||||
PaginatedProcurementPlans,
|
||||
)
|
||||
from shared.routing import RouteBuilder
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["procurement-plans"])
|
||||
|
||||
|
||||
def get_procurement_service(db: AsyncSession = Depends(get_db)) -> ProcurementService:
|
||||
"""Dependency to get procurement service"""
|
||||
return ProcurementService(db, settings)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# ORCHESTRATOR ENTRY POINT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_operations_route("auto-generate"),
|
||||
response_model=AutoGenerateProcurementResponse
|
||||
)
|
||||
async def auto_generate_procurement(
|
||||
request_data: AutoGenerateProcurementRequest,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Auto-generate procurement plan from forecast data (called by Orchestrator)
|
||||
|
||||
This is the main entry point for orchestrated procurement planning.
|
||||
The Orchestrator calls Forecasting Service first, then passes forecast data here.
|
||||
|
||||
Flow:
|
||||
1. Receive forecast data from orchestrator
|
||||
2. Calculate procurement requirements
|
||||
3. Apply Recipe Explosion for locally-produced items
|
||||
4. Create procurement plan
|
||||
5. Optionally create and auto-approve purchase orders
|
||||
|
||||
Returns:
|
||||
AutoGenerateProcurementResponse with plan details and created POs
|
||||
"""
|
||||
try:
|
||||
logger.info("Auto-generate procurement endpoint called",
|
||||
tenant_id=tenant_id,
|
||||
has_forecast_data=bool(request_data.forecast_data))
|
||||
|
||||
result = await service.auto_generate_procurement(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
request=request_data
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error in auto_generate_procurement endpoint", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# MANUAL PROCUREMENT PLAN GENERATION
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_base_route("plans"),
|
||||
response_model=GeneratePlanResponse
|
||||
)
|
||||
async def generate_procurement_plan(
|
||||
request_data: GeneratePlanRequest,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""
|
||||
Generate a new procurement plan (manual/UI-driven)
|
||||
|
||||
This endpoint is used for manual procurement planning from the UI.
|
||||
Unlike auto_generate_procurement, this generates its own forecasts.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Plan generation parameters
|
||||
|
||||
Returns:
|
||||
GeneratePlanResponse with the created plan
|
||||
"""
|
||||
try:
|
||||
logger.info("Generate procurement plan endpoint called",
|
||||
tenant_id=tenant_id,
|
||||
plan_date=request_data.plan_date)
|
||||
|
||||
result = await service.generate_procurement_plan(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
request=request_data
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error generating procurement plan", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PROCUREMENT PLAN CRUD
|
||||
# ================================================================
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("plans/current"),
|
||||
response_model=Optional[ProcurementPlanResponse]
|
||||
)
|
||||
async def get_current_plan(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""Get the current day's procurement plan"""
|
||||
try:
|
||||
plan = await service.get_current_plan(uuid.UUID(tenant_id))
|
||||
return plan
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting current plan", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_detail_route("plans", "plan_id"),
|
||||
response_model=ProcurementPlanResponse
|
||||
)
|
||||
async def get_plan_by_id(
|
||||
plan_id: str,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""Get procurement plan by ID"""
|
||||
try:
|
||||
plan = await service.get_plan_by_id(uuid.UUID(tenant_id), uuid.UUID(plan_id))
|
||||
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Plan not found")
|
||||
|
||||
return plan
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error getting plan by ID", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("plans/date/{plan_date}"),
|
||||
response_model=Optional[ProcurementPlanResponse]
|
||||
)
|
||||
async def get_plan_by_date(
|
||||
plan_date: date,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""Get procurement plan for a specific date"""
|
||||
try:
|
||||
plan = await service.get_plan_by_date(uuid.UUID(tenant_id), plan_date)
|
||||
return plan
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting plan by date", error=str(e), tenant_id=tenant_id, plan_date=plan_date)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("plans"),
|
||||
response_model=PaginatedProcurementPlans
|
||||
)
|
||||
async def list_procurement_plans(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=50, ge=1, le=100),
|
||||
service: ProcurementService = Depends(get_procurement_service),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""List all procurement plans for tenant with pagination"""
|
||||
try:
|
||||
from app.repositories.procurement_plan_repository import ProcurementPlanRepository
|
||||
|
||||
repo = ProcurementPlanRepository(db)
|
||||
plans = await repo.list_plans(uuid.UUID(tenant_id), skip=skip, limit=limit)
|
||||
total = await repo.count_plans(uuid.UUID(tenant_id))
|
||||
|
||||
plans_response = [ProcurementPlanResponse.model_validate(p) for p in plans]
|
||||
|
||||
return PaginatedProcurementPlans(
|
||||
plans=plans_response,
|
||||
total=total,
|
||||
page=skip // limit + 1,
|
||||
limit=limit,
|
||||
has_more=(skip + limit) < total
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error listing procurement plans", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
|
||||
route_builder.build_resource_action_route("plans", "plan_id", "status")
|
||||
)
|
||||
async def update_plan_status(
|
||||
plan_id: str,
|
||||
status: str = Query(..., regex="^(draft|pending_approval|approved|in_execution|completed|cancelled)$"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
notes: Optional[str] = None,
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""Update procurement plan status"""
|
||||
try:
|
||||
updated_plan = await service.update_plan_status(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
plan_id=uuid.UUID(plan_id),
|
||||
status=status,
|
||||
approval_notes=notes
|
||||
)
|
||||
|
||||
if not updated_plan:
|
||||
raise HTTPException(status_code=404, detail="Plan not found")
|
||||
|
||||
return updated_plan
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error updating plan status", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_resource_action_route("plans", "plan_id", "create-purchase-orders")
|
||||
)
|
||||
async def create_purchase_orders_from_plan(
|
||||
plan_id: str,
|
||||
auto_approve: bool = Query(default=False, description="Auto-approve qualifying purchase orders"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service)
|
||||
):
|
||||
"""
|
||||
Create purchase orders from procurement plan requirements
|
||||
|
||||
Groups requirements by supplier and creates POs automatically.
|
||||
Optionally evaluates auto-approval rules for qualifying POs.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
plan_id: Procurement plan UUID
|
||||
auto_approve: Whether to auto-approve qualifying POs
|
||||
|
||||
Returns:
|
||||
Summary of created, approved, and failed purchase orders
|
||||
"""
|
||||
try:
|
||||
result = await service.create_purchase_orders_from_plan(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
plan_id=uuid.UUID(plan_id),
|
||||
auto_approve=auto_approve
|
||||
)
|
||||
|
||||
if not result.get('success'):
|
||||
raise HTTPException(status_code=400, detail=result.get('error', 'Failed to create purchase orders'))
|
||||
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error creating POs from plan", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# TESTING AND UTILITIES
|
||||
# ================================================================
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_action_route("plans", "plan_id", "requirements")
|
||||
)
|
||||
async def get_plan_requirements(
|
||||
plan_id: str,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: ProcurementService = Depends(get_procurement_service),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get all requirements for a procurement plan"""
|
||||
try:
|
||||
from app.repositories.procurement_plan_repository import ProcurementRequirementRepository
|
||||
|
||||
repo = ProcurementRequirementRepository(db)
|
||||
requirements = await repo.get_requirements_by_plan(uuid.UUID(plan_id))
|
||||
|
||||
return {
|
||||
"plan_id": plan_id,
|
||||
"requirements_count": len(requirements),
|
||||
"requirements": [
|
||||
{
|
||||
"id": str(req.id),
|
||||
"requirement_number": req.requirement_number,
|
||||
"product_name": req.product_name,
|
||||
"net_requirement": float(req.net_requirement),
|
||||
"unit_of_measure": req.unit_of_measure,
|
||||
"priority": req.priority,
|
||||
"status": req.status,
|
||||
"is_locally_produced": req.is_locally_produced,
|
||||
"bom_explosion_level": req.bom_explosion_level,
|
||||
"supplier_name": req.supplier_name,
|
||||
"estimated_total_cost": float(req.estimated_total_cost or 0)
|
||||
}
|
||||
for req in requirements
|
||||
]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting plan requirements", error=str(e), tenant_id=tenant_id, plan_id=plan_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
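# --------------------------------------------------------------
# Editor's note: a sketch of the manual plan lifecycle exposed above
# (generate plan -> approve -> create purchase orders). Concrete paths
# come from RouteBuilder and are assumed here to follow the
# /tenants/{tenant_id}/procurement/plans pattern used elsewhere in this
# service; GeneratePlanRequest is assumed to accept at least plan_date,
# the only field this module reads, and the response shape is guessed.
# --------------------------------------------------------------
import httpx


def plan_lifecycle_sketch(base_url: str, tenant_id: str, token: str) -> dict:
    """Generate a plan, mark it approved, then create POs from it with auto-approval."""
    prefix = f"{base_url}/tenants/{tenant_id}/procurement"  # assumed prefix
    headers = {"Authorization": f"Bearer {token}"}
    with httpx.Client(headers=headers, timeout=60) as client:
        generated = client.post(f"{prefix}/plans", json={"plan_date": "2024-07-01"}).json()
        # GeneratePlanResponse's shape is not shown in this commit; adjust the key lookup as needed.
        plan_id = generated.get("plan", {}).get("id") or generated.get("id")
        client.patch(f"{prefix}/plans/{plan_id}/status", params={"status": "approved"})
        return client.post(
            f"{prefix}/plans/{plan_id}/create-purchase-orders",
            params={"auto_approve": True},
        ).json()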
602
services/procurement/app/api/purchase_orders.py
Normal file
@@ -0,0 +1,602 @@
|
||||
# ================================================================
|
||||
# services/procurement/app/api/purchase_orders.py
|
||||
# ================================================================
|
||||
"""
|
||||
Purchase Orders API - Endpoints for purchase order management
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import List, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.services.purchase_order_service import PurchaseOrderService
|
||||
from app.services.overdue_po_detector import OverduePODetector
|
||||
from app.schemas.purchase_order_schemas import (
|
||||
PurchaseOrderCreate,
|
||||
PurchaseOrderUpdate,
|
||||
PurchaseOrderResponse,
|
||||
PurchaseOrderWithSupplierResponse,
|
||||
PurchaseOrderApproval,
|
||||
DeliveryCreate,
|
||||
DeliveryResponse,
|
||||
SupplierInvoiceCreate,
|
||||
SupplierInvoiceResponse,
|
||||
)
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.redis_utils import get_value, set_with_ttl
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('procurement')
|
||||
router = APIRouter(tags=["purchase-orders"])
|
||||
|
||||
|
||||
def get_po_service(db: AsyncSession = Depends(get_db)) -> PurchaseOrderService:
|
||||
"""Dependency to get purchase order service"""
|
||||
return PurchaseOrderService(db, settings)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER CRUD
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_base_route("purchase-orders"),
|
||||
response_model=PurchaseOrderResponse,
|
||||
status_code=201
|
||||
)
|
||||
async def create_purchase_order(
|
||||
po_data: PurchaseOrderCreate,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Create a new purchase order with items
|
||||
|
||||
Creates a PO with automatic approval rules evaluation.
|
||||
Links to procurement plan if procurement_plan_id is provided.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_data: Purchase order creation data
|
||||
|
||||
Returns:
|
||||
PurchaseOrderResponse with created PO details
|
||||
"""
|
||||
try:
|
||||
logger.info("Create PO endpoint called", tenant_id=tenant_id)
|
||||
|
||||
po = await service.create_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_data=po_data
|
||||
)
|
||||
|
||||
return PurchaseOrderResponse.model_validate(po)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating purchase order", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_detail_route("purchase-orders", "po_id"),
|
||||
response_model=PurchaseOrderWithSupplierResponse
|
||||
)
|
||||
async def get_purchase_order(
|
||||
po_id: str,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""Get purchase order by ID with items"""
|
||||
try:
|
||||
po = await service.get_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id)
|
||||
)
|
||||
|
||||
if not po:
|
||||
raise HTTPException(status_code=404, detail="Purchase order not found")
|
||||
|
||||
return PurchaseOrderWithSupplierResponse.model_validate(po)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error getting purchase order", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("purchase-orders"),
|
||||
response_model=List[PurchaseOrderResponse]
|
||||
)
|
||||
async def list_purchase_orders(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
skip: int = Query(default=0, ge=0),
|
||||
limit: int = Query(default=50, ge=1, le=100),
|
||||
supplier_id: Optional[str] = Query(default=None),
|
||||
status: Optional[str] = Query(default=None),
|
||||
enrich_supplier: bool = Query(default=True, description="Include supplier details (slower)"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
List purchase orders with filters and Redis caching (20s TTL)
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
skip: Number of records to skip (pagination)
|
||||
limit: Maximum number of records to return
|
||||
supplier_id: Filter by supplier ID (optional)
|
||||
status: Filter by status (optional)
|
||||
enrich_supplier: Whether to enrich with supplier data (default: True)
|
||||
|
||||
Returns:
|
||||
List of purchase orders
|
||||
"""
|
||||
try:
|
||||
# PERFORMANCE OPTIMIZATION: Cache even with status filter for dashboard queries
|
||||
# Only skip cache for supplier_id filter and pagination (skip > 0)
|
||||
cache_key = None
|
||||
if skip == 0 and supplier_id is None:
|
||||
cache_key = f"purchase_orders:{tenant_id}:limit:{limit}:status:{status}:enrich:{enrich_supplier}"
|
||||
try:
|
||||
cached_result = await get_value(cache_key)
|
||||
if cached_result is not None:
|
||||
logger.debug("Cache hit for purchase orders", cache_key=cache_key, tenant_id=tenant_id, status=status)
|
||||
return [PurchaseOrderResponse(**po) for po in cached_result]
|
||||
except Exception as e:
|
||||
logger.warning("Cache read failed, continuing without cache", cache_key=cache_key, error=str(e))
|
||||
|
||||
# Cache miss - fetch from database
|
||||
pos = await service.list_purchase_orders(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
supplier_id=uuid.UUID(supplier_id) if supplier_id else None,
|
||||
status=status,
|
||||
enrich_supplier=enrich_supplier
|
||||
)
|
||||
|
||||
result = [PurchaseOrderResponse.model_validate(po) for po in pos]
|
||||
|
||||
# PERFORMANCE OPTIMIZATION: Cache the result (20s TTL for purchase orders)
|
||||
if cache_key:
|
||||
try:
|
||||
import json
|
||||
await set_with_ttl(cache_key, json.dumps([po.model_dump(mode="json") for po in result]), ttl=20)  # mode="json" keeps UUID/datetime fields JSON-serializable
|
||||
logger.debug("Cached purchase orders", cache_key=cache_key, ttl=20, tenant_id=tenant_id, status=status)
|
||||
except Exception as e:
|
||||
logger.warning("Cache write failed, continuing without caching", cache_key=cache_key, error=str(e))
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error listing purchase orders", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
|
||||
route_builder.build_resource_detail_route("purchase-orders", "po_id"),
|
||||
response_model=PurchaseOrderResponse
|
||||
)
|
||||
async def update_purchase_order(
|
||||
po_id: str,
|
||||
po_data: PurchaseOrderUpdate,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Update purchase order information
|
||||
|
||||
Only draft or pending_approval orders can be modified.
|
||||
Financial field changes trigger automatic total recalculation.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
po_data: Update data
|
||||
|
||||
Returns:
|
||||
Updated purchase order
|
||||
"""
|
||||
try:
|
||||
po = await service.update_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id),
|
||||
po_data=po_data
|
||||
)
|
||||
|
||||
if not po:
|
||||
raise HTTPException(status_code=404, detail="Purchase order not found")
|
||||
|
||||
return PurchaseOrderResponse.model_validate(po)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error updating purchase order", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
|
||||
route_builder.build_resource_action_route("purchase-orders", "po_id", "status")
|
||||
)
|
||||
async def update_order_status(
|
||||
po_id: str,
|
||||
status: str = Query(..., description="New status"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
notes: Optional[str] = Query(default=None),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Update purchase order status
|
||||
|
||||
Validates status transitions to prevent invalid state changes.
|
||||
|
||||
Valid transitions (restated as a lookup-table sketch after this endpoint):
|
||||
- draft -> pending_approval, approved, cancelled
|
||||
- pending_approval -> approved, rejected, cancelled
|
||||
- approved -> sent_to_supplier, cancelled
|
||||
- sent_to_supplier -> confirmed, cancelled
|
||||
- confirmed -> in_production, cancelled
|
||||
- in_production -> shipped, cancelled
|
||||
- shipped -> delivered, cancelled
|
||||
- delivered -> completed
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
status: New status
|
||||
notes: Optional status change notes
|
||||
|
||||
Returns:
|
||||
Updated purchase order
|
||||
"""
|
||||
try:
|
||||
po = await service.update_order_status(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id),
|
||||
status=status,
|
||||
notes=notes
|
||||
)
|
||||
|
||||
if not po:
|
||||
raise HTTPException(status_code=404, detail="Purchase order not found")
|
||||
|
||||
return PurchaseOrderResponse.model_validate(po)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error updating PO status", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
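# --------------------------------------------------------------
# Editor's note: the transitions listed in update_order_status's
# docstring, restated as a lookup table. The real validation happens in
# PurchaseOrderService; this sketch only shows one way such a check
# could look, not the service's actual implementation.
# --------------------------------------------------------------
VALID_PO_TRANSITIONS = {
    "draft": {"pending_approval", "approved", "cancelled"},
    "pending_approval": {"approved", "rejected", "cancelled"},
    "approved": {"sent_to_supplier", "cancelled"},
    "sent_to_supplier": {"confirmed", "cancelled"},
    "confirmed": {"in_production", "cancelled"},
    "in_production": {"shipped", "cancelled"},
    "shipped": {"delivered", "cancelled"},
    "delivered": {"completed"},
}


def is_valid_po_transition(current_status: str, new_status: str) -> bool:
    """Return True if a purchase order may move from current_status to new_status."""
    return new_status in VALID_PO_TRANSITIONS.get(current_status, set())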
|
||||
|
||||
# ================================================================
|
||||
# APPROVAL WORKFLOW
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_resource_action_route("purchase-orders", "po_id", "approve"),
|
||||
response_model=PurchaseOrderResponse
|
||||
)
|
||||
async def approve_purchase_order(
|
||||
po_id: str,
|
||||
approval_data: PurchaseOrderApproval,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Approve or reject a purchase order
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
approval_data: Approval or rejection data
|
||||
|
||||
Returns:
|
||||
Updated purchase order
|
||||
"""
|
||||
try:
|
||||
if approval_data.action == "approve":
|
||||
po = await service.approve_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id),
|
||||
approved_by=approval_data.approved_by,
|
||||
approval_notes=approval_data.notes
|
||||
)
|
||||
elif approval_data.action == "reject":
|
||||
po = await service.reject_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id),
|
||||
rejected_by=approval_data.approved_by,
|
||||
rejection_reason=approval_data.notes or "No reason provided"
|
||||
)
|
||||
else:
|
||||
raise ValueError("Invalid action. Must be 'approve' or 'reject'")
|
||||
|
||||
if not po:
|
||||
raise HTTPException(status_code=404, detail="Purchase order not found")
|
||||
|
||||
return PurchaseOrderResponse.model_validate(po)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error in PO approval workflow", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_resource_action_route("purchase-orders", "po_id", "cancel"),
|
||||
response_model=PurchaseOrderResponse
|
||||
)
|
||||
async def cancel_purchase_order(
|
||||
po_id: str,
|
||||
reason: str = Query(..., description="Cancellation reason"),
|
||||
cancelled_by: Optional[str] = Query(default=None),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Cancel a purchase order
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
reason: Cancellation reason
|
||||
cancelled_by: User ID performing cancellation
|
||||
|
||||
Returns:
|
||||
Cancelled purchase order
|
||||
"""
|
||||
try:
|
||||
po = await service.cancel_purchase_order(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
po_id=uuid.UUID(po_id),
|
||||
cancelled_by=uuid.UUID(cancelled_by) if cancelled_by else None,
|
||||
cancellation_reason=reason
|
||||
)
|
||||
|
||||
if not po:
|
||||
raise HTTPException(status_code=404, detail="Purchase order not found")
|
||||
|
||||
return PurchaseOrderResponse.model_validate(po)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error cancelling purchase order", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# DELIVERY MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_nested_resource_route("purchase-orders", "po_id", "deliveries"),
|
||||
response_model=DeliveryResponse,
|
||||
status_code=201
|
||||
)
|
||||
async def create_delivery(
|
||||
po_id: str,
|
||||
delivery_data: DeliveryCreate,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Create a delivery record for a purchase order
|
||||
|
||||
Tracks delivery scheduling, items, quality inspection, and receipt.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
delivery_data: Delivery creation data
|
||||
|
||||
Returns:
|
||||
DeliveryResponse with created delivery details
|
||||
"""
|
||||
try:
|
||||
# Validate PO ID matches
|
||||
if str(delivery_data.purchase_order_id) != po_id:
|
||||
raise ValueError("Purchase order ID mismatch")
|
||||
|
||||
delivery = await service.create_delivery(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
delivery_data=delivery_data,
|
||||
created_by=uuid.UUID(current_user.get("user_id"))
|
||||
)
|
||||
|
||||
return DeliveryResponse.model_validate(delivery)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating delivery", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch(
|
||||
route_builder.build_nested_resource_route("purchase-orders", "po_id", "deliveries") + "/{delivery_id}/status"
|
||||
)
|
||||
async def update_delivery_status(
|
||||
po_id: str,
|
||||
delivery_id: str,
|
||||
status: str = Query(..., description="New delivery status"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Update delivery status
|
||||
|
||||
Valid statuses: scheduled, in_transit, delivered, completed, cancelled
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
delivery_id: Delivery UUID
|
||||
status: New status
|
||||
|
||||
Returns:
|
||||
Updated delivery
|
||||
"""
|
||||
try:
|
||||
delivery = await service.update_delivery_status(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
delivery_id=uuid.UUID(delivery_id),
|
||||
status=status,
|
||||
updated_by=uuid.UUID(current_user.get("user_id"))
|
||||
)
|
||||
|
||||
if not delivery:
|
||||
raise HTTPException(status_code=404, detail="Delivery not found")
|
||||
|
||||
return DeliveryResponse.model_validate(delivery)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error updating delivery status", error=str(e), delivery_id=delivery_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INVOICE MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_nested_resource_route("purchase-orders", "po_id", "invoices"),
|
||||
response_model=SupplierInvoiceResponse,
|
||||
status_code=201
|
||||
)
|
||||
async def create_invoice(
|
||||
po_id: str,
|
||||
invoice_data: SupplierInvoiceCreate,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
Create a supplier invoice for a purchase order
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
po_id: Purchase order UUID
|
||||
invoice_data: Invoice creation data
|
||||
|
||||
Returns:
|
||||
SupplierInvoiceResponse with created invoice details
|
||||
"""
|
||||
try:
|
||||
# Validate PO ID matches
|
||||
if str(invoice_data.purchase_order_id) != po_id:
|
||||
raise ValueError("Purchase order ID mismatch")
|
||||
|
||||
invoice = await service.create_invoice(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
invoice_data=invoice_data,
|
||||
created_by=uuid.UUID(current_user.get("user_id"))
|
||||
)
|
||||
|
||||
return SupplierInvoiceResponse.model_validate(invoice)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating invoice", error=str(e), po_id=po_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ================================================================
|
||||
# OVERDUE PO DETECTION
|
||||
# ================================================================
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("purchase-orders/overdue"),
|
||||
response_model=List[dict]
|
||||
)
|
||||
async def get_overdue_purchase_orders(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
limit: int = Query(10, ge=1, le=100, description="Max results")
|
||||
):
|
||||
"""
|
||||
Get overdue purchase orders for dashboard display.
|
||||
|
||||
Returns POs that are past their estimated delivery date
|
||||
but not yet marked as delivered.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
limit: Maximum number of results (default: 10)
|
||||
|
||||
Returns:
|
||||
List of overdue PO summaries with severity and days overdue
|
||||
"""
|
||||
try:
|
||||
detector = OverduePODetector()
|
||||
overdue_pos = await detector.get_overdue_pos_for_dashboard(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return overdue_pos
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting overdue POs", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||


@router.get(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "overdue-status"),
    response_model=dict
)
async def check_po_overdue_status(
    po_id: str,
    tenant_id: str = Path(..., description="Tenant ID")
):
    """
    Check if a specific PO is overdue.

    Args:
        tenant_id: Tenant UUID
        po_id: Purchase order UUID

    Returns:
        Overdue status info, or {"overdue": False} if the PO is not overdue
    """
    try:
        detector = OverduePODetector()
        overdue_info = await detector.check_single_po_overdue(
            po_id=uuid.UUID(po_id),
            tenant_id=uuid.UUID(tenant_id)
        )

        if overdue_info:
            return overdue_info
        else:
            return {"overdue": False}

    except Exception as e:
        logger.error("Error checking PO overdue status", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
463
services/procurement/app/api/replenishment.py
Normal file
@@ -0,0 +1,463 @@
"""
Replenishment Planning API Routes

Provides endpoints for advanced replenishment planning including:
- Generate replenishment plans
- View inventory projections
- Review supplier allocations
- Get planning analytics
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import List, Optional
from uuid import UUID
from datetime import date

from app.schemas.replenishment import (
    GenerateReplenishmentPlanRequest,
    GenerateReplenishmentPlanResponse,
    ReplenishmentPlanResponse,
    ReplenishmentPlanSummary,
    InventoryProjectionResponse,
    SupplierAllocationResponse,
    SupplierSelectionRequest,
    SupplierSelectionResult,
    SafetyStockRequest,
    SafetyStockResponse,
    ProjectInventoryRequest,
    ProjectInventoryResponse,
    ReplenishmentAnalytics,
    MOQAggregationRequest,
    MOQAggregationResponse
)
from app.services.procurement_service import ProcurementService
from app.services.replenishment_planning_service import ReplenishmentPlanningService
from app.services.safety_stock_calculator import SafetyStockCalculator
from app.services.inventory_projector import InventoryProjector, DailyDemand, ScheduledReceipt
from app.services.moq_aggregator import MOQAggregator
from app.services.supplier_selector import SupplierSelector
from app.core.dependencies import get_db, get_current_tenant_id
from sqlalchemy.ext.asyncio import AsyncSession
from shared.routing import RouteBuilder
import structlog

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('procurement')
router = APIRouter(tags=["replenishment-planning"])


# ============================================================
# Replenishment Plan Endpoints
# ============================================================

@router.post(
    route_builder.build_operations_route("replenishment-plans/generate"),
    response_model=GenerateReplenishmentPlanResponse
)
async def generate_replenishment_plan(
    request: GenerateReplenishmentPlanRequest,
    tenant_id: UUID = Depends(get_current_tenant_id),
    db: AsyncSession = Depends(get_db)
):
    """
    Generate advanced replenishment plan with:
    - Lead-time-aware order date calculation
    - Dynamic safety stock
    - Inventory projection
    - Shelf-life management
    """
    try:
        logger.info("Generating replenishment plan", tenant_id=tenant_id)

        # Initialize replenishment planner
        planner = ReplenishmentPlanningService(
            projection_horizon_days=request.projection_horizon_days,
            default_service_level=request.service_level,
            default_buffer_days=request.buffer_days
        )

        # Generate plan
        plan = await planner.generate_replenishment_plan(
            tenant_id=str(tenant_id),
            requirements=request.requirements,
            forecast_id=request.forecast_id,
            production_schedule_id=request.production_schedule_id
        )

        # Export to response
        plan_dict = planner.export_plan_to_dict(plan)

        return GenerateReplenishmentPlanResponse(**plan_dict)

    except Exception as e:
        logger.error("Failed to generate replenishment plan",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
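

# --- Editor's illustrative sketch (not part of the original endpoint code) ---
# A minimal request body for the generate endpoint above, built from the fields
# the handler reads off GenerateReplenishmentPlanRequest. Field types and the
# shape of each requirement entry are assumptions; the Pydantic schema in
# app.schemas.replenishment is the source of truth.
_EXAMPLE_GENERATE_PLAN_REQUEST = {
    "projection_horizon_days": 30,
    "service_level": 0.95,
    "buffer_days": 2,
    "forecast_id": None,             # optional link to a demand forecast
    "production_schedule_id": None,  # optional link to a production schedule
    "requirements": [
        {
            "ingredient_id": "00000000-0000-0000-0000-000000000001",  # placeholder UUID
            "quantity": 120.0,
            "unit_of_measure": "kg",
            "required_by": "2025-01-15",
        }
    ],
}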


@router.get(
    route_builder.build_operations_route("replenishment-plans"),
    response_model=List[ReplenishmentPlanSummary]
)
async def list_replenishment_plans(
    tenant_id: UUID = Depends(get_current_tenant_id),
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    status: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    List replenishment plans for tenant
    """
    try:
        # Query from database (implementation depends on your repository)
        from app.repositories.replenishment_repository import ReplenishmentPlanRepository

        repo = ReplenishmentPlanRepository(db)
        plans = await repo.list_plans(
            tenant_id=tenant_id,
            skip=skip,
            limit=limit,
            status=status
        )

        return plans

    except Exception as e:
        logger.error("Failed to list replenishment plans",
                     tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


@router.get(
    route_builder.build_resource_detail_route("replenishment-plans", "plan_id"),
    response_model=ReplenishmentPlanResponse
)
async def get_replenishment_plan(
    plan_id: UUID = Path(...),
    tenant_id: UUID = Depends(get_current_tenant_id),
    db: AsyncSession = Depends(get_db)
):
    """
    Get replenishment plan by ID
    """
    try:
        from app.repositories.replenishment_repository import ReplenishmentPlanRepository

        repo = ReplenishmentPlanRepository(db)
        plan = await repo.get_plan_by_id(plan_id, tenant_id)

        if not plan:
            raise HTTPException(status_code=404, detail="Replenishment plan not found")

        return plan

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get replenishment plan",
                     tenant_id=tenant_id, plan_id=plan_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


# ============================================================
# Inventory Projection Endpoints
# ============================================================

@router.post(
    route_builder.build_operations_route("replenishment-plans/inventory-projections/project"),
    response_model=ProjectInventoryResponse
)
async def project_inventory(
    request: ProjectInventoryRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Project inventory levels to identify future stockouts
    """
    try:
        logger.info("Projecting inventory", tenant_id=tenant_id,
                    ingredient_id=request.ingredient_id)

        projector = InventoryProjector(request.projection_horizon_days)

        # Build daily demand objects
        daily_demand = [
            DailyDemand(
                ingredient_id=request.ingredient_id,
                date=d['date'],
                quantity=d['quantity']
            )
            for d in request.daily_demand
        ]

        # Build scheduled receipts
        scheduled_receipts = [
            ScheduledReceipt(
                ingredient_id=request.ingredient_id,
                date=r['date'],
                quantity=r['quantity'],
                source=r.get('source', 'purchase_order'),
                reference_id=r.get('reference_id')
            )
            for r in request.scheduled_receipts
        ]

        # Project inventory
        projection = projector.project_inventory(
            ingredient_id=request.ingredient_id,
            ingredient_name=request.ingredient_name,
            current_stock=request.current_stock,
            unit_of_measure=request.unit_of_measure,
            daily_demand=daily_demand,
            scheduled_receipts=scheduled_receipts
        )

        # Export to response
        projection_dict = projector.export_projection_to_dict(projection)

        return ProjectInventoryResponse(**projection_dict)

    except Exception as e:
        logger.error("Failed to project inventory",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
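

# --- Editor's illustrative sketch (not part of the original endpoint code) ---
# Example body for the projection endpoint above. The daily_demand and
# scheduled_receipts entry keys ('date', 'quantity', 'source', 'reference_id')
# are exactly the ones the handler reads; the remaining values and their types
# are assumptions and may differ from the real ProjectInventoryRequest schema.
_EXAMPLE_PROJECT_INVENTORY_REQUEST = {
    "ingredient_id": "00000000-0000-0000-0000-000000000001",  # placeholder UUID
    "ingredient_name": "Flour T55",
    "current_stock": 80.0,
    "unit_of_measure": "kg",
    "projection_horizon_days": 14,
    "daily_demand": [
        {"date": "2025-01-10", "quantity": 12.5},
        {"date": "2025-01-11", "quantity": 15.0},
    ],
    "scheduled_receipts": [
        {"date": "2025-01-12", "quantity": 50.0,
         "source": "purchase_order", "reference_id": None},
    ],
}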


@router.get(
    route_builder.build_operations_route("replenishment-plans/inventory-projections"),
    response_model=List[InventoryProjectionResponse]
)
async def list_inventory_projections(
    tenant_id: UUID = Depends(get_current_tenant_id),
    ingredient_id: Optional[UUID] = None,
    projection_date: Optional[date] = None,
    stockout_only: bool = False,
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    db: AsyncSession = Depends(get_db)
):
    """
    List inventory projections
    """
    try:
        from app.repositories.replenishment_repository import InventoryProjectionRepository

        repo = InventoryProjectionRepository(db)
        projections = await repo.list_projections(
            tenant_id=tenant_id,
            ingredient_id=ingredient_id,
            projection_date=projection_date,
            stockout_only=stockout_only,
            skip=skip,
            limit=limit
        )

        return projections

    except Exception as e:
        logger.error("Failed to list inventory projections",
                     tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


# ============================================================
# Safety Stock Endpoints
# ============================================================

@router.post(
    route_builder.build_operations_route("replenishment-plans/safety-stock/calculate"),
    response_model=SafetyStockResponse
)
async def calculate_safety_stock(
    request: SafetyStockRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Calculate dynamic safety stock using statistical methods
    """
    try:
        logger.info("Calculating safety stock", tenant_id=tenant_id,
                    ingredient_id=request.ingredient_id)

        calculator = SafetyStockCalculator(request.service_level)

        result = calculator.calculate_from_demand_history(
            daily_demands=request.daily_demands,
            lead_time_days=request.lead_time_days,
            service_level=request.service_level
        )

        return SafetyStockResponse(**calculator.export_to_dict(result))

    except Exception as e:
        logger.error("Failed to calculate safety stock",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
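

# --- Editor's illustrative sketch (not part of the original endpoint code) ---
# The endpoint above delegates to SafetyStockCalculator. A common statistical
# formulation of dynamic safety stock is SS = z * sigma_d * sqrt(L), where z is
# the normal quantile for the target service level, sigma_d the standard
# deviation of daily demand, and L the lead time in days. The helper below is a
# self-contained sketch of that textbook formula only; it is not necessarily the
# exact method the calculator implements.
def _example_safety_stock(daily_demands: List[float], lead_time_days: float,
                          service_level: float = 0.95) -> float:
    from statistics import NormalDist, pstdev

    z = NormalDist().inv_cdf(service_level)  # z-score for the service level
    sigma_d = pstdev(daily_demands)          # std deviation of daily demand
    return z * sigma_d * (lead_time_days ** 0.5)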


# ============================================================
# Supplier Selection Endpoints
# ============================================================

@router.post(
    route_builder.build_operations_route("replenishment-plans/supplier-selections/evaluate"),
    response_model=SupplierSelectionResult
)
async def evaluate_supplier_selection(
    request: SupplierSelectionRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Evaluate supplier options using multi-criteria decision analysis
    """
    try:
        logger.info("Evaluating supplier selection", tenant_id=tenant_id,
                    ingredient_id=request.ingredient_id)

        selector = SupplierSelector()

        # Convert supplier options
        from app.services.supplier_selector import SupplierOption
        supplier_options = [
            SupplierOption(**opt) for opt in request.supplier_options
        ]

        result = selector.select_suppliers(
            ingredient_id=request.ingredient_id,
            ingredient_name=request.ingredient_name,
            required_quantity=request.required_quantity,
            supplier_options=supplier_options
        )

        return SupplierSelectionResult(**selector.export_result_to_dict(result))

    except Exception as e:
        logger.error("Failed to evaluate supplier selection",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
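

# --- Editor's illustrative sketch (not part of the original endpoint code) ---
# The endpoint above relies on SupplierSelector for multi-criteria decision
# analysis. A generic weighted-sum score over already-normalised criteria looks
# like the helper below; the real selector's criteria, weights, and allocation
# logic live in app.services.supplier_selector and may differ.
def _example_weighted_score(criteria: dict, weights: dict) -> float:
    # criteria: criterion name -> normalised score in [0, 1] (1 is best)
    # weights:  criterion name -> relative importance, ideally summing to 1.0
    return sum(weights[name] * criteria.get(name, 0.0) for name in weights)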


@router.get(
    route_builder.build_operations_route("replenishment-plans/supplier-allocations"),
    response_model=List[SupplierAllocationResponse]
)
async def list_supplier_allocations(
    tenant_id: UUID = Depends(get_current_tenant_id),
    requirement_id: Optional[UUID] = None,
    supplier_id: Optional[UUID] = None,
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    db: AsyncSession = Depends(get_db)
):
    """
    List supplier allocations
    """
    try:
        from app.repositories.replenishment_repository import SupplierAllocationRepository

        repo = SupplierAllocationRepository(db)
        allocations = await repo.list_allocations(
            tenant_id=tenant_id,
            requirement_id=requirement_id,
            supplier_id=supplier_id,
            skip=skip,
            limit=limit
        )

        return allocations

    except Exception as e:
        logger.error("Failed to list supplier allocations",
                     tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


# ============================================================
# MOQ Aggregation Endpoints
# ============================================================

@router.post(
    route_builder.build_operations_route("replenishment-plans/moq-aggregation/aggregate"),
    response_model=MOQAggregationResponse
)
async def aggregate_for_moq(
    request: MOQAggregationRequest,
    tenant_id: UUID = Depends(get_current_tenant_id)
):
    """
    Aggregate requirements to meet Minimum Order Quantities
    """
    try:
        logger.info("Aggregating requirements for MOQ", tenant_id=tenant_id)

        aggregator = MOQAggregator()

        # Convert requirements and constraints
        from app.services.moq_aggregator import (
            ProcurementRequirement as MOQReq,
            SupplierConstraints
        )

        requirements = [MOQReq(**req) for req in request.requirements]
        constraints = {
            k: SupplierConstraints(**v)
            for k, v in request.supplier_constraints.items()
        }

        # Aggregate
        aggregated_orders = aggregator.aggregate_requirements(
            requirements=requirements,
            supplier_constraints=constraints
        )

        # Calculate efficiency
        efficiency = aggregator.calculate_order_efficiency(aggregated_orders)

        return MOQAggregationResponse(
            aggregated_orders=[aggregator.export_to_dict(order) for order in aggregated_orders],
            efficiency_metrics=efficiency
        )

    except Exception as e:
        logger.error("Failed to aggregate for MOQ",
                     tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
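

# --- Editor's illustrative sketch (not part of the original endpoint code) ---
# MOQ aggregation combines small requirements per supplier and rounds order
# quantities up to the supplier's minimum: for example, 8 kg and 9 kg needed
# from the same supplier with a 25 kg MOQ become a single 25 kg order. The
# top-level keys below mirror what the handler converts into MOQAggregator
# inputs; the inner field names of each requirement and constraint are
# assumptions, with the dataclasses in app.services.moq_aggregator being the
# source of truth.
_EXAMPLE_MOQ_AGGREGATION_REQUEST = {
    "requirements": [
        {"ingredient_id": "00000000-0000-0000-0000-000000000001",
         "supplier_id": "00000000-0000-0000-0000-00000000000a",
         "quantity": 8.0, "unit_of_measure": "kg"},
        {"ingredient_id": "00000000-0000-0000-0000-000000000002",
         "supplier_id": "00000000-0000-0000-0000-00000000000a",
         "quantity": 9.0, "unit_of_measure": "kg"},
    ],
    "supplier_constraints": {
        "00000000-0000-0000-0000-00000000000a": {
            "minimum_order_quantity": 25.0,  # assumed field name
            "order_multiple": None,          # assumed field name
        }
    },
}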


# ============================================================
# Analytics Endpoints
# ============================================================

@router.get(
    route_builder.build_analytics_route("replenishment-plans"),
    response_model=ReplenishmentAnalytics
)
async def get_replenishment_analytics(
    tenant_id: UUID = Depends(get_current_tenant_id),
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Get replenishment planning analytics
    """
    try:
        from app.repositories.replenishment_repository import ReplenishmentAnalyticsRepository

        repo = ReplenishmentAnalyticsRepository(db)
        analytics = await repo.get_analytics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        return analytics

    except Exception as e:
        logger.error("Failed to get replenishment analytics",
                     tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=str(e))