New alert service
This commit is contained in:
190
services/procurement/app/api/expected_deliveries.py
Normal file
190
services/procurement/app/api/expected_deliveries.py
Normal file
@@ -0,0 +1,190 @@
|
||||
"""
|
||||
Expected Deliveries API for Procurement Service
|
||||
Public endpoint for expected delivery tracking
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
|
||||
# Module-level structured logger for this API module.
logger = structlog.get_logger()
# Builds tenant-scoped route paths for the procurement service.
route_builder = RouteBuilder('procurement')
# Public router for expected-delivery endpoints.
router = APIRouter(tags=["expected-deliveries"])
|
||||
|
||||
|
||||
# Known demo suppliers, matched against free-text PO notes (first match wins;
# tuple order mirrors the original elif chain).
# NOTE(review): demo shortcut — production should query the supplier service
# instead of inferring supplier contact details from notes.
_KNOWN_SUPPLIERS = (
    ("Molinos San José", "Molinos San José S.L.", "+34 915 234 567"),
    ("Lácteos del Valle", "Lácteos del Valle S.A.", "+34 913 456 789"),
    ("Chocolates Valor", "Chocolates Valor", "+34 965 510 062"),
    ("Suministros Hostelería", "Suministros Hostelería", "+34 911 234 567"),
    ("Miel Artesana", "Miel Artesana", "+34 918 765 432"),
)

# Delivery window reported for every PO (hours); hard-coded default for now.
_DEFAULT_DELIVERY_WINDOW_HOURS = 4


def _resolve_supplier_info(po: PurchaseOrder) -> tuple:
    """Return ``(supplier_name, supplier_phone)`` for a purchase order.

    Falls back to a synthetic ``Supplier-<uuid prefix>`` name with no phone
    when the PO notes do not mention a known demo supplier.
    """
    if po.notes:
        for marker, name, phone in _KNOWN_SUPPLIERS:
            if marker in po.notes:
                return name, phone
    return f"Supplier-{str(po.supplier_id)[:8]}", None


def _format_delivery(po: PurchaseOrder) -> dict:
    """Serialize a PurchaseOrder into the expected-deliveries response shape."""
    supplier_name, supplier_phone = _resolve_supplier_info(po)
    # Limit to the first 5 line items to keep the payload compact.
    line_items = [
        {
            "product_name": item.product_name,
            "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
            "unit": item.unit_of_measure or "unit"
        }
        for item in po.items[:5]
    ]
    return {
        "po_id": str(po.id),
        "po_number": po.po_number,
        "supplier_id": str(po.supplier_id),
        "supplier_name": supplier_name,
        "supplier_phone": supplier_phone,
        "expected_delivery_date": po.expected_delivery_date.isoformat(),
        "delivery_window_hours": _DEFAULT_DELIVERY_WINDOW_HOURS,
        "status": po.status.value,
        "line_items": line_items,
        "total_amount": float(po.total_amount) if po.total_amount else 0.0,
        "currency": po.currency
    }


@router.get(
    route_builder.build_base_route("expected-deliveries")
)
async def get_expected_deliveries(
    tenant_id: str,
    days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
    include_overdue: bool = Query(True, description="Include overdue deliveries"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get expected deliveries for delivery tracking system.

    Args:
        tenant_id: Tenant UUID to query
        days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
        include_overdue: Include deliveries past expected date (default True)

    Returns:
        ``{"deliveries": [...], "total_count": N}`` where each entry carries
        PO identity, supplier contact info, the expected delivery date,
        the delivery window, status, up to 5 line items, and the total amount.

    Raises:
        HTTPException: 400 for a malformed tenant UUID, 500 on unexpected errors.
    """
    # Validate the tenant id up front so ONLY a malformed UUID maps to 400.
    # (Previously the ValueError handler wrapped the whole body, so a
    # ValueError raised anywhere else was misreported as "Invalid UUID".)
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")

    try:
        # Date window: overdue POs reach back arbitrarily far; otherwise
        # only [now, now + days_ahead].
        now = datetime.now(timezone.utc)
        end_date = now + timedelta(days=days_ahead)

        logger.info(
            "Fetching expected deliveries",
            tenant_id=tenant_id,
            days_ahead=days_ahead,
            include_overdue=include_overdue
        )

        # POs with a known expected delivery date in an "in flight" status.
        query = select(PurchaseOrder).options(
            selectinload(PurchaseOrder.items)
        ).where(
            PurchaseOrder.tenant_id == tenant_uuid,
            PurchaseOrder.expected_delivery_date.isnot(None),
            PurchaseOrder.status.in_([
                PurchaseOrderStatus.approved,
                PurchaseOrderStatus.sent_to_supplier,
                PurchaseOrderStatus.confirmed
            ])
        )

        if include_overdue:
            # Include any delivery from the past up to end_date.
            query = query.where(
                PurchaseOrder.expected_delivery_date <= end_date
            )
        else:
            # Only future deliveries within the range.
            query = query.where(
                PurchaseOrder.expected_delivery_date >= now,
                PurchaseOrder.expected_delivery_date <= end_date
            )

        # Soonest deliveries first.
        query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

        result = await db.execute(query)
        purchase_orders = result.scalars().all()

        deliveries = [_format_delivery(po) for po in purchase_orders]

        logger.info(
            "Expected deliveries retrieved",
            tenant_id=tenant_id,
            count=len(deliveries)
        )

        return {
            "deliveries": deliveries,
            "total_count": len(deliveries)
        }

    except Exception as e:
        logger.error(
            "Error fetching expected deliveries",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        raise HTTPException(status_code=500, detail="Internal server error")
|
||||
197
services/procurement/app/api/internal_delivery.py
Normal file
197
services/procurement/app/api/internal_delivery.py
Normal file
@@ -0,0 +1,197 @@
|
||||
"""
|
||||
Internal Delivery Tracking API for Procurement Service
|
||||
Service-to-service endpoint for expected delivery tracking by orchestrator
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus
|
||||
from app.core.config import settings
|
||||
|
||||
# Module-level structured logger for this API module.
logger = structlog.get_logger()
# Service-to-service router; every route here is mounted under /internal.
router = APIRouter(prefix="/internal", tags=["internal"])
|
||||
|
||||
|
||||
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify the internal API key for service-to-service communication.

    Uses a constant-time comparison (``hmac.compare_digest``) so the key
    cannot be probed via timing side channels, and rejects requests outright
    when the configured key is unset. (The previous ``!=`` check leaked
    timing, and an unset ``INTERNAL_API_KEY`` compared equal to a missing
    header — granting access to everyone.)

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    import hmac  # stdlib; local import keeps the module header untouched

    expected_key = settings.INTERNAL_API_KEY
    provided_key = x_internal_api_key or ""
    if not expected_key or not hmac.compare_digest(provided_key, expected_key):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
|
||||
|
||||
|
||||
@router.get("/expected-deliveries")
async def get_expected_deliveries(
    tenant_id: str = Query(..., description="Tenant UUID"),
    days_ahead: int = Query(1, description="Number of days to look ahead", ge=0, le=30),
    include_overdue: bool = Query(True, description="Include overdue deliveries"),
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Get expected deliveries for the delivery tracking system.

    Called by the orchestrator's DeliveryTrackingService to monitor upcoming
    deliveries and generate delivery alerts (arriving_soon, overdue,
    receipt_incomplete).

    Args:
        tenant_id: Tenant UUID to query
        days_ahead: Number of days to look ahead (default 1 = today + tomorrow)
        include_overdue: Include deliveries past expected date (default True)

    Returns:
        ``{"deliveries": [...], "total_count": N}`` — each entry holds the PO
        identity, supplier contact info, expected delivery date, delivery
        window, status, up to 5 line items, total amount, and currency.
    """
    # Demo supplier directory: substring of PO notes -> (display name, phone).
    # Order mirrors the matching precedence; the first hit wins.
    known_suppliers = (
        ("Molinos San José", "Molinos San José S.L.", "+34 915 234 567"),
        ("Lácteos del Valle", "Lácteos del Valle S.A.", "+34 913 456 789"),
        ("Chocolates Valor", "Chocolates Valor", "+34 965 510 062"),
        ("Suministros Hostelería", "Suministros Hostelería", "+34 911 234 567"),
        ("Miel Artesana", "Miel Artesana", "+34 918 765 432"),
    )

    try:
        tenant_uuid = uuid.UUID(tenant_id)

        # Window boundaries for the delivery-date filter.
        window_start = datetime.now(timezone.utc)
        window_end = window_start + timedelta(days=days_ahead)

        logger.info(
            "Fetching expected deliveries",
            tenant_id=tenant_id,
            days_ahead=days_ahead,
            include_overdue=include_overdue
        )

        # Only POs that are "in flight" and have a delivery date are relevant.
        active_statuses = [
            PurchaseOrderStatus.approved,
            PurchaseOrderStatus.sent_to_supplier,
            PurchaseOrderStatus.confirmed,
        ]
        stmt = (
            select(PurchaseOrder)
            .options(selectinload(PurchaseOrder.items))
            .where(
                PurchaseOrder.tenant_id == tenant_uuid,
                PurchaseOrder.expected_delivery_date.isnot(None),
                PurchaseOrder.status.in_(active_statuses),
            )
        )

        if include_overdue:
            # Anything due up to window_end, however far in the past.
            stmt = stmt.where(PurchaseOrder.expected_delivery_date <= window_end)
        else:
            # Strictly within [now, window_end].
            stmt = stmt.where(
                PurchaseOrder.expected_delivery_date >= window_start,
                PurchaseOrder.expected_delivery_date <= window_end,
            )

        # Soonest deliveries first.
        stmt = stmt.order_by(PurchaseOrder.expected_delivery_date.asc())

        rows = await db.execute(stmt)
        orders = rows.scalars().all()

        deliveries = []
        for order in orders:
            # Default supplier identity derived from the supplier UUID; in
            # production this would come from the supplier service instead.
            supplier_name = f"Supplier-{str(order.supplier_id)[:8]}"
            supplier_phone = None
            if order.notes:
                for needle, display_name, phone in known_suppliers:
                    if needle in order.notes:
                        supplier_name = display_name
                        supplier_phone = phone
                        break

            # Keep the payload compact: at most the first 5 line items.
            line_items = [
                {
                    "product_name": item.product_name,
                    "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                    "unit": item.unit_of_measure or "unit"
                }
                for item in order.items[:5]
            ]

            deliveries.append({
                "po_id": str(order.id),
                "po_number": order.po_number,
                "supplier_id": str(order.supplier_id),
                "supplier_name": supplier_name,
                "supplier_phone": supplier_phone,
                "expected_delivery_date": order.expected_delivery_date.isoformat(),
                # Default delivery window is 4 hours.
                "delivery_window_hours": 4,
                "status": order.status.value,
                "line_items": line_items,
                "total_amount": float(order.total_amount) if order.total_amount else 0.0,
                "currency": order.currency
            })

        logger.info(
            "Expected deliveries retrieved",
            tenant_id=tenant_id,
            count=len(deliveries)
        )

        return {
            "deliveries": deliveries,
            "total_count": len(deliveries)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {tenant_id}")

    except Exception as e:
        logger.error(
            "Error fetching expected deliveries",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        raise HTTPException(status_code=500, detail="Internal server error")
|
||||
98
services/procurement/app/api/internal_delivery_tracking.py
Normal file
98
services/procurement/app/api/internal_delivery_tracking.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""
|
||||
Internal API for triggering delivery tracking alerts.
|
||||
Used by demo session cloning to generate realistic late delivery alerts.
|
||||
|
||||
Moved from orchestrator service to procurement service (domain ownership).
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request, Path
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
# Module-level structured logger for this API module.
logger = structlog.get_logger()

# Router for the internal delivery-tracking trigger endpoint; routes declare
# their full /api/internal/... paths themselves (no prefix configured here).
router = APIRouter()
|
||||
|
||||
|
||||
@router.post("/api/internal/delivery-tracking/trigger/{tenant_id}")
async def trigger_delivery_tracking(
    tenant_id: UUID = Path(..., description="Tenant ID to check deliveries for"),
    request: Request = None
) -> dict:
    """
    Trigger delivery tracking for a specific tenant (internal use only).

    Called by the demo session cloning process after POs are seeded, to
    generate realistic delivery alerts (arriving soon, overdue, etc.).

    Security: protected by an X-Internal-Service header check.

    Args:
        tenant_id: Tenant UUID to check deliveries for
        request: FastAPI request object

    Returns:
        ``{"success": true, "tenant_id": "...", "alerts_generated": N,
        "breakdown": {"arriving_soon": ..., "overdue": ...,
        "receipt_incomplete": ...}}``
    """
    try:
        # Only trusted internal callers may hit this endpoint.
        caller = request.headers.get("X-Internal-Service") if request else None
        if caller not in ("demo-session", "internal"):
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # The tracking service is attached to app.state during startup.
        tracker = getattr(request.app.state, 'delivery_tracking_service', None)
        if not tracker:
            logger.error("Delivery tracking service not initialized")
            raise HTTPException(
                status_code=500,
                detail="Delivery tracking service not available"
            )

        logger.info("Triggering delivery tracking", tenant_id=str(tenant_id))
        outcome = await tracker.check_expected_deliveries(tenant_id)

        total_alerts = outcome.get("total_alerts", 0)
        logger.info(
            "Delivery tracking completed",
            tenant_id=str(tenant_id),
            alerts_generated=total_alerts
        )

        return {
            "success": True,
            "tenant_id": str(tenant_id),
            "alerts_generated": total_alerts,
            "breakdown": {
                "arriving_soon": outcome.get("arriving_soon", 0),
                "overdue": outcome.get("overdue", 0),
                "receipt_incomplete": outcome.get("receipt_incomplete", 0)
            }
        }

    except HTTPException:
        # Re-raise deliberate API errors untouched (403 / 500 above).
        raise
    except Exception as e:
        logger.error(
            "Error triggering delivery tracking",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger delivery tracking: {str(e)}"
        )
|
||||
@@ -17,7 +17,12 @@ from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
|
||||
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
|
||||
from sqlalchemy.orm import selectinload
|
||||
from shared.schemas.reasoning_types import (
|
||||
create_po_reasoning_low_stock,
|
||||
create_po_reasoning_supplier_contract
|
||||
)
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
@@ -265,17 +270,16 @@ async def clone_demo_data(
|
||||
# Generate a system user UUID for audit fields (demo purposes)
|
||||
system_user_id = uuid.uuid4()
|
||||
|
||||
# For demo sessions: 30-40% of POs should have delivery scheduled for TODAY
|
||||
# For demo sessions: Adjust expected_delivery_date if it exists
|
||||
# This ensures the ExecutionProgressTracker shows realistic delivery data
|
||||
import random
|
||||
expected_delivery = None
|
||||
if order.status in ['approved', 'sent_to_supplier'] and random.random() < 0.35:
|
||||
# Set delivery for today at various times (8am-6pm)
|
||||
hours_offset = random.randint(8, 18)
|
||||
minutes_offset = random.choice([0, 15, 30, 45])
|
||||
expected_delivery = session_time.replace(hour=hours_offset, minute=minutes_offset, second=0, microsecond=0)
|
||||
else:
|
||||
# Use the adjusted estimated delivery date
|
||||
if hasattr(order, 'expected_delivery_date') and order.expected_delivery_date:
|
||||
# Adjust the existing expected_delivery_date to demo session time
|
||||
expected_delivery = adjust_date_for_demo(
|
||||
order.expected_delivery_date, session_time, BASE_REFERENCE_DATE
|
||||
)
|
||||
elif order.status in ['approved', 'sent_to_supplier', 'confirmed']:
|
||||
# If no expected_delivery_date but order is in delivery status, use estimated_delivery_date
|
||||
expected_delivery = adjusted_estimated_delivery
|
||||
|
||||
# Create new PurchaseOrder - add expected_delivery_date only if column exists (after migration)
|
||||
@@ -433,13 +437,63 @@ async def clone_demo_data(
|
||||
|
||||
total_records = sum(stats.values())
|
||||
|
||||
# FIX DELIVERY ALERT TIMING - Adjust specific POs to guarantee delivery alerts
|
||||
# After cloning, some POs need their expected_delivery_date adjusted relative to session time
|
||||
# to ensure they trigger delivery tracking alerts (arriving soon, overdue, etc.)
|
||||
logger.info("Adjusting delivery PO dates for guaranteed alert triggering")
|
||||
|
||||
# Query for sent_to_supplier POs that have expected_delivery_date
|
||||
result = await db.execute(
|
||||
select(PurchaseOrder)
|
||||
.where(
|
||||
PurchaseOrder.tenant_id == virtual_uuid,
|
||||
PurchaseOrder.status == 'sent_to_supplier',
|
||||
PurchaseOrder.expected_delivery_date.isnot(None)
|
||||
)
|
||||
.limit(5) # Adjust first 5 POs with delivery dates
|
||||
)
|
||||
delivery_pos = result.scalars().all()
|
||||
|
||||
if len(delivery_pos) >= 2:
|
||||
# PO 1: Set to OVERDUE (5 hours ago) - will trigger overdue alert
|
||||
delivery_pos[0].expected_delivery_date = session_time - timedelta(hours=5)
|
||||
delivery_pos[0].required_delivery_date = session_time - timedelta(hours=5)
|
||||
delivery_pos[0].notes = "🔴 OVERDUE: Expected delivery was 5 hours ago - Contact supplier immediately"
|
||||
logger.info(f"Set PO {delivery_pos[0].po_number} to overdue (5 hours ago)")
|
||||
|
||||
# PO 2: Set to ARRIVING SOON (1 hour from now) - will trigger arriving soon alert
|
||||
delivery_pos[1].expected_delivery_date = session_time + timedelta(hours=1)
|
||||
delivery_pos[1].required_delivery_date = session_time + timedelta(hours=1)
|
||||
delivery_pos[1].notes = "📦 ARRIVING SOON: Delivery expected in 1 hour - Prepare for stock receipt"
|
||||
logger.info(f"Set PO {delivery_pos[1].po_number} to arriving soon (1 hour)")
|
||||
|
||||
if len(delivery_pos) >= 4:
|
||||
# PO 3: Set to TODAY AFTERNOON (6 hours from now) - visible in dashboard
|
||||
delivery_pos[2].expected_delivery_date = session_time + timedelta(hours=6)
|
||||
delivery_pos[2].required_delivery_date = session_time + timedelta(hours=6)
|
||||
delivery_pos[2].notes = "📅 TODAY: Delivery scheduled for this afternoon"
|
||||
logger.info(f"Set PO {delivery_pos[2].po_number} to today afternoon (6 hours)")
|
||||
|
||||
# PO 4: Set to TOMORROW MORNING (18 hours from now)
|
||||
delivery_pos[3].expected_delivery_date = session_time + timedelta(hours=18)
|
||||
delivery_pos[3].required_delivery_date = session_time + timedelta(hours=18)
|
||||
delivery_pos[3].notes = "📅 TOMORROW: Morning delivery scheduled"
|
||||
logger.info(f"Set PO {delivery_pos[3].po_number} to tomorrow morning (18 hours)")
|
||||
|
||||
# Commit the adjusted delivery dates
|
||||
await db.commit()
|
||||
logger.info(f"Adjusted {len(delivery_pos)} POs for delivery alert triggering")
|
||||
|
||||
|
||||
# EMIT ALERTS FOR PENDING APPROVAL POs
|
||||
# After cloning, emit PO approval alerts for any pending_approval POs
|
||||
# This ensures the action queue is populated when the demo session starts
|
||||
pending_pos_for_alerts = []
|
||||
for order_id in order_id_map.values():
|
||||
result = await db.execute(
|
||||
select(PurchaseOrder).where(
|
||||
select(PurchaseOrder)
|
||||
.options(selectinload(PurchaseOrder.items))
|
||||
.where(
|
||||
PurchaseOrder.id == order_id,
|
||||
PurchaseOrder.status == 'pending_approval'
|
||||
)
|
||||
@@ -454,12 +508,13 @@ async def clone_demo_data(
|
||||
virtual_tenant_id=virtual_tenant_id
|
||||
)
|
||||
|
||||
# Initialize RabbitMQ client for alert emission
|
||||
# Initialize RabbitMQ client for alert emission using UnifiedEventPublisher
|
||||
alerts_emitted = 0
|
||||
if pending_pos_for_alerts:
|
||||
rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement")
|
||||
try:
|
||||
await rabbitmq_client.connect()
|
||||
event_publisher = UnifiedEventPublisher(rabbitmq_client, "procurement")
|
||||
|
||||
for po in pending_pos_for_alerts:
|
||||
try:
|
||||
@@ -475,42 +530,77 @@ async def clone_demo_data(
|
||||
|
||||
hours_until = (deadline - now_utc).total_seconds() / 3600
|
||||
|
||||
# Prepare alert payload
|
||||
alert_data = {
|
||||
'id': str(uuid.uuid4()),
|
||||
'tenant_id': str(virtual_uuid),
|
||||
'service': 'procurement',
|
||||
'type': 'po_approval_needed',
|
||||
'alert_type': 'po_approval_needed',
|
||||
'type_class': 'action_needed',
|
||||
'severity': 'high' if po.priority == 'critical' else 'medium',
|
||||
'title': f'Purchase Order #{po.po_number} requires approval',
|
||||
'message': f'Purchase order totaling {po.currency} {po.total_amount:.2f} is pending approval.',
|
||||
'timestamp': now_utc.isoformat(),
|
||||
'metadata': {
|
||||
'po_id': str(po.id),
|
||||
'po_number': po.po_number,
|
||||
'supplier_id': str(po.supplier_id),
|
||||
'supplier_name': f'Supplier-{po.supplier_id}', # Simplified for demo
|
||||
'total_amount': float(po.total_amount),
|
||||
'currency': po.currency,
|
||||
'priority': po.priority,
|
||||
'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
'created_at': po.created_at.isoformat(),
|
||||
'financial_impact': float(po.total_amount),
|
||||
'deadline': deadline.isoformat(),
|
||||
'hours_until_consequence': int(hours_until),
|
||||
'reasoning_data': po.reasoning_data if po.reasoning_data else None, # Include orchestrator reasoning
|
||||
},
|
||||
'actions': ['approve_po', 'reject_po', 'modify_po'],
|
||||
'item_type': 'alert'
|
||||
# Check for reasoning data and generate if missing
|
||||
reasoning_data = po.reasoning_data
|
||||
|
||||
if not reasoning_data:
|
||||
try:
|
||||
# Generate synthetic reasoning data for demo purposes
|
||||
product_names = [item.product_name for item in po.items] if po.items else ["Assorted Bakery Supplies"]
|
||||
supplier_name = f"Supplier-{str(po.supplier_id)[:8]}" # Fallback name
|
||||
|
||||
# Create realistic looking reasoning based on PO data
|
||||
reasoning_data = create_po_reasoning_low_stock(
|
||||
supplier_name=supplier_name,
|
||||
product_names=product_names,
|
||||
current_stock=15.5, # Simulated
|
||||
required_stock=100.0, # Simulated
|
||||
days_until_stockout=2, # Simulated urgent
|
||||
threshold_percentage=20,
|
||||
affected_products=product_names[:2],
|
||||
estimated_lost_orders=12
|
||||
)
|
||||
logger.info("Generated synthetic reasoning data for demo alert", po_id=str(po.id))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to generate synthetic reasoning data, using ultimate fallback", error=str(e))
|
||||
# Ultimate fallback: Create minimal valid reasoning data structure
|
||||
reasoning_data = {
|
||||
"type": "low_stock_detection",
|
||||
"parameters": {
|
||||
"supplier_name": supplier_name,
|
||||
"product_names": ["Assorted Bakery Supplies"],
|
||||
"product_count": 1,
|
||||
"current_stock": 10.0,
|
||||
"required_stock": 50.0,
|
||||
"days_until_stockout": 2
|
||||
},
|
||||
"consequence": {
|
||||
"type": "stockout_risk",
|
||||
"severity": "medium",
|
||||
"impact_days": 2
|
||||
},
|
||||
"metadata": {
|
||||
"trigger_source": "demo_fallback",
|
||||
"ai_assisted": False
|
||||
}
|
||||
}
|
||||
logger.info("Used ultimate fallback reasoning_data structure", po_id=str(po.id))
|
||||
|
||||
# Prepare metadata for the alert
|
||||
severity = 'high' if po.priority == 'critical' else 'medium'
|
||||
metadata = {
|
||||
'po_id': str(po.id),
|
||||
'po_number': po.po_number,
|
||||
'supplier_id': str(po.supplier_id),
|
||||
'supplier_name': f'Supplier-{po.supplier_id}', # Simplified for demo
|
||||
'total_amount': float(po.total_amount),
|
||||
'currency': po.currency,
|
||||
'priority': po.priority,
|
||||
'severity': severity,
|
||||
'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
'created_at': po.created_at.isoformat(),
|
||||
'financial_impact': float(po.total_amount),
|
||||
'deadline': deadline.isoformat(),
|
||||
'hours_until_consequence': int(hours_until),
|
||||
'reasoning_data': reasoning_data, # For enrichment service
|
||||
}
|
||||
|
||||
# Publish to RabbitMQ
|
||||
success = await rabbitmq_client.publish_event(
|
||||
exchange_name='alerts.exchange',
|
||||
routing_key=f'alert.{alert_data["severity"]}.procurement',
|
||||
event_data=alert_data
|
||||
# Use UnifiedEventPublisher.publish_alert() which handles MinimalEvent format automatically
|
||||
success = await event_publisher.publish_alert(
|
||||
event_type='supply_chain.po_approval_needed', # domain.event_type format
|
||||
tenant_id=virtual_uuid,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
@@ -525,7 +615,8 @@ async def clone_demo_data(
|
||||
logger.error(
|
||||
"Failed to emit PO approval alert during cloning",
|
||||
po_id=str(po.id),
|
||||
error=str(e)
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
# Continue with other POs
|
||||
continue
|
||||
|
||||
@@ -27,6 +27,7 @@ from app.schemas.purchase_order_schemas import (
|
||||
)
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from app.utils.cache import get_cached, set_cached, make_cache_key
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
@@ -123,10 +124,11 @@ async def list_purchase_orders(
|
||||
limit: int = Query(default=50, ge=1, le=100),
|
||||
supplier_id: Optional[str] = Query(default=None),
|
||||
status: Optional[str] = Query(default=None),
|
||||
enrich_supplier: bool = Query(default=True, description="Include supplier details (slower)"),
|
||||
service: PurchaseOrderService = Depends(get_po_service)
|
||||
):
|
||||
"""
|
||||
List purchase orders with filters
|
||||
List purchase orders with filters and caching (30s TTL)
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
@@ -134,20 +136,46 @@ async def list_purchase_orders(
|
||||
limit: Maximum number of records to return
|
||||
supplier_id: Filter by supplier ID (optional)
|
||||
status: Filter by status (optional)
|
||||
enrich_supplier: Whether to enrich with supplier data (default: True)
|
||||
|
||||
Returns:
|
||||
List of purchase orders
|
||||
"""
|
||||
try:
|
||||
# PERFORMANCE OPTIMIZATION: Cache even with status filter for dashboard queries
|
||||
# Only skip cache for supplier_id filter and pagination (skip > 0)
|
||||
cache_key = None
|
||||
if skip == 0 and supplier_id is None:
|
||||
cache_key = make_cache_key(
|
||||
"purchase_orders",
|
||||
tenant_id,
|
||||
limit=limit,
|
||||
status=status, # Include status in cache key
|
||||
enrich_supplier=enrich_supplier
|
||||
)
|
||||
cached_result = await get_cached(cache_key)
|
||||
if cached_result is not None:
|
||||
logger.debug("Cache hit for purchase orders", cache_key=cache_key, tenant_id=tenant_id, status=status)
|
||||
return [PurchaseOrderResponse(**po) for po in cached_result]
|
||||
|
||||
# Cache miss - fetch from database
|
||||
pos = await service.list_purchase_orders(
|
||||
tenant_id=uuid.UUID(tenant_id),
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
supplier_id=uuid.UUID(supplier_id) if supplier_id else None,
|
||||
status=status
|
||||
status=status,
|
||||
enrich_supplier=enrich_supplier
|
||||
)
|
||||
|
||||
return [PurchaseOrderResponse.model_validate(po) for po in pos]
|
||||
result = [PurchaseOrderResponse.model_validate(po) for po in pos]
|
||||
|
||||
# PERFORMANCE OPTIMIZATION: Cache the result (20s TTL for purchase orders)
|
||||
if cache_key:
|
||||
await set_cached(cache_key, [po.model_dump() for po in result], ttl=20)
|
||||
logger.debug("Cached purchase orders", cache_key=cache_key, ttl=20, tenant_id=tenant_id, status=status)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error listing purchase orders", error=str(e), tenant_id=tenant_id)
|
||||
|
||||
@@ -11,8 +11,7 @@ from datetime import datetime, timezone
|
||||
import structlog
|
||||
|
||||
from app.services.overdue_po_detector import OverduePODetector
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging.events import BaseEvent
|
||||
from shared.messaging import RabbitMQClient
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -179,18 +178,19 @@ class OverduePOScheduler:
|
||||
'detected_at': datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
|
||||
# Create event
|
||||
event = BaseEvent(
|
||||
service_name='procurement',
|
||||
data=event_data,
|
||||
event_type='po.overdue_detected'
|
||||
)
|
||||
# Create event data structure
|
||||
event_data_full = {
|
||||
'service_name': 'procurement',
|
||||
'event_type': 'po.overdue_detected',
|
||||
'timestamp': datetime.now(timezone.utc).isoformat(),
|
||||
**event_data # Include the original event_data
|
||||
}
|
||||
|
||||
# Publish to RabbitMQ
|
||||
success = await self.rabbitmq_client.publish_event(
|
||||
exchange_name='procurement.events',
|
||||
routing_key='po.overdue',
|
||||
event_data=event.to_dict(),
|
||||
event_data=event_data_full,
|
||||
persistent=True
|
||||
)
|
||||
|
||||
|
||||
@@ -50,9 +50,11 @@ class ProcurementService(StandardFastAPIService):
|
||||
'supplier_selection_history'
|
||||
]
|
||||
|
||||
# Initialize scheduler and rabbitmq client
|
||||
# Initialize scheduler, delivery tracking, and rabbitmq client
|
||||
self.overdue_po_scheduler = None
|
||||
self.delivery_tracking_service = None
|
||||
self.rabbitmq_client = None
|
||||
self.event_publisher = None
|
||||
|
||||
super().__init__(
|
||||
service_name="procurement-service",
|
||||
@@ -67,10 +69,12 @@ class ProcurementService(StandardFastAPIService):
|
||||
|
||||
async def _setup_messaging(self):
|
||||
"""Setup messaging for procurement service"""
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
|
||||
try:
|
||||
self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="procurement-service")
|
||||
await self.rabbitmq_client.connect()
|
||||
# Create unified event publisher
|
||||
self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "procurement-service")
|
||||
self.logger.info("Procurement service messaging setup completed")
|
||||
except Exception as e:
|
||||
self.logger.error("Failed to setup procurement messaging", error=str(e))
|
||||
@@ -91,6 +95,15 @@ class ProcurementService(StandardFastAPIService):
|
||||
|
||||
self.logger.info("Procurement Service starting up...")
|
||||
|
||||
# Start delivery tracking service (APScheduler with leader election)
|
||||
from app.services.delivery_tracking_service import DeliveryTrackingService
|
||||
self.delivery_tracking_service = DeliveryTrackingService(self.event_publisher, settings)
|
||||
await self.delivery_tracking_service.start()
|
||||
self.logger.info("Delivery tracking service started")
|
||||
|
||||
# Store in app state for internal API access
|
||||
app.state.delivery_tracking_service = self.delivery_tracking_service
|
||||
|
||||
# Start overdue PO scheduler
|
||||
if self.rabbitmq_client and self.rabbitmq_client.connected:
|
||||
self.overdue_po_scheduler = OverduePOScheduler(
|
||||
@@ -106,6 +119,11 @@ class ProcurementService(StandardFastAPIService):
|
||||
"""Custom shutdown logic for procurement service"""
|
||||
self.logger.info("Procurement Service shutting down...")
|
||||
|
||||
# Stop delivery tracking service
|
||||
if self.delivery_tracking_service:
|
||||
await self.delivery_tracking_service.stop()
|
||||
self.logger.info("Delivery tracking service stopped")
|
||||
|
||||
# Stop overdue PO scheduler
|
||||
if self.overdue_po_scheduler:
|
||||
await self.overdue_po_scheduler.stop()
|
||||
@@ -142,7 +160,10 @@ from app.api import internal_transfer # Internal Transfer Routes
|
||||
from app.api import replenishment # Enhanced Replenishment Planning Routes
|
||||
from app.api import analytics # Procurement Analytics Routes
|
||||
from app.api import internal_demo
|
||||
from app.api import internal_delivery # Internal Delivery Tracking Routes
|
||||
from app.api import ml_insights # ML insights endpoint
|
||||
from app.api.expected_deliveries import router as expected_deliveries_router # Expected Deliveries Routes
|
||||
from app.api.internal_delivery_tracking import router as internal_delivery_tracking_router # NEW: Internal trigger endpoint
|
||||
|
||||
service.add_router(procurement_plans_router)
|
||||
service.add_router(purchase_orders_router)
|
||||
@@ -150,7 +171,10 @@ service.add_router(internal_transfer.router, tags=["internal-transfer"]) # Inte
|
||||
service.add_router(replenishment.router, tags=["replenishment"]) # RouteBuilder already includes full path
|
||||
service.add_router(analytics.router, tags=["analytics"]) # RouteBuilder already includes full path
|
||||
service.add_router(internal_demo.router)
|
||||
service.add_router(internal_delivery.router, tags=["internal-delivery"]) # Internal delivery tracking
|
||||
service.add_router(internal_delivery_tracking_router, tags=["internal-delivery-tracking"]) # NEW: Delivery alert trigger
|
||||
service.add_router(ml_insights.router) # ML insights endpoint
|
||||
service.add_router(expected_deliveries_router, tags=["expected-deliveries"]) # Expected deliveries endpoint
|
||||
|
||||
|
||||
@app.middleware("http")
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
"""
|
||||
Procurement messaging module
|
||||
"""
|
||||
from .event_publisher import ProcurementEventPublisher
|
||||
|
||||
__all__ = ["ProcurementEventPublisher"]
|
||||
@@ -1,275 +0,0 @@
|
||||
"""
|
||||
Procurement Service Event Publisher
|
||||
Publishes procurement-related events to RabbitMQ
|
||||
"""
|
||||
import uuid
|
||||
from typing import Optional, Dict, Any
|
||||
from decimal import Decimal
|
||||
import structlog
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging.events import (
|
||||
PurchaseOrderApprovedEvent,
|
||||
PurchaseOrderRejectedEvent,
|
||||
PurchaseOrderSentToSupplierEvent,
|
||||
DeliveryReceivedEvent
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProcurementEventPublisher:
    """Handles publishing of procurement-related events.

    All ``publish_*`` methods share one contract: build the event payload,
    wrap it in the matching typed event class, and publish it persistently to
    the ``procurement.events`` exchange. Each returns ``True`` when the broker
    accepted the message and ``False`` otherwise (including when no RabbitMQ
    client was supplied). The shared plumbing lives in ``_client_available``
    and ``_publish`` so the four public methods stay consistent.
    """

    def __init__(self, rabbitmq_client: Optional[RabbitMQClient] = None):
        # Client may be None (tests / degraded mode); publishing then becomes
        # a logged no-op instead of raising.
        self.rabbitmq_client = rabbitmq_client
        self.service_name = "procurement"

    def _client_available(self, event_name: str) -> bool:
        """Return True when a RabbitMQ client is configured; warn otherwise.

        Args:
            event_name: Routing key of the event being attempted (used only
                for the warning log).
        """
        if self.rabbitmq_client:
            return True
        logger.warning("RabbitMQ client not available, event not published", event=event_name)
        return False

    async def _publish(self, event, routing_key: str, success_message: str, **log_fields) -> bool:
        """Publish *event* to the ``procurement.events`` exchange.

        Args:
            event: Typed event object exposing ``to_dict()``.
            routing_key: Exchange routing key (e.g. ``po.approved``).
            success_message: Log message emitted when the publish succeeds.
            **log_fields: Structured fields attached to the success log.

        Returns:
            True when the broker accepted the message.
        """
        success = await self.rabbitmq_client.publish_event(
            exchange_name="procurement.events",
            routing_key=routing_key,
            event_data=event.to_dict(),
            persistent=True
        )
        if success:
            logger.info(success_message, **log_fields)
        return success

    async def publish_po_approved_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        supplier_email: Optional[str],
        supplier_phone: Optional[str],
        total_amount: Decimal,
        currency: str,
        required_delivery_date: Optional[str],
        items: list,
        approved_by: Optional[uuid.UUID],
        approved_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """
        Publish purchase order approved event

        This event triggers:
        - Email/WhatsApp notification to supplier (notification service)
        - Dashboard refresh (frontend)
        - Analytics update (reporting service)
        """
        if not self._client_available("po.approved"):
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "supplier_email": supplier_email,
            "supplier_phone": supplier_phone,
            "total_amount": float(total_amount),
            "currency": currency,
            "required_delivery_date": required_delivery_date,
            "items": [
                {
                    "inventory_product_id": str(item.get("inventory_product_id")),
                    "product_name": item.get("product_name"),
                    "ordered_quantity": float(item.get("ordered_quantity")),
                    "unit_of_measure": item.get("unit_of_measure"),
                    "unit_price": float(item.get("unit_price")),
                    "line_total": float(item.get("line_total"))
                }
                for item in items
            ],
            "approved_by": str(approved_by) if approved_by else None,
            "approved_at": approved_at,
        }

        event = PurchaseOrderApprovedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        return await self._publish(
            event,
            routing_key="po.approved",
            success_message="Published PO approved event",
            tenant_id=str(tenant_id),
            po_id=str(po_id),
            po_number=po_number,
            supplier_name=supplier_name
        )

    async def publish_po_rejected_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        rejection_reason: str,
        rejected_by: Optional[uuid.UUID],
        rejected_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """Publish purchase order rejected event"""
        if not self._client_available("po.rejected"):
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "rejection_reason": rejection_reason,
            "rejected_by": str(rejected_by) if rejected_by else None,
            "rejected_at": rejected_at,
        }

        event = PurchaseOrderRejectedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        return await self._publish(
            event,
            routing_key="po.rejected",
            success_message="Published PO rejected event",
            tenant_id=str(tenant_id),
            po_id=str(po_id),
            po_number=po_number
        )

    async def publish_po_sent_to_supplier_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        supplier_email: Optional[str],
        supplier_phone: Optional[str],
        total_amount: Decimal,
        currency: str,
        sent_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """Publish purchase order sent to supplier event"""
        if not self._client_available("po.sent_to_supplier"):
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "supplier_email": supplier_email,
            "supplier_phone": supplier_phone,
            "total_amount": float(total_amount),
            "currency": currency,
            "sent_at": sent_at,
        }

        event = PurchaseOrderSentToSupplierEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        return await self._publish(
            event,
            routing_key="po.sent_to_supplier",
            success_message="Published PO sent to supplier event",
            tenant_id=str(tenant_id),
            po_id=str(po_id),
            po_number=po_number
        )

    async def publish_delivery_received_event(
        self,
        tenant_id: uuid.UUID,
        delivery_id: uuid.UUID,
        po_id: uuid.UUID,
        items: list,
        received_at: str,
        received_by: Optional[uuid.UUID],
        correlation_id: Optional[str] = None
    ) -> bool:
        """
        Publish delivery received event

        This event triggers:
        - Automatic stock update (inventory service)
        - PO status update to 'completed'
        - Supplier performance metrics update
        """
        if not self._client_available("delivery.received"):
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "delivery_id": str(delivery_id),
            "po_id": str(po_id),
            "items": [
                {
                    "inventory_product_id": str(item.get("inventory_product_id")),
                    "accepted_quantity": float(item.get("accepted_quantity")),
                    "rejected_quantity": float(item.get("rejected_quantity", 0)),
                    "batch_lot_number": item.get("batch_lot_number"),
                    "expiry_date": item.get("expiry_date"),
                    "unit_of_measure": item.get("unit_of_measure")
                }
                for item in items
            ],
            "received_at": received_at,
            "received_by": str(received_by) if received_by else None,
        }

        event = DeliveryReceivedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        return await self._publish(
            event,
            routing_key="delivery.received",
            success_message="Published delivery received event",
            tenant_id=str(tenant_id),
            delivery_id=str(delivery_id),
            po_id=str(po_id)
        )
|
||||
@@ -0,0 +1,315 @@
|
||||
"""
|
||||
Replenishment Plan Repository
|
||||
|
||||
Provides database operations for replenishment planning, inventory projections,
|
||||
and supplier allocations.
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date
|
||||
from uuid import UUID
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, func
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.replenishment import (
|
||||
ReplenishmentPlan,
|
||||
ReplenishmentPlanItem,
|
||||
InventoryProjection,
|
||||
SupplierAllocation
|
||||
)
|
||||
from app.repositories.base_repository import BaseRepository
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ReplenishmentPlanRepository(BaseRepository[ReplenishmentPlan]):
    """Repository for replenishment plan operations."""

    def __init__(self):
        super().__init__(ReplenishmentPlan)

    @staticmethod
    def _plan_summary(plan: ReplenishmentPlan) -> Dict[str, Any]:
        """Serialize a plan row into its summary dict form."""
        return {
            "id": str(plan.id),
            "tenant_id": str(plan.tenant_id),
            "planning_date": plan.planning_date,
            "projection_horizon_days": plan.projection_horizon_days,
            "total_items": plan.total_items,
            "urgent_items": plan.urgent_items,
            "high_risk_items": plan.high_risk_items,
            "total_estimated_cost": float(plan.total_estimated_cost),
            "status": plan.status,
            "created_at": plan.created_at,
            "updated_at": plan.updated_at
        }

    @staticmethod
    def _item_dict(item: ReplenishmentPlanItem) -> Dict[str, Any]:
        """Serialize a single plan line item."""
        return {
            "id": str(item.id),
            "ingredient_id": str(item.ingredient_id),
            "ingredient_name": item.ingredient_name,
            "unit_of_measure": item.unit_of_measure,
            "base_quantity": float(item.base_quantity),
            "safety_stock_quantity": float(item.safety_stock_quantity),
            "final_order_quantity": float(item.final_order_quantity),
            "order_date": item.order_date,
            "delivery_date": item.delivery_date,
            "required_by_date": item.required_by_date,
            "lead_time_days": item.lead_time_days,
            "is_urgent": item.is_urgent,
            "urgency_reason": item.urgency_reason,
            "waste_risk": item.waste_risk,
            "stockout_risk": item.stockout_risk,
            "supplier_id": str(item.supplier_id) if item.supplier_id else None
        }

    async def list_plans(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        skip: int = 0,
        limit: int = 100,
        status: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List replenishment plans for a tenant, newest first."""
        try:
            stmt = select(ReplenishmentPlan).where(
                ReplenishmentPlan.tenant_id == tenant_id
            )
            if status:
                stmt = stmt.where(ReplenishmentPlan.status == status)

            stmt = stmt.order_by(
                ReplenishmentPlan.created_at.desc()
            ).offset(skip).limit(limit)

            rows = (await db.execute(stmt)).scalars().all()
            return [self._plan_summary(row) for row in rows]

        except Exception as e:
            logger.error("Failed to list replenishment plans", error=str(e), tenant_id=tenant_id)
            raise

    async def get_plan_by_id(
        self,
        db: AsyncSession,
        plan_id: UUID,
        tenant_id: UUID
    ) -> Optional[Dict[str, Any]]:
        """Fetch a single replenishment plan (with its items), or None."""
        try:
            stmt = select(ReplenishmentPlan).where(
                and_(
                    ReplenishmentPlan.id == plan_id,
                    ReplenishmentPlan.tenant_id == tenant_id
                )
            ).options(selectinload(ReplenishmentPlan.items))

            plan = (await db.execute(stmt)).scalar_one_or_none()
            if plan is None:
                return None

            # Detail view = summary fields + references, execution info and items.
            payload = self._plan_summary(plan)
            payload.update({
                "forecast_id": str(plan.forecast_id) if plan.forecast_id else None,
                "production_schedule_id": str(plan.production_schedule_id) if plan.production_schedule_id else None,
                "executed_at": plan.executed_at,
                "items": [self._item_dict(item) for item in plan.items]
            })
            return payload

        except Exception as e:
            logger.error("Failed to get replenishment plan", error=str(e), plan_id=plan_id)
            raise
|
||||
|
||||
|
||||
class InventoryProjectionRepository(BaseRepository[InventoryProjection]):
    """Repository for inventory projection operations."""

    def __init__(self):
        super().__init__(InventoryProjection)

    async def list_projections(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        ingredient_id: Optional[UUID] = None,
        projection_date: Optional[date] = None,
        stockout_only: bool = False,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """List inventory projections for a tenant, ordered by projection date."""
        try:
            # Collect all filter criteria first, then apply them in one where().
            criteria = [InventoryProjection.tenant_id == tenant_id]
            if ingredient_id:
                criteria.append(InventoryProjection.ingredient_id == ingredient_id)
            if projection_date:
                criteria.append(InventoryProjection.projection_date == projection_date)
            if stockout_only:
                criteria.append(InventoryProjection.is_stockout == True)

            stmt = (
                select(InventoryProjection)
                .where(*criteria)
                .order_by(InventoryProjection.projection_date.asc())
                .offset(skip)
                .limit(limit)
            )

            rows = (await db.execute(stmt)).scalars().all()

            return [
                {
                    "id": str(row.id),
                    "tenant_id": str(row.tenant_id),
                    "ingredient_id": str(row.ingredient_id),
                    "ingredient_name": row.ingredient_name,
                    "projection_date": row.projection_date,
                    "starting_stock": float(row.starting_stock),
                    "forecasted_consumption": float(row.forecasted_consumption),
                    "scheduled_receipts": float(row.scheduled_receipts),
                    "projected_ending_stock": float(row.projected_ending_stock),
                    "is_stockout": row.is_stockout,
                    "coverage_gap": float(row.coverage_gap),
                    "created_at": row.created_at
                }
                for row in rows
            ]

        except Exception as e:
            logger.error("Failed to list inventory projections", error=str(e), tenant_id=tenant_id)
            raise
|
||||
|
||||
|
||||
class SupplierAllocationRepository(BaseRepository[SupplierAllocation]):
    """Repository for supplier allocation operations."""

    def __init__(self):
        super().__init__(SupplierAllocation)

    async def list_allocations(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        requirement_id: Optional[UUID] = None,
        supplier_id: Optional[UUID] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """List supplier allocations, newest first.

        Note: the SupplierAllocation model has no tenant_id column, so the
        tenant_id argument is accepted for interface symmetry but scoping must
        happen through the requirement relationship.
        """
        try:
            stmt = select(SupplierAllocation)

            # Optional filters; tenant scoping is not possible directly (see docstring).
            if requirement_id:
                stmt = stmt.where(SupplierAllocation.requirement_id == requirement_id)
            if supplier_id:
                stmt = stmt.where(SupplierAllocation.supplier_id == supplier_id)

            stmt = stmt.order_by(
                SupplierAllocation.created_at.desc()
            ).offset(skip).limit(limit)

            rows = (await db.execute(stmt)).scalars().all()

            def serialize(alloc: SupplierAllocation) -> Dict[str, Any]:
                return {
                    "id": str(alloc.id),
                    "requirement_id": str(alloc.requirement_id) if alloc.requirement_id else None,
                    "replenishment_plan_item_id": str(alloc.replenishment_plan_item_id) if alloc.replenishment_plan_item_id else None,
                    "supplier_id": str(alloc.supplier_id),
                    "supplier_name": alloc.supplier_name,
                    "allocation_type": alloc.allocation_type,
                    "allocated_quantity": float(alloc.allocated_quantity),
                    "allocation_percentage": float(alloc.allocation_percentage),
                    "unit_price": float(alloc.unit_price),
                    "total_cost": float(alloc.total_cost),
                    "lead_time_days": alloc.lead_time_days,
                    "supplier_score": float(alloc.supplier_score),
                    "allocation_reason": alloc.allocation_reason,
                    "created_at": alloc.created_at
                }

            return [serialize(row) for row in rows]

        except Exception as e:
            logger.error("Failed to list supplier allocations", error=str(e))
            raise
|
||||
|
||||
|
||||
class ReplenishmentAnalyticsRepository:
    """Repository for replenishment analytics."""

    async def get_analytics(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None
    ) -> Dict[str, Any]:
        """Aggregate replenishment-plan statistics over an optional date range."""
        try:
            stmt = select(ReplenishmentPlan).where(
                ReplenishmentPlan.tenant_id == tenant_id
            )
            if start_date:
                stmt = stmt.where(ReplenishmentPlan.planning_date >= start_date)
            if end_date:
                stmt = stmt.where(ReplenishmentPlan.planning_date <= end_date)

            plans = (await db.execute(stmt)).scalars().all()

            # Accumulate every aggregate in a single pass over the plans.
            total_plans = len(plans)
            total_items = 0
            total_urgent = 0
            total_high_risk = 0
            total_cost = 0
            status_counts: Dict[str, int] = {}
            for plan in plans:
                total_items += plan.total_items
                total_urgent += plan.urgent_items
                total_high_risk += plan.high_risk_items
                total_cost += plan.total_estimated_cost
                status_counts[plan.status] = status_counts.get(plan.status, 0) + 1

            return {
                "total_plans": total_plans,
                "total_items": total_items,
                "total_urgent_items": total_urgent,
                "total_high_risk_items": total_high_risk,
                "total_estimated_cost": float(total_cost),
                "status_breakdown": status_counts,
                "average_items_per_plan": total_items / total_plans if total_plans > 0 else 0,
                "urgent_item_percentage": (total_urgent / total_items * 100) if total_items > 0 else 0
            }

        except Exception as e:
            logger.error("Failed to get replenishment analytics", error=str(e), tenant_id=tenant_id)
            raise
|
||||
484
services/procurement/app/services/delivery_tracking_service.py
Normal file
484
services/procurement/app/services/delivery_tracking_service.py
Normal file
@@ -0,0 +1,484 @@
|
||||
"""
|
||||
Delivery Tracking Service - Simplified
|
||||
|
||||
Tracks purchase order deliveries and generates appropriate alerts using EventPublisher:
|
||||
- DELIVERY_ARRIVING_SOON: 2 hours before delivery window
|
||||
- DELIVERY_OVERDUE: 30 minutes after expected delivery time
|
||||
- STOCK_RECEIPT_INCOMPLETE: If delivery not marked as received
|
||||
|
||||
Runs as internal scheduler with leader election.
|
||||
Domain ownership: Procurement service owns all PO and delivery tracking.
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Dict, Any, Optional, List
|
||||
from uuid import UUID, uuid4
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
from app.models.purchase_order import PurchaseOrder, PurchaseOrderStatus
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class DeliveryTrackingService:
|
||||
"""
|
||||
Monitors PO deliveries and generates time-based alerts using EventPublisher.
|
||||
|
||||
Uses APScheduler with leader election to run hourly checks.
|
||||
Only one pod executes checks (others skip if not leader).
|
||||
"""
|
||||
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher, config):
|
||||
self.publisher = event_publisher
|
||||
self.config = config
|
||||
self.scheduler = AsyncIOScheduler()
|
||||
self.is_leader = False
|
||||
self.instance_id = str(uuid4())[:8] # Short instance ID for logging
|
||||
|
||||
    async def start(self):
        """Start the delivery tracking scheduler.

        Idempotent: does nothing if the scheduler is already running.

        NOTE(review): no job is registered on the scheduler in this method,
        and none is visible earlier in the class — confirm that the periodic
        job driving _check_all_tenants is added elsewhere, otherwise the
        scheduler starts with nothing to execute.
        """
        if not self.scheduler.running:
            self.scheduler.start()
            logger.info(
                "Delivery tracking scheduler started",
                instance_id=self.instance_id
            )
|
||||
|
||||
async def stop(self):
|
||||
"""Stop the scheduler and release leader lock"""
|
||||
if self.scheduler.running:
|
||||
self.scheduler.shutdown(wait=False)
|
||||
logger.info("Delivery tracking scheduler stopped", instance_id=self.instance_id)
|
||||
|
||||
async def _check_all_tenants(self):
|
||||
"""
|
||||
Check deliveries for all active tenants (with leader election).
|
||||
|
||||
Only one pod executes this - others skip if not leader.
|
||||
"""
|
||||
# Try to acquire leader lock
|
||||
if not await self._try_acquire_leader_lock():
|
||||
logger.debug(
|
||||
"Skipping delivery check - not leader",
|
||||
instance_id=self.instance_id
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
logger.info("Starting delivery checks (as leader)", instance_id=self.instance_id)
|
||||
|
||||
# Get all active tenants from database
|
||||
tenants = await self._get_active_tenants()
|
||||
|
||||
total_alerts = 0
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
result = await self.check_expected_deliveries(tenant_id)
|
||||
total_alerts += sum(result.values())
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Delivery check failed for tenant",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Delivery checks completed",
|
||||
instance_id=self.instance_id,
|
||||
tenants_checked=len(tenants),
|
||||
total_alerts=total_alerts
|
||||
)
|
||||
|
||||
finally:
|
||||
await self._release_leader_lock()
|
||||
|
||||
    async def _try_acquire_leader_lock(self) -> bool:
        """
        Try to acquire leader lock for delivery tracking.

        Always returns True in this simplified version: no distributed lock
        is actually taken, so every pod that fires the scheduled job will run
        the checks. A production implementation should use Redis or a
        database advisory lock so only one pod becomes leader.
        """
        # This simplified version doesn't implement leader election
        # In a real implementation, you'd use Redis or database locks
        logger.info("Delivery tracking check running", instance_id=self.instance_id)
        return True
|
||||
|
||||
    async def _release_leader_lock(self):
        """Release leader lock.

        Counterpart of _try_acquire_leader_lock: since no real lock is taken
        there, this only emits a debug log marking the end of the check run.
        """
        logger.debug("Delivery tracking check completed", instance_id=self.instance_id)
|
||||
|
||||
async def _get_active_tenants(self) -> List[UUID]:
|
||||
"""
|
||||
Get all active tenants from database.
|
||||
|
||||
Returns list of tenant UUIDs that have purchase orders.
|
||||
"""
|
||||
try:
|
||||
async with self.config.database_manager.get_session() as session:
|
||||
# Get distinct tenant_ids that have purchase orders
|
||||
query = select(PurchaseOrder.tenant_id).distinct()
|
||||
result = await session.execute(query)
|
||||
tenant_ids = [row[0] for row in result.all()]
|
||||
|
||||
logger.debug("Active tenants retrieved", count=len(tenant_ids))
|
||||
return tenant_ids
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get active tenants", error=str(e))
|
||||
return []
|
||||
|
||||
async def check_expected_deliveries(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Check all expected deliveries for a tenant and generate appropriate alerts.
|
||||
|
||||
DIRECT DATABASE ACCESS - No API calls needed!
|
||||
|
||||
Called by:
|
||||
- Scheduled job (hourly at :30)
|
||||
- Manual trigger endpoint (demo cloning)
|
||||
|
||||
Returns:
|
||||
Dict with counts: {
|
||||
'arriving_soon': int,
|
||||
'overdue': int,
|
||||
'receipt_incomplete': int,
|
||||
'total_alerts': int
|
||||
}
|
||||
"""
|
||||
logger.info("Checking expected deliveries", tenant_id=str(tenant_id))
|
||||
|
||||
counts = {
|
||||
'arriving_soon': 0,
|
||||
'overdue': 0,
|
||||
'receipt_incomplete': 0
|
||||
}
|
||||
|
||||
try:
|
||||
# Get expected deliveries directly from database
|
||||
deliveries = await self._get_expected_deliveries_from_db(tenant_id)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
for delivery in deliveries:
|
||||
po_id = delivery.get('po_id')
|
||||
expected_date = delivery.get('expected_delivery_date')
|
||||
delivery_window_hours = delivery.get('delivery_window_hours', 4)
|
||||
status = delivery.get('status')
|
||||
|
||||
if not expected_date:
|
||||
continue
|
||||
|
||||
# Parse expected date
|
||||
if isinstance(expected_date, str):
|
||||
expected_date = datetime.fromisoformat(expected_date)
|
||||
|
||||
# Make timezone-aware
|
||||
if expected_date.tzinfo is None:
|
||||
expected_date = expected_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
# Calculate delivery window
|
||||
window_start = expected_date
|
||||
window_end = expected_date + timedelta(hours=delivery_window_hours)
|
||||
|
||||
# Check if arriving soon (2 hours before window)
|
||||
arriving_soon_time = window_start - timedelta(hours=2)
|
||||
if arriving_soon_time <= now < window_start and status in ['approved', 'sent_to_supplier']:
|
||||
if await self._send_arriving_soon_alert(tenant_id, delivery):
|
||||
counts['arriving_soon'] += 1
|
||||
|
||||
# Check if overdue (30 min after window end)
|
||||
overdue_time = window_end + timedelta(minutes=30)
|
||||
if now >= overdue_time and status in ['approved', 'sent_to_supplier']:
|
||||
if await self._send_overdue_alert(tenant_id, delivery):
|
||||
counts['overdue'] += 1
|
||||
|
||||
# Check if receipt incomplete (delivery window passed, not marked received)
|
||||
if now > window_end and status in ['approved', 'sent_to_supplier']:
|
||||
if await self._send_receipt_incomplete_alert(tenant_id, delivery):
|
||||
counts['receipt_incomplete'] += 1
|
||||
|
||||
counts['total_alerts'] = sum([counts['arriving_soon'], counts['overdue'], counts['receipt_incomplete']])
|
||||
|
||||
logger.info(
|
||||
"Delivery check completed",
|
||||
tenant_id=str(tenant_id),
|
||||
**counts
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error checking deliveries",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
return counts
|
||||
|
||||
    async def _get_expected_deliveries_from_db(
        self,
        tenant_id: UUID,
        days_ahead: int = 1,
        include_overdue: bool = True
    ) -> List[Dict[str, Any]]:
        """
        Query expected deliveries DIRECTLY from database (no HTTP call).

        This replaces the HTTP call to /api/internal/expected-deliveries.

        Args:
            tenant_id: Tenant whose purchase orders are inspected.
            days_ahead: Upper bound of the look-ahead window, in days from now.
            include_overdue: When True, POs whose expected date is already in
                the past are included; otherwise only dates in [now, end_date].

        Returns:
            List of delivery dicts with same structure as API endpoint.
            Best-effort: returns an empty list on any database error.
        """
        try:
            async with self.config.database_manager.get_session() as session:
                # Calculate date range
                now = datetime.now(timezone.utc)
                end_date = now + timedelta(days=days_ahead)

                # Build query for purchase orders with expected delivery dates.
                # selectinload eagerly fetches po.items so they can be read
                # after the session scope without lazy-load errors.
                query = select(PurchaseOrder).options(
                    selectinload(PurchaseOrder.items)
                ).where(
                    PurchaseOrder.tenant_id == tenant_id,
                    PurchaseOrder.expected_delivery_date.isnot(None),
                    PurchaseOrder.status.in_([
                        PurchaseOrderStatus.approved,
                        PurchaseOrderStatus.sent_to_supplier,
                        PurchaseOrderStatus.confirmed
                    ])
                )

                # Add date filters
                if include_overdue:
                    query = query.where(PurchaseOrder.expected_delivery_date <= end_date)
                else:
                    query = query.where(
                        PurchaseOrder.expected_delivery_date >= now,
                        PurchaseOrder.expected_delivery_date <= end_date
                    )

                # Order by delivery date (soonest first)
                query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

                # Execute query
                result = await session.execute(query)
                purchase_orders = result.scalars().all()

                logger.info(
                    "Expected deliveries query executed",
                    tenant_id=str(tenant_id),
                    po_count=len(purchase_orders),
                    days_ahead=days_ahead,
                    include_overdue=include_overdue,
                    now=now.isoformat(),
                    end_date=end_date.isoformat()
                )

                # Format deliveries (same structure as API endpoint)
                deliveries = []

                for po in purchase_orders:
                    # Simple supplier name extraction: placeholder derived from
                    # the supplier UUID; no supplier-service lookup is done here.
                    supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
                    supplier_phone = None

                    # Extract from notes if available.
                    # NOTE(review): hard-coded demo supplier mappings below —
                    # replace with a real supplier lookup before relying on
                    # these names/phones in production.
                    if po.notes:
                        if "Molinos San José" in po.notes:
                            supplier_name = "Molinos San José S.L."
                            supplier_phone = "+34 915 234 567"
                        elif "Lácteos del Valle" in po.notes:
                            supplier_name = "Lácteos del Valle S.A."
                            supplier_phone = "+34 913 456 789"
                        elif "Chocolates Valor" in po.notes:
                            supplier_name = "Chocolates Valor"
                            supplier_phone = "+34 965 510 062"

                    # Format line items (capped at the first 5 per PO)
                    line_items = []
                    for item in po.items[:5]:
                        line_items.append({
                            "product_name": item.product_name,
                            "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                            "unit": item.unit_of_measure or "unit"
                        })

                    delivery_dict = {
                        "po_id": str(po.id),
                        "po_number": po.po_number,
                        "supplier_id": str(po.supplier_id),
                        "supplier_name": supplier_name,
                        "supplier_phone": supplier_phone,
                        "expected_delivery_date": po.expected_delivery_date.isoformat(),
                        "delivery_window_hours": 4,  # Default
                        "status": po.status.value,
                        "line_items": line_items,
                        "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                        "currency": po.currency
                    }

                    deliveries.append(delivery_dict)

                return deliveries

        except Exception as e:
            # Best-effort: log and degrade to "no deliveries" so the caller's
            # periodic check keeps running.
            logger.error(
                "Error fetching expected deliveries from database",
                tenant_id=str(tenant_id),
                error=str(e),
                exc_info=True
            )
            return []
|
||||
|
||||
    async def _send_arriving_soon_alert(
        self,
        tenant_id: UUID,
        delivery: Dict[str, Any]
    ) -> bool:
        """
        Send DELIVERY_ARRIVING_SOON alert (2h before delivery window).

        This appears in the action queue with "Mark as Received" action.

        Args:
            tenant_id: Tenant to publish the alert for.
            delivery: Delivery dict as produced by
                ``_get_expected_deliveries_from_db`` (must contain 'po_id').

        Returns:
            True when the alert was published successfully.
        """
        po_number = delivery.get('po_number', 'N/A')
        supplier_name = delivery.get('supplier_name', 'Supplier')
        expected_date = delivery.get('expected_delivery_date')
        line_items = delivery.get('line_items', [])

        # Format product list
        # NOTE(review): product_list is built but never used (it is not part
        # of the published metadata) — dead code or a missing payload field?
        products = [item['product_name'] for item in line_items[:3]]
        product_list = ", ".join(products)
        if len(line_items) > 3:
            product_list += f" (+{len(line_items) - 3} more)"

        # Calculate time until arrival.
        # NOTE(review): assumes expected_date is not None here (callers filter
        # those out); a None value would raise AttributeError below.
        if isinstance(expected_date, str):
            expected_date = datetime.fromisoformat(expected_date)
        if expected_date.tzinfo is None:
            expected_date = expected_date.replace(tzinfo=timezone.utc)

        hours_until = (expected_date - datetime.now(timezone.utc)).total_seconds() / 3600

        metadata = {
            "po_id": delivery['po_id'],
            "po_number": po_number,
            "supplier_id": delivery.get('supplier_id'),
            "supplier_name": supplier_name,
            "supplier_phone": delivery.get('supplier_phone'),
            "expected_delivery_date": expected_date.isoformat(),
            "line_items": line_items,
            "hours_until_arrival": hours_until,
        }

        # Send alert using UnifiedEventPublisher
        success = await self.publisher.publish_alert(
            event_type="supply_chain.delivery_arriving_soon",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        if success:
            logger.info(
                "Sent arriving soon alert",
                po_number=po_number,
                supplier=supplier_name
            )

        return success
|
||||
|
||||
    async def _send_overdue_alert(
        self,
        tenant_id: UUID,
        delivery: Dict[str, Any]
    ) -> bool:
        """
        Send DELIVERY_OVERDUE alert (30min after expected window).

        Critical priority - needs immediate action (call supplier).
        NOTE(review): despite the wording above, the alert is published with
        severity "high", not a dedicated critical/urgent level — confirm intent.

        Args:
            tenant_id: Tenant to publish the alert for.
            delivery: Delivery dict (must contain 'po_id').

        Returns:
            True when the alert was published successfully.
        """
        po_number = delivery.get('po_number', 'N/A')
        supplier_name = delivery.get('supplier_name', 'Supplier')
        expected_date = delivery.get('expected_delivery_date')

        # Calculate how late.
        # NOTE(review): assumes expected_date is not None (callers filter).
        if isinstance(expected_date, str):
            expected_date = datetime.fromisoformat(expected_date)
        if expected_date.tzinfo is None:
            expected_date = expected_date.replace(tzinfo=timezone.utc)

        hours_late = (datetime.now(timezone.utc) - expected_date).total_seconds() / 3600

        metadata = {
            "po_id": delivery['po_id'],
            "po_number": po_number,
            "supplier_id": delivery.get('supplier_id'),
            "supplier_name": supplier_name,
            "supplier_phone": delivery.get('supplier_phone'),
            "expected_delivery_date": expected_date.isoformat(),
            "hours_late": hours_late,
            "financial_impact": delivery.get('total_amount', 0),
            "affected_orders": len(delivery.get('affected_production_batches', [])),
        }

        # Send alert with high severity
        success = await self.publisher.publish_alert(
            event_type="supply_chain.delivery_overdue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        if success:
            # warning (not info): overdue deliveries are operationally urgent
            logger.warning(
                "Sent overdue delivery alert",
                po_number=po_number,
                supplier=supplier_name,
                hours_late=hours_late
            )

        return success
|
||||
|
||||
async def _send_receipt_incomplete_alert(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
delivery: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Send STOCK_RECEIPT_INCOMPLETE alert.
|
||||
|
||||
Delivery window has passed but stock not marked as received.
|
||||
"""
|
||||
po_number = delivery.get('po_number', 'N/A')
|
||||
supplier_name = delivery.get('supplier_name', 'Supplier')
|
||||
|
||||
metadata = {
|
||||
"po_id": delivery['po_id'],
|
||||
"po_number": po_number,
|
||||
"supplier_id": delivery.get('supplier_id'),
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": delivery.get('expected_delivery_date'),
|
||||
}
|
||||
|
||||
# Send alert using UnifiedEventPublisher
|
||||
success = await self.publisher.publish_alert(
|
||||
event_type="supply_chain.stock_receipt_incomplete",
|
||||
tenant_id=tenant_id,
|
||||
severity="medium",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
"Sent receipt incomplete alert",
|
||||
po_number=po_number
|
||||
)
|
||||
|
||||
return success
|
||||
416
services/procurement/app/services/procurement_alert_service.py
Normal file
416
services/procurement/app/services/procurement_alert_service.py
Normal file
@@ -0,0 +1,416 @@
|
||||
"""
|
||||
Procurement Alert Service - Simplified
|
||||
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import List, Dict, Any, Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProcurementAlertService:
    """Simplified procurement alert service using UnifiedEventPublisher.

    Each emit_* method publishes a minimal supply-chain event payload;
    enrichment (titles, messages, routing) is handled downstream by the
    alert processor.
    """

    def __init__(self, event_publisher: UnifiedEventPublisher):
        # Shared publisher used by every emit_* method.
        self.publisher = event_publisher
|
||||
|
||||
    async def emit_po_approval_needed(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        total_amount: float,
        currency: str,
        items_count: int,
        required_delivery_date: str
    ):
        """Emit PO approval needed event.

        Publishes a high-severity ``supply_chain.po_approval_needed`` alert
        carrying the PO identity, amount and required delivery date.
        """

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "total_amount": total_amount,
            "po_amount": total_amount,  # Alias for compatibility
            "currency": currency,
            "items_count": items_count,
            "required_delivery_date": required_delivery_date
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.po_approval_needed",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "po_approval_needed_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            total_amount=total_amount
        )
|
||||
|
||||
async def emit_delivery_overdue(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
po_id: UUID,
|
||||
po_number: str,
|
||||
supplier_name: str,
|
||||
supplier_contact: Optional[str],
|
||||
expected_date: str,
|
||||
days_overdue: int,
|
||||
items: List[Dict[str, Any]]
|
||||
):
|
||||
"""Emit delivery overdue alert"""
|
||||
|
||||
# Determine severity based on days overdue
|
||||
if days_overdue > 7:
|
||||
severity = "urgent"
|
||||
elif days_overdue > 3:
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
metadata = {
|
||||
"po_id": str(po_id),
|
||||
"po_number": po_number,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_date": expected_date,
|
||||
"days_overdue": days_overdue,
|
||||
"items": items,
|
||||
"items_count": len(items)
|
||||
}
|
||||
|
||||
if supplier_contact:
|
||||
metadata["supplier_contact"] = supplier_contact
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.delivery_overdue",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"delivery_overdue_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_number=po_number,
|
||||
days_overdue=days_overdue
|
||||
)
|
||||
|
||||
async def emit_supplier_performance_issue(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
supplier_id: UUID,
|
||||
supplier_name: str,
|
||||
issue_type: str,
|
||||
issue_description: str,
|
||||
affected_orders: int = 0,
|
||||
total_value_affected: Optional[float] = None
|
||||
):
|
||||
"""Emit supplier performance issue alert"""
|
||||
|
||||
metadata = {
|
||||
"supplier_id": str(supplier_id),
|
||||
"supplier_name": supplier_name,
|
||||
"issue_type": issue_type,
|
||||
"issue_description": issue_description,
|
||||
"affected_orders": affected_orders
|
||||
}
|
||||
|
||||
if total_value_affected:
|
||||
metadata["total_value_affected"] = total_value_affected
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.supplier_performance_issue",
|
||||
tenant_id=tenant_id,
|
||||
severity="high",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"supplier_performance_issue_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
supplier_name=supplier_name,
|
||||
issue_type=issue_type
|
||||
)
|
||||
|
||||
async def emit_price_increase_alert(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
supplier_id: UUID,
|
||||
supplier_name: str,
|
||||
ingredient_name: str,
|
||||
old_price: float,
|
||||
new_price: float,
|
||||
increase_percent: float
|
||||
):
|
||||
"""Emit price increase alert"""
|
||||
|
||||
metadata = {
|
||||
"supplier_id": str(supplier_id),
|
||||
"supplier_name": supplier_name,
|
||||
"ingredient_name": ingredient_name,
|
||||
"old_price": old_price,
|
||||
"new_price": new_price,
|
||||
"increase_percent": increase_percent
|
||||
}
|
||||
|
||||
# Determine severity based on increase
|
||||
if increase_percent > 20:
|
||||
severity = "high"
|
||||
elif increase_percent > 10:
|
||||
severity = "medium"
|
||||
else:
|
||||
severity = "low"
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.price_increase",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"price_increase_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
increase_percent=increase_percent
|
||||
)
|
||||
|
||||
    async def emit_partial_delivery(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        ordered_quantity: float,
        delivered_quantity: float,
        missing_quantity: float,
        ingredient_name: str
    ):
        """Emit partial delivery alert.

        Publishes a medium-severity ``supply_chain.partial_delivery`` event
        with ordered/delivered/missing quantities for a single ingredient.
        """

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "ordered_quantity": ordered_quantity,
            "delivered_quantity": delivered_quantity,
            "missing_quantity": missing_quantity,
            "ingredient_name": ingredient_name
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.partial_delivery",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "partial_delivery_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            missing_quantity=missing_quantity
        )
|
||||
|
||||
    async def emit_delivery_quality_issue(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        issue_description: str,
        affected_items: List[Dict[str, Any]],
        requires_return: bool = False
    ):
        """Emit delivery quality issue alert.

        Publishes a high-severity ``supply_chain.delivery_quality_issue``
        event listing the affected items and whether a return is needed.
        """

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "issue_description": issue_description,
            "affected_items": affected_items,
            "requires_return": requires_return,
            "affected_items_count": len(affected_items)
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_quality_issue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "delivery_quality_issue_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            requires_return=requires_return
        )
|
||||
|
||||
    async def emit_low_supplier_rating(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        current_rating: float,
        issues_count: int,
        recommendation: str
    ):
        """Emit low supplier rating alert.

        Publishes a medium-severity ``supply_chain.low_supplier_rating``
        event with the current rating and a textual recommendation.
        """

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "current_rating": current_rating,
            "issues_count": issues_count,
            "recommendation": recommendation
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.low_supplier_rating",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "low_supplier_rating_emitted",
            tenant_id=str(tenant_id),
            supplier_name=supplier_name,
            current_rating=current_rating
        )
|
||||
|
||||
# Recommendation methods
|
||||
|
||||
    async def emit_supplier_consolidation(
        self,
        tenant_id: UUID,
        current_suppliers_count: int,
        suggested_suppliers: List[str],
        potential_savings_eur: float
    ):
        """Emit supplier consolidation recommendation.

        Published as a recommendation (not an alert), so it carries no
        severity level.
        """

        metadata = {
            "current_suppliers_count": current_suppliers_count,
            "suggested_suppliers": suggested_suppliers,
            "potential_savings_eur": potential_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.supplier_consolidation",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "supplier_consolidation_emitted",
            tenant_id=str(tenant_id),
            potential_savings=potential_savings_eur
        )
|
||||
|
||||
    async def emit_bulk_purchase_opportunity(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_order_frequency: int,
        suggested_bulk_size: float,
        potential_discount_percent: float,
        estimated_savings_eur: float
    ):
        """Emit bulk purchase opportunity recommendation.

        Published as a recommendation (no severity) suggesting a larger
        order size for a frequently purchased ingredient.
        """

        metadata = {
            "ingredient_name": ingredient_name,
            "current_order_frequency": current_order_frequency,
            "suggested_bulk_size": suggested_bulk_size,
            "potential_discount_percent": potential_discount_percent,
            "estimated_savings_eur": estimated_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.bulk_purchase_opportunity",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "bulk_purchase_opportunity_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name,
            estimated_savings=estimated_savings_eur
        )
|
||||
|
||||
    async def emit_alternative_supplier_suggestion(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_supplier: str,
        alternative_supplier: str,
        price_difference_eur: float,
        quality_rating: float
    ):
        """Emit alternative supplier suggestion.

        Published as a recommendation (no severity) comparing the current
        supplier against a candidate alternative.
        """

        metadata = {
            "ingredient_name": ingredient_name,
            "current_supplier": current_supplier,
            "alternative_supplier": alternative_supplier,
            "price_difference_eur": price_difference_eur,
            "quality_rating": quality_rating
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.alternative_supplier_suggestion",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "alternative_supplier_suggestion_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )
|
||||
|
||||
    async def emit_reorder_point_optimization(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_reorder_point: float,
        suggested_reorder_point: float,
        rationale: str
    ):
        """Emit reorder point optimization recommendation.

        Published as a recommendation (no severity) proposing a new reorder
        point for an ingredient together with the rationale.
        """

        metadata = {
            "ingredient_name": ingredient_name,
            "current_reorder_point": current_reorder_point,
            "suggested_reorder_point": suggested_reorder_point,
            "rationale": rationale
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.reorder_point_optimization",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "reorder_point_optimization_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )
|
||||
@@ -1,16 +1,15 @@
|
||||
"""
|
||||
Procurement Event Service
|
||||
Procurement Event Service - Simplified
|
||||
|
||||
Emits both ALERTS and NOTIFICATIONS for procurement/supply chain events:
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
|
||||
ALERTS (actionable):
|
||||
- po_approval_needed: Purchase order requires approval
|
||||
- po_approval_escalation: PO pending approval too long
|
||||
- delivery_overdue: Delivery past expected date
|
||||
|
||||
NOTIFICATIONS (informational):
|
||||
- po_approved: Purchase order approved
|
||||
- po_rejected: Purchase order rejected
|
||||
- po_sent_to_supplier: PO sent to supplier
|
||||
- delivery_scheduled: Delivery confirmed
|
||||
- delivery_arriving_soon: Delivery arriving within hours
|
||||
@@ -20,25 +19,23 @@ This service demonstrates the mixed event model where a single domain
|
||||
emits both actionable alerts and informational notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List
|
||||
from sqlalchemy.orm import Session
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
|
||||
from shared.alerts.base_service import BaseAlertService
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProcurementEventService(BaseAlertService):
|
||||
class ProcurementEventService:
|
||||
"""
|
||||
Service for emitting procurement/supply chain events (both alerts and notifications).
|
||||
Service for emitting procurement/supply chain events using EventPublisher.
|
||||
"""
|
||||
|
||||
def __init__(self, rabbitmq_url: str = None):
|
||||
super().__init__(service_name="procurement", rabbitmq_url=rabbitmq_url)
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher):
|
||||
self.publisher = event_publisher
|
||||
|
||||
# ============================================================
|
||||
# ALERTS (Actionable)
|
||||
@@ -46,112 +43,93 @@ class ProcurementEventService(BaseAlertService):
|
||||
|
||||
async def emit_po_approval_needed_alert(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
total_amount_eur: float,
|
||||
items_count: int,
|
||||
urgency_reason: str,
|
||||
delivery_needed_by: Optional[datetime] = None,
|
||||
delivery_needed_by: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit ALERT when purchase order requires approval.
|
||||
|
||||
This is an ALERT (not notification) because it requires user action.
|
||||
"""
|
||||
try:
|
||||
message = f"Purchase order from {supplier_name} needs approval (€{total_amount_eur:.2f}, {items_count} items)"
|
||||
if delivery_needed_by:
|
||||
days_until_needed = (delivery_needed_by - datetime.now(timezone.utc)).days
|
||||
message += f" - Needed in {days_until_needed} days"
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"po_number": po_id, # Add po_number for template compatibility
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": float(total_amount_eur),
|
||||
"total_amount": float(total_amount_eur), # Add total_amount for template compatibility
|
||||
"currency": "EUR", # Add currency for template compatibility
|
||||
"items_count": items_count,
|
||||
"urgency_reason": urgency_reason,
|
||||
"delivery_needed_by": delivery_needed_by,
|
||||
"required_delivery_date": delivery_needed_by, # Add for template compatibility
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.ALERT,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_approval_needed",
|
||||
title=f"Approval Required: PO from {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
actions=["approve_po", "reject_po", "view_po_details"],
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": total_amount_eur,
|
||||
"items_count": items_count,
|
||||
"urgency_reason": urgency_reason,
|
||||
"delivery_needed_by": delivery_needed_by.isoformat() if delivery_needed_by else None,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
# Determine severity based on amount and urgency
|
||||
if total_amount_eur > 1000 or "expedited" in urgency_reason.lower():
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="alert")
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.po_approval_needed",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"PO approval alert emitted: {po_id} (€{total_amount_eur})",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO approval alert: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_approval_needed_alert_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id,
|
||||
total_amount_eur=total_amount_eur
|
||||
)
|
||||
|
||||
async def emit_delivery_overdue_alert(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
expected_date: datetime,
|
||||
expected_date: str,
|
||||
days_overdue: int,
|
||||
items_affected: List[Dict[str, Any]],
|
||||
) -> None:
|
||||
"""
|
||||
Emit ALERT when delivery is overdue.
|
||||
|
||||
This is an ALERT because it may require contacting supplier or adjusting plans.
|
||||
"""
|
||||
try:
|
||||
message = f"Delivery from {supplier_name} is {days_overdue} days overdue (expected {expected_date.strftime('%Y-%m-%d')})"
|
||||
# Determine severity based on days overdue
|
||||
if days_overdue > 7:
|
||||
severity = "urgent"
|
||||
elif days_overdue > 3:
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.ALERT,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_overdue",
|
||||
title=f"Delivery Overdue: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
actions=["call_supplier", "adjust_production", "find_alternative"],
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_date": expected_date.isoformat(),
|
||||
"days_overdue": days_overdue,
|
||||
"items_affected": items_affected,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_date": expected_date,
|
||||
"days_overdue": days_overdue,
|
||||
"items_affected": items_affected,
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="alert")
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.delivery_overdue",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery overdue alert emitted: {delivery_id} ({days_overdue} days)",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery overdue alert: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_overdue_alert_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id,
|
||||
days_overdue=days_overdue
|
||||
)
|
||||
|
||||
# ============================================================
|
||||
# NOTIFICATIONS (Informational)
|
||||
@@ -159,61 +137,40 @@ class ProcurementEventService(BaseAlertService):
|
||||
|
||||
async def emit_po_approved_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
total_amount_eur: float,
|
||||
approved_by: str,
|
||||
expected_delivery_date: Optional[datetime] = None,
|
||||
expected_delivery_date: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when purchase order is approved.
|
||||
|
||||
This is a NOTIFICATION (not alert) - informational only, no action needed.
|
||||
"""
|
||||
try:
|
||||
message = f"Purchase order to {supplier_name} approved by {approved_by} (€{total_amount_eur:.2f})"
|
||||
if expected_delivery_date:
|
||||
message += f" - Expected delivery: {expected_delivery_date.strftime('%Y-%m-%d')}"
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": float(total_amount_eur),
|
||||
"approved_by": approved_by,
|
||||
"expected_delivery_date": expected_delivery_date,
|
||||
"approved_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_approved",
|
||||
title=f"PO Approved: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": total_amount_eur,
|
||||
"approved_by": approved_by,
|
||||
"expected_delivery_date": expected_delivery_date.isoformat() if expected_delivery_date else None,
|
||||
"approved_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.po_approved",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
|
||||
logger.info(
|
||||
f"PO approved notification emitted: {po_id}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO approved notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_approved_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id
|
||||
)
|
||||
|
||||
async def emit_po_sent_to_supplier_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
supplier_email: str,
|
||||
@@ -221,136 +178,90 @@ class ProcurementEventService(BaseAlertService):
|
||||
"""
|
||||
Emit NOTIFICATION when PO is sent to supplier.
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_sent_to_supplier",
|
||||
title=f"PO Sent: {supplier_name}",
|
||||
message=f"Purchase order sent to {supplier_name} ({supplier_email})",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_email": supplier_email,
|
||||
"sent_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_email": supplier_email,
|
||||
"sent_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.po_sent_to_supplier",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"PO sent notification emitted: {po_id}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO sent notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_sent_to_supplier_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id
|
||||
)
|
||||
|
||||
async def emit_delivery_scheduled_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
expected_delivery_date: datetime,
|
||||
expected_delivery_date: str,
|
||||
tracking_number: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is scheduled/confirmed.
|
||||
"""
|
||||
try:
|
||||
message = f"Delivery from {supplier_name} scheduled for {expected_delivery_date.strftime('%Y-%m-%d %H:%M')}"
|
||||
if tracking_number:
|
||||
message += f" (Tracking: {tracking_number})"
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": expected_delivery_date,
|
||||
"tracking_number": tracking_number,
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_scheduled",
|
||||
title=f"Delivery Scheduled: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": expected_delivery_date.isoformat(),
|
||||
"tracking_number": tracking_number,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_scheduled",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
|
||||
logger.info(
|
||||
f"Delivery scheduled notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery scheduled notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_scheduled_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
|
||||
async def emit_delivery_arriving_soon_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
supplier_name: str,
|
||||
expected_arrival_time: datetime,
|
||||
expected_arrival_time: str,
|
||||
hours_until_arrival: int,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is arriving soon (within hours).
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_arriving_soon",
|
||||
title=f"Delivery Arriving Soon: {supplier_name}",
|
||||
message=f"Delivery from {supplier_name} arriving in {hours_until_arrival} hours",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_arrival_time": expected_arrival_time.isoformat(),
|
||||
"hours_until_arrival": hours_until_arrival,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_arrival_time": expected_arrival_time,
|
||||
"hours_until_arrival": hours_until_arrival,
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_arriving_soon",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery arriving soon notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery arriving soon notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_arriving_soon_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
|
||||
async def emit_delivery_received_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
@@ -360,36 +271,23 @@ class ProcurementEventService(BaseAlertService):
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is received.
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_received",
|
||||
title=f"Delivery Received: {supplier_name}",
|
||||
message=f"Received {items_received} items from {supplier_name} - Checked by {received_by}",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"items_received": items_received,
|
||||
"received_by": received_by,
|
||||
"received_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"items_received": items_received,
|
||||
"received_by": received_by,
|
||||
"received_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_received",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery received notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery received notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_received_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
@@ -31,7 +31,7 @@ from shared.clients.forecast_client import ForecastServiceClient
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
from shared.clients.recipes_client import RecipesServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging import RabbitMQClient
|
||||
from shared.monitoring.decorators import monitor_performance
|
||||
from shared.utils.tenant_settings_client import TenantSettingsClient
|
||||
|
||||
|
||||
@@ -30,9 +30,10 @@ from app.schemas.purchase_order_schemas import (
|
||||
)
|
||||
from app.core.config import settings
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.messaging.event_publisher import ProcurementEventPublisher
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher, EVENT_TYPES
|
||||
from app.utils.cache import delete_cached, make_cache_key
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -46,7 +47,8 @@ class PurchaseOrderService:
|
||||
config: BaseServiceSettings,
|
||||
suppliers_client: Optional[SuppliersServiceClient] = None,
|
||||
rabbitmq_client: Optional[RabbitMQClient] = None,
|
||||
event_publisher: Optional[ProcurementEventPublisher] = None
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None,
|
||||
inventory_client: Optional[InventoryServiceClient] = None
|
||||
):
|
||||
self.db = db
|
||||
self.config = config
|
||||
@@ -58,9 +60,16 @@ class PurchaseOrderService:
|
||||
# Initialize suppliers client for supplier validation
|
||||
self.suppliers_client = suppliers_client or SuppliersServiceClient(config)
|
||||
|
||||
# Initialize inventory client for stock information
|
||||
self.inventory_client = inventory_client or InventoryServiceClient(config)
|
||||
|
||||
# Initialize event publisher for RabbitMQ events
|
||||
self.rabbitmq_client = rabbitmq_client
|
||||
self.event_publisher = event_publisher or ProcurementEventPublisher(rabbitmq_client)
|
||||
self.event_publisher = event_publisher or UnifiedEventPublisher(rabbitmq_client, "procurement")
|
||||
|
||||
# Request-scoped cache for supplier data to avoid redundant API calls
|
||||
# When enriching multiple POs with the same supplier, cache prevents duplicate calls
|
||||
self._supplier_cache: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER CRUD
|
||||
@@ -210,9 +219,24 @@ class PurchaseOrderService:
|
||||
skip: int = 0,
|
||||
limit: int = 50,
|
||||
supplier_id: Optional[uuid.UUID] = None,
|
||||
status: Optional[str] = None
|
||||
status: Optional[str] = None,
|
||||
enrich_supplier: bool = True
|
||||
) -> List[PurchaseOrder]:
|
||||
"""List purchase orders with filters"""
|
||||
"""
|
||||
List purchase orders with filters
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
skip: Number of records to skip
|
||||
limit: Maximum number of records
|
||||
supplier_id: Optional supplier filter
|
||||
status: Optional status filter
|
||||
enrich_supplier: Whether to fetch and attach supplier details (default: True)
|
||||
Set to False for faster queries when supplier data isn't needed
|
||||
|
||||
Returns:
|
||||
List of purchase orders
|
||||
"""
|
||||
try:
|
||||
# Convert status string to enum if provided
|
||||
status_enum = None
|
||||
@@ -233,9 +257,14 @@ class PurchaseOrderService:
|
||||
status=status_enum
|
||||
)
|
||||
|
||||
# Enrich with supplier information
|
||||
for po in pos:
|
||||
await self._enrich_po_with_supplier(tenant_id, po)
|
||||
# Only enrich with supplier information if requested
|
||||
# When enrich_supplier=False, returns POs with just supplier_id for client-side matching
|
||||
if pos and enrich_supplier:
|
||||
import asyncio
|
||||
# Enrich with supplier information in parallel (Fix #9: Avoid N+1 query pattern)
|
||||
# This fetches all supplier data concurrently instead of sequentially
|
||||
enrichment_tasks = [self._enrich_po_with_supplier(tenant_id, po) for po in pos]
|
||||
await asyncio.gather(*enrichment_tasks, return_exceptions=True)
|
||||
|
||||
return pos
|
||||
except Exception as e:
|
||||
@@ -366,6 +395,25 @@ class PurchaseOrderService:
|
||||
po = await self.po_repo.update_po(po_id, tenant_id, update_data)
|
||||
await self.db.commit()
|
||||
|
||||
# PHASE 2: Invalidate purchase orders cache
|
||||
cache_key = make_cache_key("purchase_orders", str(tenant_id))
|
||||
await delete_cached(cache_key)
|
||||
logger.debug("Invalidated purchase orders cache", cache_key=cache_key, tenant_id=str(tenant_id))
|
||||
|
||||
# Acknowledge PO approval alerts (non-blocking)
|
||||
try:
|
||||
from shared.clients.alert_processor_client import get_alert_processor_client
|
||||
alert_client = get_alert_processor_client(self.config, "procurement")
|
||||
await alert_client.acknowledge_alerts_by_metadata(
|
||||
tenant_id=tenant_id,
|
||||
alert_type="po_approval_needed",
|
||||
metadata_filter={"po_id": str(po_id)}
|
||||
)
|
||||
logger.debug("Acknowledged PO approval alerts", po_id=po_id)
|
||||
except Exception as e:
|
||||
# Log but don't fail the approval process
|
||||
logger.warning("Failed to acknowledge PO approval alerts", po_id=po_id, error=str(e))
|
||||
|
||||
logger.info("Purchase order approved successfully", po_id=po_id)
|
||||
|
||||
# Publish PO approved event (non-blocking, fire-and-forget)
|
||||
@@ -384,20 +432,25 @@ class PurchaseOrderService:
|
||||
for item in items
|
||||
]
|
||||
|
||||
await self.event_publisher.publish_po_approved_event(
|
||||
event_data = {
|
||||
"po_id": str(po_id),
|
||||
"po_number": po.po_number,
|
||||
"supplier_id": str(po.supplier_id),
|
||||
"supplier_name": supplier.get('name', ''),
|
||||
"supplier_email": supplier.get('email'),
|
||||
"supplier_phone": supplier.get('phone'),
|
||||
"total_amount": float(po.total_amount),
|
||||
"currency": po.currency,
|
||||
"required_delivery_date": po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
"items": items_data,
|
||||
"approved_by": str(approved_by),
|
||||
"approved_at": po.approved_at.isoformat()
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.PO_APPROVED,
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
po_number=po.po_number,
|
||||
supplier_id=po.supplier_id,
|
||||
supplier_name=supplier.get('name', ''),
|
||||
supplier_email=supplier.get('email'),
|
||||
supplier_phone=supplier.get('phone'),
|
||||
total_amount=po.total_amount,
|
||||
currency=po.currency,
|
||||
required_delivery_date=po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
items=items_data,
|
||||
approved_by=approved_by,
|
||||
approved_at=po.approved_at.isoformat()
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the approval if event publishing fails
|
||||
@@ -449,15 +502,20 @@ class PurchaseOrderService:
|
||||
|
||||
# Publish PO rejected event (non-blocking, fire-and-forget)
|
||||
try:
|
||||
await self.event_publisher.publish_po_rejected_event(
|
||||
event_data = {
|
||||
"po_id": str(po_id),
|
||||
"po_number": po.po_number,
|
||||
"supplier_id": str(po.supplier_id),
|
||||
"supplier_name": supplier.get('name', ''),
|
||||
"rejection_reason": rejection_reason,
|
||||
"rejected_by": str(rejected_by),
|
||||
"rejected_at": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.PO_REJECTED,
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
po_number=po.po_number,
|
||||
supplier_id=po.supplier_id,
|
||||
supplier_name=supplier.get('name', ''),
|
||||
rejection_reason=rejection_reason,
|
||||
rejected_by=rejected_by,
|
||||
rejected_at=datetime.utcnow().isoformat()
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the rejection if event publishing fails
|
||||
@@ -600,13 +658,18 @@ class PurchaseOrderService:
|
||||
"rejection_reason": item_data.rejection_reason
|
||||
})
|
||||
|
||||
await self.event_publisher.publish_delivery_received_event(
|
||||
event_data = {
|
||||
"delivery_id": str(delivery.id),
|
||||
"po_id": str(delivery_data.purchase_order_id),
|
||||
"items": items_data,
|
||||
"received_at": datetime.utcnow().isoformat(),
|
||||
"received_by": str(created_by)
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.DELIVERY_RECEIVED,
|
||||
tenant_id=tenant_id,
|
||||
delivery_id=delivery.id,
|
||||
po_id=delivery_data.purchase_order_id,
|
||||
items=items_data,
|
||||
received_at=datetime.utcnow().isoformat(),
|
||||
received_by=created_by
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the delivery creation if event publishing fails
|
||||
@@ -728,6 +791,19 @@ class PurchaseOrderService:
|
||||
) -> None:
|
||||
"""Emit raw alert for PO approval needed with structured parameters"""
|
||||
try:
|
||||
# Calculate urgency fields based on required delivery date
|
||||
now = datetime.utcnow()
|
||||
hours_until_consequence = None
|
||||
deadline = None
|
||||
|
||||
if purchase_order.required_delivery_date:
|
||||
# Deadline for approval is the required delivery date minus supplier lead time
|
||||
# We need to approve it early enough for supplier to deliver on time
|
||||
supplier_lead_time_days = supplier.get('standard_lead_time', 7)
|
||||
approval_deadline = purchase_order.required_delivery_date - timedelta(days=supplier_lead_time_days)
|
||||
deadline = approval_deadline
|
||||
hours_until_consequence = (approval_deadline - now).total_seconds() / 3600
|
||||
|
||||
# Prepare alert payload matching RawAlert schema
|
||||
alert_data = {
|
||||
'id': str(uuid.uuid4()), # Generate unique alert ID
|
||||
@@ -753,8 +829,13 @@ class PurchaseOrderService:
|
||||
# Add urgency context for dashboard prioritization
|
||||
'financial_impact': float(purchase_order.total_amount),
|
||||
'urgency_score': 85, # Default high urgency for pending approvals
|
||||
# Include reasoning data from orchestrator (if available)
|
||||
'reasoning_data': purchase_order.reasoning_data if purchase_order.reasoning_data else None
|
||||
# CRITICAL: Add deadline and hours_until_consequence for enrichment service
|
||||
'deadline': deadline.isoformat() if deadline else None,
|
||||
'hours_until_consequence': round(hours_until_consequence, 1) if hours_until_consequence else None,
|
||||
# Include reasoning data from orchestrator OR build from inventory service
|
||||
'reasoning_data': purchase_order.reasoning_data or await self._build_reasoning_data_fallback(
|
||||
tenant_id, purchase_order, supplier
|
||||
)
|
||||
},
|
||||
'message_params': {
|
||||
'po_number': purchase_order.po_number,
|
||||
@@ -792,6 +873,147 @@ class PurchaseOrderService:
|
||||
)
|
||||
raise
|
||||
|
||||
async def _build_reasoning_data_fallback(
|
||||
self,
|
||||
tenant_id: uuid.UUID,
|
||||
purchase_order: PurchaseOrder,
|
||||
supplier: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""Build rich reasoning data by querying inventory service for actual stock levels
|
||||
|
||||
This method is called when a PO doesn't have reasoning_data (e.g., manually created POs).
|
||||
It queries the inventory service to get real stock levels and builds structured reasoning
|
||||
that can be translated via i18n on the frontend.
|
||||
"""
|
||||
try:
|
||||
# Query inventory service for actual stock levels
|
||||
critical_products = []
|
||||
min_depletion_hours = float('inf')
|
||||
product_names = []
|
||||
|
||||
# Get items from PO - handle both relationship and explicit loading
|
||||
items = purchase_order.items if hasattr(purchase_order, 'items') else []
|
||||
|
||||
for item in items:
|
||||
product_names.append(item.product_name)
|
||||
|
||||
# Only query if we have ingredient_id
|
||||
if not hasattr(item, 'ingredient_id') or not item.ingredient_id:
|
||||
continue
|
||||
|
||||
try:
|
||||
# Call inventory service to get current stock - with 2 second timeout
|
||||
stock_entries = await self.inventory_client.get_ingredient_stock(
|
||||
ingredient_id=item.ingredient_id,
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
|
||||
if stock_entries:
|
||||
# Calculate total available stock
|
||||
total_stock = sum(entry.get('quantity', 0) for entry in stock_entries)
|
||||
|
||||
# Estimate daily usage (this would ideally come from forecast service)
|
||||
# For now, use a simple heuristic: if PO quantity is X, daily usage might be X/7
|
||||
estimated_daily_usage = item.quantity / 7.0 if item.quantity else 1.0
|
||||
|
||||
if estimated_daily_usage > 0:
|
||||
hours_until_depletion = (total_stock / estimated_daily_usage) * 24
|
||||
|
||||
# Mark as critical if less than 48 hours (2 days)
|
||||
if hours_until_depletion < 48:
|
||||
critical_products.append(item.product_name)
|
||||
min_depletion_hours = min(min_depletion_hours, hours_until_depletion)
|
||||
|
||||
logger.info(
|
||||
"Calculated stock depletion for PO item",
|
||||
tenant_id=str(tenant_id),
|
||||
product=item.product_name,
|
||||
current_stock=total_stock,
|
||||
hours_until_depletion=round(hours_until_depletion, 1)
|
||||
)
|
||||
|
||||
except Exception as item_error:
|
||||
logger.warning(
|
||||
"Failed to get stock for PO item",
|
||||
error=str(item_error),
|
||||
product=item.product_name,
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
# Continue with other items even if one fails
|
||||
continue
|
||||
|
||||
# Build rich reasoning data based on what we found
|
||||
if critical_products:
|
||||
# Use detailed reasoning type when we have critical products
|
||||
return {
|
||||
"type": "low_stock_detection_detailed",
|
||||
"parameters": {
|
||||
"supplier_name": supplier.get('name', 'Supplier'),
|
||||
"product_names": product_names,
|
||||
"product_count": len(product_names),
|
||||
"critical_products": critical_products,
|
||||
"critical_product_count": len(critical_products),
|
||||
"min_depletion_hours": round(min_depletion_hours, 1) if min_depletion_hours != float('inf') else 48,
|
||||
"potential_loss_eur": float(purchase_order.total_amount * 1.5), # Estimated opportunity cost
|
||||
},
|
||||
"consequence": {
|
||||
"type": "stockout_risk",
|
||||
"severity": "high",
|
||||
"impact_days": 2
|
||||
},
|
||||
"metadata": {
|
||||
"trigger_source": "manual_with_inventory_check",
|
||||
"ai_assisted": False,
|
||||
"enhanced_mode": True
|
||||
}
|
||||
}
|
||||
else:
|
||||
# Use basic reasoning type when stock levels are not critical
|
||||
return {
|
||||
"type": "low_stock_detection",
|
||||
"parameters": {
|
||||
"supplier_name": supplier.get('name', 'Supplier'),
|
||||
"product_names": product_names,
|
||||
"product_count": len(product_names),
|
||||
},
|
||||
"consequence": {
|
||||
"type": "stockout_risk",
|
||||
"severity": "medium",
|
||||
"impact_days": 5
|
||||
},
|
||||
"metadata": {
|
||||
"trigger_source": "manual_with_inventory_check",
|
||||
"ai_assisted": False,
|
||||
"enhanced_mode": False
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to build enhanced reasoning data, using basic fallback",
|
||||
error=str(e),
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=str(purchase_order.id)
|
||||
)
|
||||
# Return basic fallback if inventory service is unavailable
|
||||
return {
|
||||
"type": "low_stock_detection",
|
||||
"parameters": {
|
||||
"supplier_name": supplier.get('name', 'Supplier'),
|
||||
"product_names": [item.product_name for item in (purchase_order.items if hasattr(purchase_order, 'items') else [])],
|
||||
"product_count": len(purchase_order.items) if hasattr(purchase_order, 'items') else 0,
|
||||
},
|
||||
"consequence": {
|
||||
"type": "stockout_risk",
|
||||
"severity": "medium",
|
||||
"impact_days": 5
|
||||
},
|
||||
"metadata": {
|
||||
"trigger_source": "fallback_basic",
|
||||
"ai_assisted": False
|
||||
}
|
||||
}
|
||||
|
||||
async def _get_and_validate_supplier(self, tenant_id: uuid.UUID, supplier_id: uuid.UUID) -> Dict[str, Any]:
|
||||
"""Get and validate supplier from Suppliers Service"""
|
||||
try:
|
||||
@@ -809,14 +1031,40 @@ class PurchaseOrderService:
|
||||
logger.error("Error validating supplier", error=str(e), supplier_id=supplier_id)
|
||||
raise
|
||||
|
||||
async def _get_supplier_cached(self, tenant_id: uuid.UUID, supplier_id: uuid.UUID) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get supplier with request-scoped caching to avoid redundant API calls.
|
||||
|
||||
When enriching multiple POs that share suppliers, this cache prevents
|
||||
duplicate calls to the suppliers service (Fix #11).
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
supplier_id: Supplier ID
|
||||
|
||||
Returns:
|
||||
Supplier data dict or None
|
||||
"""
|
||||
cache_key = f"{tenant_id}:{supplier_id}"
|
||||
|
||||
if cache_key not in self._supplier_cache:
|
||||
supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(supplier_id))
|
||||
self._supplier_cache[cache_key] = supplier
|
||||
logger.debug("Supplier cache MISS", tenant_id=str(tenant_id), supplier_id=str(supplier_id))
|
||||
else:
|
||||
logger.debug("Supplier cache HIT", tenant_id=str(tenant_id), supplier_id=str(supplier_id))
|
||||
|
||||
return self._supplier_cache[cache_key]
|
||||
|
||||
async def _enrich_po_with_supplier(self, tenant_id: uuid.UUID, po: PurchaseOrder) -> None:
|
||||
"""Enrich purchase order with supplier information"""
|
||||
try:
|
||||
supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(po.supplier_id))
|
||||
# Use cached supplier lookup to avoid redundant API calls
|
||||
supplier = await self._get_supplier_cached(tenant_id, po.supplier_id)
|
||||
if supplier:
|
||||
# Set supplier_name as a dynamic attribute on the model instance
|
||||
po.supplier_name = supplier.get('name', 'Unknown Supplier')
|
||||
|
||||
|
||||
# Create a supplier summary object with the required fields for the frontend
|
||||
# Using the same structure as the suppliers service SupplierSummary schema
|
||||
supplier_summary = {
|
||||
@@ -840,7 +1088,7 @@ class PurchaseOrderService:
|
||||
'total_orders': supplier.get('total_orders', 0),
|
||||
'total_amount': supplier.get('total_amount', 0)
|
||||
}
|
||||
|
||||
|
||||
# Set the full supplier object as a dynamic attribute
|
||||
po.supplier = supplier_summary
|
||||
except Exception as e:
|
||||
|
||||
26
services/procurement/app/utils/__init__.py
Normal file
26
services/procurement/app/utils/__init__.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# services/alert_processor/app/utils/__init__.py
|
||||
"""
|
||||
Utility modules for alert processor service
|
||||
"""
|
||||
|
||||
from .cache import (
|
||||
get_redis_client,
|
||||
close_redis,
|
||||
get_cached,
|
||||
set_cached,
|
||||
delete_cached,
|
||||
delete_pattern,
|
||||
cache_response,
|
||||
make_cache_key,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'get_redis_client',
|
||||
'close_redis',
|
||||
'get_cached',
|
||||
'set_cached',
|
||||
'delete_cached',
|
||||
'delete_pattern',
|
||||
'cache_response',
|
||||
'make_cache_key',
|
||||
]
|
||||
265
services/procurement/app/utils/cache.py
Normal file
265
services/procurement/app/utils/cache.py
Normal file
@@ -0,0 +1,265 @@
|
||||
# services/orchestrator/app/utils/cache.py
|
||||
"""
|
||||
Redis caching utilities for dashboard endpoints
|
||||
"""
|
||||
|
||||
import json
|
||||
import redis.asyncio as redis
|
||||
from typing import Optional, Any, Callable
|
||||
from functools import wraps
|
||||
import structlog
|
||||
from app.core.config import settings
|
||||
from pydantic import BaseModel
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Redis client instance
|
||||
_redis_client: Optional[redis.Redis] = None
|
||||
|
||||
|
||||
async def get_redis_client() -> redis.Redis:
|
||||
"""Get or create Redis client"""
|
||||
global _redis_client
|
||||
|
||||
if _redis_client is None:
|
||||
try:
|
||||
# Check if TLS is enabled - convert string to boolean properly
|
||||
redis_tls_str = str(getattr(settings, 'REDIS_TLS_ENABLED', 'false')).lower()
|
||||
redis_tls_enabled = redis_tls_str in ('true', '1', 'yes', 'on')
|
||||
|
||||
connection_kwargs = {
|
||||
'host': str(getattr(settings, 'REDIS_HOST', 'localhost')),
|
||||
'port': int(getattr(settings, 'REDIS_PORT', 6379)),
|
||||
'db': int(getattr(settings, 'REDIS_DB', 0)),
|
||||
'decode_responses': True,
|
||||
'socket_connect_timeout': 5,
|
||||
'socket_timeout': 5
|
||||
}
|
||||
|
||||
# Add password if configured
|
||||
redis_password = getattr(settings, 'REDIS_PASSWORD', None)
|
||||
if redis_password:
|
||||
connection_kwargs['password'] = redis_password
|
||||
|
||||
# Add SSL/TLS support if enabled
|
||||
if redis_tls_enabled:
|
||||
import ssl
|
||||
connection_kwargs['ssl'] = True
|
||||
connection_kwargs['ssl_cert_reqs'] = ssl.CERT_NONE
|
||||
logger.debug(f"Redis TLS enabled - connecting with SSL to {connection_kwargs['host']}:{connection_kwargs['port']}")
|
||||
|
||||
_redis_client = redis.Redis(**connection_kwargs)
|
||||
|
||||
# Test connection
|
||||
await _redis_client.ping()
|
||||
logger.info(f"Redis client connected successfully (TLS: {redis_tls_enabled})")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to connect to Redis: {e}. Caching will be disabled.")
|
||||
_redis_client = None
|
||||
|
||||
return _redis_client
|
||||
|
||||
|
||||
async def close_redis():
|
||||
"""Close Redis connection"""
|
||||
global _redis_client
|
||||
if _redis_client:
|
||||
await _redis_client.close()
|
||||
_redis_client = None
|
||||
logger.info("Redis connection closed")
|
||||
|
||||
|
||||
async def get_cached(key: str) -> Optional[Any]:
|
||||
"""
|
||||
Get cached value by key
|
||||
|
||||
Args:
|
||||
key: Cache key
|
||||
|
||||
Returns:
|
||||
Cached value (deserialized from JSON) or None if not found or error
|
||||
"""
|
||||
try:
|
||||
client = await get_redis_client()
|
||||
if not client:
|
||||
return None
|
||||
|
||||
cached = await client.get(key)
|
||||
if cached:
|
||||
logger.debug(f"Cache hit: {key}")
|
||||
return json.loads(cached)
|
||||
else:
|
||||
logger.debug(f"Cache miss: {key}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.warning(f"Cache get error for key {key}: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def _serialize_value(value: Any) -> Any:
|
||||
"""
|
||||
Recursively serialize values for JSON storage, handling Pydantic models properly.
|
||||
|
||||
Args:
|
||||
value: Value to serialize
|
||||
|
||||
Returns:
|
||||
JSON-serializable value
|
||||
"""
|
||||
if isinstance(value, BaseModel):
|
||||
# Convert Pydantic model to dictionary
|
||||
return value.model_dump()
|
||||
elif isinstance(value, (list, tuple)):
|
||||
# Recursively serialize list/tuple elements
|
||||
return [_serialize_value(item) for item in value]
|
||||
elif isinstance(value, dict):
|
||||
# Recursively serialize dictionary values
|
||||
return {key: _serialize_value(val) for key, val in value.items()}
|
||||
else:
|
||||
# For other types, use default serialization
|
||||
return value
|
||||
|
||||
|
||||
async def set_cached(key: str, value: Any, ttl: int = 60) -> bool:
|
||||
"""
|
||||
Set cached value with TTL
|
||||
|
||||
Args:
|
||||
key: Cache key
|
||||
value: Value to cache (will be JSON serialized)
|
||||
ttl: Time to live in seconds
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
client = await get_redis_client()
|
||||
if not client:
|
||||
return False
|
||||
|
||||
# Serialize value properly before JSON encoding
|
||||
serialized_value = _serialize_value(value)
|
||||
serialized = json.dumps(serialized_value)
|
||||
await client.setex(key, ttl, serialized)
|
||||
logger.debug(f"Cache set: {key} (TTL: {ttl}s)")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.warning(f"Cache set error for key {key}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
async def delete_cached(key: str) -> bool:
    """
    Remove a single cached entry.

    Args:
        key: Cache key

    Returns:
        True if successful, False otherwise (errors are logged, not raised)
    """
    try:
        redis = await get_redis_client()
        if not redis:
            # No Redis connection available; treat as a soft failure.
            return False

        await redis.delete(key)
        logger.debug(f"Cache deleted: {key}")
        return True
    except Exception as e:
        # Best-effort invalidation: never propagate cache errors.
        logger.warning(f"Cache delete error for key {key}: {e}")
        return False
|
||||
|
||||
|
||||
async def delete_pattern(pattern: str) -> int:
    """
    Delete every cached key matching a Redis glob pattern.

    Uses SCAN (non-blocking, cursor-based) rather than KEYS to enumerate
    matches before issuing a single DELETE for all of them.

    Args:
        pattern: Redis key pattern (e.g., "dashboard:*")

    Returns:
        Number of keys deleted (0 on no matches or on error)
    """
    try:
        redis = await get_redis_client()
        if not redis:
            # No Redis connection available; nothing to delete.
            return 0

        matched = [key async for key in redis.scan_iter(match=pattern)]
        if not matched:
            return 0

        deleted = await redis.delete(*matched)
        logger.info(f"Deleted {deleted} keys matching pattern: {pattern}")
        return deleted
    except Exception as e:
        # Best-effort invalidation: log and report zero deletions.
        logger.warning(f"Cache delete pattern error for {pattern}: {e}")
        return 0
|
||||
|
||||
|
||||
def cache_response(key_prefix: str, ttl: int = 60):
    """
    Decorator to cache async endpoint responses in Redis, keyed by tenant.

    Args:
        key_prefix: Prefix for cache key (will be combined with tenant_id)
        ttl: Time to live in seconds

    Usage:
        @cache_response("dashboard:health", ttl=30)
        async def get_health(tenant_id: str):
            ...

    Notes:
        - If no tenant_id can be determined, caching is skipped and the
          endpoint runs normally.
        - get_cached/set_cached swallow Redis errors, so the endpoint still
          works when the cache is unavailable.
    """
    def decorator(func: Callable):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Prefer the keyword argument; otherwise fall back to the first
            # positional argument (assumed to be tenant_id for these routes).
            # NOTE(review): the positional fallback is fragile if an endpoint's
            # first parameter is not tenant_id — confirm against call sites.
            tenant_id = kwargs.get('tenant_id')
            if not tenant_id and args:
                # args is known non-empty here, so indexing is safe
                # (fixes the redundant `len(args) > 0` re-check).
                tenant_id = args[0]

            if not tenant_id:
                # No tenant context: do not cache, just execute.
                return await func(*args, **kwargs)

            # Build cache key
            cache_key = f"{key_prefix}:{tenant_id}"

            # Serve from cache when a value is present (None means miss).
            cached_value = await get_cached(cache_key)
            if cached_value is not None:
                return cached_value

            # Cache miss: execute the endpoint and store the fresh result.
            result = await func(*args, **kwargs)
            await set_cached(cache_key, result, ttl)

            return result

        return wrapper
    return decorator
|
||||
|
||||
|
||||
def make_cache_key(prefix: str, tenant_id: str, **params) -> str:
    """
    Build a colon-delimited cache key from a prefix, tenant, and parameters.

    Parameters are appended in sorted-name order as "name:value" segments;
    parameters whose value is None are omitted, so equivalent calls always
    produce the same key.

    Args:
        prefix: Key prefix
        tenant_id: Tenant ID
        **params: Additional parameters to include in key

    Returns:
        Cache key string, e.g. "dashboard:t1:days:7:status:open"
    """
    extras = [
        f"{name}:{value}"
        for name, value in sorted(params.items())
        if value is not None
    ]
    return ":".join([prefix, tenant_id, *extras])
|
||||
@@ -42,6 +42,7 @@ from shared.schemas.reasoning_types import (
|
||||
create_po_reasoning_supplier_contract
|
||||
)
|
||||
from shared.utils.demo_dates import BASE_REFERENCE_DATE
|
||||
from shared.messaging import RabbitMQClient
|
||||
|
||||
# Configure logging
|
||||
logger = structlog.get_logger()
|
||||
@@ -350,9 +351,52 @@ async def create_purchase_order(
|
||||
contract_quantity=float(total_amount)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to generate reasoning_data: {e}")
|
||||
logger.error(f"Failed to generate reasoning_data, falling back to basic reasoning: {e}")
|
||||
logger.exception(e)
|
||||
pass
|
||||
|
||||
# Fallback: Always generate basic reasoning_data to ensure it exists
|
||||
try:
|
||||
# Get product names from items_data as fallback
|
||||
items_list = items_data or []
|
||||
product_names = [item.get('name', item.get('product_name', f"Product {i+1}")) for i, item in enumerate(items_list)]
|
||||
if not product_names:
|
||||
product_names = ["Demo Product"]
|
||||
|
||||
# Create basic low stock reasoning as fallback
|
||||
reasoning_data = create_po_reasoning_low_stock(
|
||||
supplier_name=supplier.name,
|
||||
product_names=product_names,
|
||||
current_stock=25.0, # Default simulated current stock
|
||||
required_stock=100.0, # Default required stock
|
||||
days_until_stockout=3, # Default days until stockout
|
||||
threshold_percentage=20,
|
||||
affected_products=product_names[:2] # First 2 products affected
|
||||
)
|
||||
logger.info("Successfully generated fallback reasoning_data")
|
||||
except Exception as fallback_error:
|
||||
logger.error(f"Fallback reasoning generation also failed: {fallback_error}")
|
||||
# Ultimate fallback: Create minimal valid reasoning data structure
|
||||
reasoning_data = {
|
||||
"type": "low_stock_detection",
|
||||
"parameters": {
|
||||
"supplier_name": supplier.name,
|
||||
"product_names": ["Demo Product"],
|
||||
"product_count": 1,
|
||||
"current_stock": 10.0,
|
||||
"required_stock": 50.0,
|
||||
"days_until_stockout": 2
|
||||
},
|
||||
"consequence": {
|
||||
"type": "stockout_risk",
|
||||
"severity": "medium",
|
||||
"impact_days": 2
|
||||
},
|
||||
"metadata": {
|
||||
"trigger_source": "demo_fallback",
|
||||
"ai_assisted": False
|
||||
}
|
||||
}
|
||||
logger.info("Used ultimate fallback reasoning_data structure")
|
||||
|
||||
# Create PO
|
||||
po = PurchaseOrder(
|
||||
@@ -639,18 +683,123 @@ async def seed_purchase_orders_for_tenant(db: AsyncSession, tenant_id: uuid.UUID
|
||||
po12.notes = "📦 ARRIVING SOON: Delivery expected in 8 hours - Prepare for stock receipt"
|
||||
pos_created.append(po12)
|
||||
|
||||
# 13. DELIVERY TODAY MORNING - Scheduled for 10 AM today
|
||||
delivery_today_morning = BASE_REFERENCE_DATE.replace(hour=10, minute=0, second=0, microsecond=0)
|
||||
po13 = await create_purchase_order(
|
||||
db, tenant_id, supplier_high_trust,
|
||||
PurchaseOrderStatus.sent_to_supplier,
|
||||
Decimal("625.00"),
|
||||
created_offset_days=-3,
|
||||
items_data=[
|
||||
{"name": "Harina de Trigo T55", "quantity": 500, "unit_price": 0.85, "uom": "kg"},
|
||||
{"name": "Levadura Fresca", "quantity": 25, "unit_price": 8.00, "uom": "kg"}
|
||||
]
|
||||
)
|
||||
po13.expected_delivery_date = delivery_today_morning
|
||||
po13.required_delivery_date = delivery_today_morning
|
||||
po13.notes = "📦 Delivery scheduled for 10 AM - Essential ingredients for morning production"
|
||||
pos_created.append(po13)
|
||||
|
||||
# 14. DELIVERY TODAY AFTERNOON - Scheduled for 3 PM today
|
||||
delivery_today_afternoon = BASE_REFERENCE_DATE.replace(hour=15, minute=0, second=0, microsecond=0)
|
||||
po14 = await create_purchase_order(
|
||||
db, tenant_id, supplier_medium_trust,
|
||||
PurchaseOrderStatus.confirmed,
|
||||
Decimal("380.50"),
|
||||
created_offset_days=-2,
|
||||
items_data=[
|
||||
{"name": "Papel Kraft Bolsas", "quantity": 5000, "unit_price": 0.05, "uom": "unit"},
|
||||
{"name": "Cajas Pastelería", "quantity": 500, "unit_price": 0.26, "uom": "unit"}
|
||||
]
|
||||
)
|
||||
po14.expected_delivery_date = delivery_today_afternoon
|
||||
po14.required_delivery_date = delivery_today_afternoon
|
||||
po14.notes = "📦 Packaging delivery expected at 3 PM"
|
||||
pos_created.append(po14)
|
||||
|
||||
# 15. DELIVERY TOMORROW EARLY - Scheduled for 8 AM tomorrow (high priority)
|
||||
delivery_tomorrow_early = BASE_REFERENCE_DATE + timedelta(days=1, hours=8)
|
||||
po15 = await create_purchase_order(
|
||||
db, tenant_id, supplier_high_trust,
|
||||
PurchaseOrderStatus.approved,
|
||||
Decimal("445.00"),
|
||||
created_offset_days=-1,
|
||||
items_data=[
|
||||
{"name": "Harina Integral", "quantity": 300, "unit_price": 0.95, "uom": "kg"},
|
||||
{"name": "Sal Marina", "quantity": 50, "unit_price": 1.60, "uom": "kg"}
|
||||
]
|
||||
)
|
||||
po15.expected_delivery_date = delivery_tomorrow_early
|
||||
po15.required_delivery_date = delivery_tomorrow_early
|
||||
po15.priority = "high"
|
||||
po15.notes = "🔔 Critical delivery for weekend production - Confirm with supplier"
|
||||
pos_created.append(po15)
|
||||
|
||||
# 16. DELIVERY TOMORROW LATE - Scheduled for 5 PM tomorrow
|
||||
delivery_tomorrow_late = BASE_REFERENCE_DATE + timedelta(days=1, hours=17)
|
||||
po16 = await create_purchase_order(
|
||||
db, tenant_id, supplier_low_trust,
|
||||
PurchaseOrderStatus.sent_to_supplier,
|
||||
Decimal("890.00"),
|
||||
created_offset_days=-2,
|
||||
items_data=[
|
||||
{"name": "Chocolate Negro 70%", "quantity": 80, "unit_price": 8.50, "uom": "kg"},
|
||||
{"name": "Cacao en Polvo", "quantity": 30, "unit_price": 7.00, "uom": "kg"}
|
||||
]
|
||||
)
|
||||
po16.expected_delivery_date = delivery_tomorrow_late
|
||||
po16.required_delivery_date = delivery_tomorrow_late
|
||||
po16.notes = "📦 Specialty ingredients for chocolate products"
|
||||
pos_created.append(po16)
|
||||
|
||||
# 17. DELIVERY DAY AFTER - Scheduled for 11 AM in 2 days
|
||||
delivery_day_after = BASE_REFERENCE_DATE + timedelta(days=2, hours=11)
|
||||
po17 = await create_purchase_order(
|
||||
db, tenant_id, supplier_medium_trust,
|
||||
PurchaseOrderStatus.confirmed,
|
||||
Decimal("520.00"),
|
||||
created_offset_days=-1,
|
||||
items_data=[
|
||||
{"name": "Nata 35% MG", "quantity": 100, "unit_price": 3.80, "uom": "l"},
|
||||
{"name": "Queso Crema", "quantity": 40, "unit_price": 3.50, "uom": "kg"}
|
||||
]
|
||||
)
|
||||
po17.expected_delivery_date = delivery_day_after
|
||||
po17.required_delivery_date = delivery_day_after
|
||||
po17.notes = "📦 Dairy delivery for mid-week production"
|
||||
pos_created.append(po17)
|
||||
|
||||
# 18. DELIVERY THIS WEEK - Scheduled for 2 PM in 4 days
|
||||
delivery_this_week = BASE_REFERENCE_DATE + timedelta(days=4, hours=14)
|
||||
po18 = await create_purchase_order(
|
||||
db, tenant_id, supplier_low_trust,
|
||||
PurchaseOrderStatus.approved,
|
||||
Decimal("675.50"),
|
||||
created_offset_days=-1,
|
||||
items_data=[
|
||||
{"name": "Miel de Azahar", "quantity": 50, "unit_price": 8.90, "uom": "kg"},
|
||||
{"name": "Almendras Marcona", "quantity": 40, "unit_price": 9.50, "uom": "kg"},
|
||||
{"name": "Nueces", "quantity": 30, "unit_price": 7.20, "uom": "kg"}
|
||||
]
|
||||
)
|
||||
po18.expected_delivery_date = delivery_this_week
|
||||
po18.required_delivery_date = delivery_this_week
|
||||
po18.notes = "📦 Specialty items for artisan products"
|
||||
pos_created.append(po18)
|
||||
|
||||
await db.commit()
|
||||
|
||||
logger.info(
|
||||
f"Successfully created {len(pos_created)} purchase orders for tenant",
|
||||
tenant_id=str(tenant_id),
|
||||
pending_approval=4, # Updated count (includes escalated PO)
|
||||
approved=2,
|
||||
approved=3, # PO #15, #18 + 1 regular
|
||||
completed=2,
|
||||
sent_to_supplier=2, # Overdue + arriving soon
|
||||
sent_to_supplier=4, # PO #11, #12, #13, #16
|
||||
confirmed=3, # PO #14, #17 + 1 regular
|
||||
cancelled=1,
|
||||
disputed=1,
|
||||
dashboard_showcase=3 # New POs specifically for dashboard alerts
|
||||
delivery_showcase=9 # POs #11-18 with delivery tracking
|
||||
)
|
||||
|
||||
return pos_created
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to emit alerts for existing pending purchase orders.
|
||||
|
||||
This is a one-time migration script to create alerts for POs that were
|
||||
created before the alert emission feature was implemented.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directories to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent / 'shared'))
|
||||
|
||||
from sqlalchemy import select, text
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
import structlog
|
||||
|
||||
from app.models.purchase_order import PurchaseOrder
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
async def main() -> None:
    """
    Emit an approval alert for every purchase order still pending approval.

    One-time backfill: for each PO with status 'pending_approval', builds an
    alert payload (with deadline/urgency enrichment for priority scoring)
    and publishes it to the alerts exchange on RabbitMQ. Per-PO failures are
    logged and skipped; messaging and DB resources are always released.
    """

    # Create database engine
    engine = create_async_engine(settings.DATABASE_URL, echo=False)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

    # Create RabbitMQ client
    rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement")
    await rabbitmq_client.connect()

    try:
        async with async_session() as session:
            # Get all pending approval POs
            query = select(PurchaseOrder).where(
                PurchaseOrder.status == 'pending_approval'
            )
            result = await session.execute(query)
            pending_pos = result.scalars().all()

            logger.info(f"Found {len(pending_pos)} pending purchase orders")

            for po in pending_pos:
                try:
                    # Get supplier info from suppliers service (simplified - using stored data)
                    # In production, you'd fetch from suppliers service
                    supplier_name = f"Supplier-{po.supplier_id}"

                    # Prepare alert payload
                    from uuid import uuid4
                    from datetime import datetime as dt, timedelta, timezone

                    # Get current UTC time with timezone
                    now_utc = dt.now(timezone.utc)

                    # Calculate deadline and urgency for priority scoring
                    if po.required_delivery_date:
                        # Ensure deadline is timezone-aware
                        deadline = po.required_delivery_date
                        if deadline.tzinfo is None:
                            # NOTE(review): naive timestamps are assumed UTC — confirm DB convention
                            deadline = deadline.replace(tzinfo=timezone.utc)
                    else:
                        # Default: 7 days for normal priority, 3 days for critical
                        days_until = 3 if po.priority == 'critical' else 7
                        deadline = now_utc + timedelta(days=days_until)

                    # Calculate hours until consequence (for urgency scoring)
                    hours_until = (deadline - now_utc).total_seconds() / 3600

                    alert_data = {
                        'id': str(uuid4()),  # Generate unique alert ID
                        'tenant_id': str(po.tenant_id),
                        'service': 'procurement',
                        'type': 'po_approval_needed',
                        'alert_type': 'po_approval_needed',  # Added for dashboard filtering
                        'type_class': 'action_needed',  # Critical for dashboard action queue
                        'severity': 'high' if po.priority == 'critical' else 'medium',
                        'title': f'Purchase Order #{po.po_number} requires approval',
                        'message': f'Purchase order totaling {po.currency} {po.total_amount:.2f} is pending approval.',
                        'timestamp': now_utc.isoformat(),
                        'metadata': {
                            'po_id': str(po.id),
                            'po_number': po.po_number,
                            'supplier_id': str(po.supplier_id),
                            'supplier_name': supplier_name,
                            'total_amount': float(po.total_amount),
                            'currency': po.currency,
                            'priority': po.priority,
                            'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                            'created_at': po.created_at.isoformat(),
                            # Enrichment metadata for priority scoring
                            'financial_impact': float(po.total_amount),  # Business impact
                            'deadline': deadline.isoformat(),  # Urgency deadline
                            'hours_until_consequence': int(hours_until),  # Urgency hours
                        },
                        'actions': [
                            {
                                'action_type': 'approve_po',
                                'label': 'Approve PO',
                                'variant': 'primary',
                                'disabled': False,
                                'endpoint': f'/api/v1/tenants/{po.tenant_id}/purchase-orders/{po.id}/approve',
                                'method': 'POST'
                            },
                            {
                                'action_type': 'reject_po',
                                'label': 'Reject',
                                'variant': 'ghost',
                                'disabled': False,
                                'endpoint': f'/api/v1/tenants/{po.tenant_id}/purchase-orders/{po.id}/reject',
                                'method': 'POST'
                            },
                            {
                                'action_type': 'modify_po',
                                'label': 'Modify',
                                'variant': 'ghost',
                                'disabled': False,
                                'endpoint': f'/api/v1/tenants/{po.tenant_id}/purchase-orders/{po.id}',
                                'method': 'GET'
                            }
                        ],
                        'item_type': 'alert'
                    }

                    # Publish to RabbitMQ
                    success = await rabbitmq_client.publish_event(
                        exchange_name='alerts.exchange',
                        routing_key=f'alert.{alert_data["severity"]}.procurement',
                        event_data=alert_data
                    )

                    if success:
                        logger.info(
                            f"✓ Alert emitted for PO {po.po_number}",
                            po_id=str(po.id),
                            tenant_id=str(po.tenant_id)
                        )
                    else:
                        logger.error(
                            f"✗ Failed to emit alert for PO {po.po_number}",
                            po_id=str(po.id)
                        )

                    # Small delay to avoid overwhelming the system
                    await asyncio.sleep(0.5)

                except Exception as e:
                    # Per-PO failures are logged with a traceback and skipped
                    # so one bad record cannot abort the whole backfill.
                    import traceback
                    logger.error(
                        f"Error processing PO {po.po_number}",
                        error=str(e),
                        po_id=str(po.id),
                        traceback=traceback.format_exc()
                    )
                    continue

            logger.info(f"✅ Finished emitting alerts for {len(pending_pos)} purchase orders")

    finally:
        # Always release messaging and database resources, even on failure.
        await rabbitmq_client.disconnect()
        await engine.dispose()
|
||||
|
||||
|
||||
# Script entry point: run the one-off alert backfill on the event loop.
if __name__ == "__main__":
    asyncio.run(main())
|
||||
Reference in New Issue
Block a user