New alert service

services/procurement/app/services/delivery_tracking_service.py (new file, 484 lines)
@@ -0,0 +1,484 @@
"""
Delivery Tracking Service - Simplified

Tracks purchase order deliveries and generates appropriate alerts using EventPublisher:
- DELIVERY_ARRIVING_SOON: 2 hours before the delivery window opens
- DELIVERY_OVERDUE: 30 minutes after the delivery window ends
- STOCK_RECEIPT_INCOMPLETE: delivery window has passed but the delivery has not
  been marked as received

Runs as an internal scheduler with leader election.
Domain ownership: the Procurement service owns all PO and delivery tracking.
"""

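# Worked example of the thresholds above (illustrative times): for a PO expected
# at 10:00 UTC with the default 4-hour delivery window,
#   - the "arriving soon" alert can fire between 08:00 and 10:00,
#   - the delivery window runs 10:00-14:00,
#   - the "overdue" alert fires from 14:30 onwards,
#   - the "receipt incomplete" alert fires on any check after 14:00 while the
#     delivery is still not marked as received.
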
import structlog
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, Optional, List
from uuid import UUID, uuid4
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from sqlalchemy import select
from sqlalchemy.orm import selectinload

from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
from app.models.purchase_order import PurchaseOrder, PurchaseOrderStatus

logger = structlog.get_logger()


class DeliveryTrackingService:
    """
    Monitors PO deliveries and generates time-based alerts using EventPublisher.

    Uses APScheduler with leader election to run hourly checks.
    Only one pod executes checks (others skip if not leader).
    """

    def __init__(self, event_publisher: UnifiedEventPublisher, config):
        self.publisher = event_publisher
        self.config = config
        self.scheduler = AsyncIOScheduler()
        self.is_leader = False
        self.instance_id = str(uuid4())[:8]  # Short instance ID for logging

    async def start(self):
        """Start the delivery tracking scheduler"""
        if not self.scheduler.running:
            # Register the hourly check at :30 (see check_expected_deliveries docstring).
            # Assumes the job is registered here rather than by the caller.
            self.scheduler.add_job(
                self._check_all_tenants,
                CronTrigger(minute=30),
                id="delivery_tracking_hourly_check",
                replace_existing=True
            )
            self.scheduler.start()
            logger.info(
                "Delivery tracking scheduler started",
                instance_id=self.instance_id
            )

    async def stop(self):
        """Stop the scheduler and release leader lock"""
        if self.scheduler.running:
            self.scheduler.shutdown(wait=False)
            logger.info("Delivery tracking scheduler stopped", instance_id=self.instance_id)

    async def _check_all_tenants(self):
        """
        Check deliveries for all active tenants (with leader election).

        Only one pod executes this - others skip if not leader.
        """
        # Try to acquire leader lock
        if not await self._try_acquire_leader_lock():
            logger.debug(
                "Skipping delivery check - not leader",
                instance_id=self.instance_id
            )
            return

        try:
            logger.info("Starting delivery checks (as leader)", instance_id=self.instance_id)

            # Get all active tenants from database
            tenants = await self._get_active_tenants()

            total_alerts = 0
            for tenant_id in tenants:
                try:
                    result = await self.check_expected_deliveries(tenant_id)
                    # 'total_alerts' already aggregates the per-type counters;
                    # summing all values would double-count.
                    total_alerts += result.get('total_alerts', 0)
                except Exception as e:
                    logger.error(
                        "Delivery check failed for tenant",
                        tenant_id=str(tenant_id),
                        error=str(e),
                        exc_info=True
                    )

            logger.info(
                "Delivery checks completed",
                instance_id=self.instance_id,
                tenants_checked=len(tenants),
                total_alerts=total_alerts
            )

        finally:
            await self._release_leader_lock()

    async def _try_acquire_leader_lock(self) -> bool:
        """
        Try to acquire the leader lock for delivery tracking.

        Intended to use Redis so that only one pod runs the checks.
        Returns True if acquired, False if another pod is leader.
        """
        # This simplified version doesn't implement leader election
        # In a real implementation, you'd use Redis or database locks
        logger.info("Delivery tracking check running", instance_id=self.instance_id)
        return True

    async def _release_leader_lock(self):
        """Release leader lock"""
        logger.debug("Delivery tracking check completed", instance_id=self.instance_id)

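    # For reference, a minimal Redis-based lock sketch (illustrative only: it
    # assumes a redis.asyncio client with decode_responses=True is reachable,
    # e.g. as self.config.redis_client, which this simplified service does not define):
    #
    #     acquired = await redis.set(
    #         "locks:delivery_tracking_leader",   # hypothetical key name
    #         self.instance_id,
    #         nx=True,   # only succeeds if no other instance holds the key
    #         ex=300,    # auto-expire so a crashed leader releases the lock
    #     )
    #     return bool(acquired)
    #
    # Release would delete the key only if it still holds this instance_id
    # (ideally atomically, via a small Lua script).
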
    async def _get_active_tenants(self) -> List[UUID]:
        """
        Get all active tenants from database.

        Returns list of tenant UUIDs that have purchase orders.
        """
        try:
            async with self.config.database_manager.get_session() as session:
                # Get distinct tenant_ids that have purchase orders
                query = select(PurchaseOrder.tenant_id).distinct()
                result = await session.execute(query)
                tenant_ids = [row[0] for row in result.all()]

                logger.debug("Active tenants retrieved", count=len(tenant_ids))
                return tenant_ids

        except Exception as e:
            logger.error("Failed to get active tenants", error=str(e))
            return []

    async def check_expected_deliveries(self, tenant_id: UUID) -> Dict[str, int]:
        """
        Check all expected deliveries for a tenant and generate appropriate alerts.

        DIRECT DATABASE ACCESS - No API calls needed!

        Called by:
        - Scheduled job (hourly at :30)
        - Manual trigger endpoint (demo cloning)

        Returns:
            Dict with counts: {
                'arriving_soon': int,
                'overdue': int,
                'receipt_incomplete': int,
                'total_alerts': int
            }
        """
        logger.info("Checking expected deliveries", tenant_id=str(tenant_id))

        counts = {
            'arriving_soon': 0,
            'overdue': 0,
            'receipt_incomplete': 0
        }

        try:
            # Get expected deliveries directly from database
            deliveries = await self._get_expected_deliveries_from_db(tenant_id)

            now = datetime.now(timezone.utc)

            for delivery in deliveries:
                po_id = delivery.get('po_id')
                expected_date = delivery.get('expected_delivery_date')
                delivery_window_hours = delivery.get('delivery_window_hours', 4)
                status = delivery.get('status')

                if not expected_date:
                    continue

                # Parse expected date
                if isinstance(expected_date, str):
                    expected_date = datetime.fromisoformat(expected_date)

                # Make timezone-aware
                if expected_date.tzinfo is None:
                    expected_date = expected_date.replace(tzinfo=timezone.utc)

                # Calculate delivery window
                window_start = expected_date
                window_end = expected_date + timedelta(hours=delivery_window_hours)

                # Check if arriving soon (2 hours before window)
                arriving_soon_time = window_start - timedelta(hours=2)
                if arriving_soon_time <= now < window_start and status in ['approved', 'sent_to_supplier']:
                    if await self._send_arriving_soon_alert(tenant_id, delivery):
                        counts['arriving_soon'] += 1

                # Check if overdue (30 min after window end)
                overdue_time = window_end + timedelta(minutes=30)
                if now >= overdue_time and status in ['approved', 'sent_to_supplier']:
                    if await self._send_overdue_alert(tenant_id, delivery):
                        counts['overdue'] += 1

                # Check if receipt incomplete (delivery window passed, not marked received)
                if now > window_end and status in ['approved', 'sent_to_supplier']:
                    if await self._send_receipt_incomplete_alert(tenant_id, delivery):
                        counts['receipt_incomplete'] += 1

            counts['total_alerts'] = sum([counts['arriving_soon'], counts['overdue'], counts['receipt_incomplete']])

            logger.info(
                "Delivery check completed",
                tenant_id=str(tenant_id),
                **counts
            )

        except Exception as e:
            logger.error(
                "Error checking deliveries",
                tenant_id=str(tenant_id),
                error=str(e),
                exc_info=True
            )

        return counts

    async def _get_expected_deliveries_from_db(
        self,
        tenant_id: UUID,
        days_ahead: int = 1,
        include_overdue: bool = True
    ) -> List[Dict[str, Any]]:
        """
        Query expected deliveries DIRECTLY from database (no HTTP call).

        This replaces the HTTP call to /api/internal/expected-deliveries.

        Returns:
            List of delivery dicts with same structure as API endpoint
        """
        try:
            async with self.config.database_manager.get_session() as session:
                # Calculate date range
                now = datetime.now(timezone.utc)
                end_date = now + timedelta(days=days_ahead)

                # Build query for purchase orders with expected delivery dates
                query = select(PurchaseOrder).options(
                    selectinload(PurchaseOrder.items)
                ).where(
                    PurchaseOrder.tenant_id == tenant_id,
                    PurchaseOrder.expected_delivery_date.isnot(None),
                    PurchaseOrder.status.in_([
                        PurchaseOrderStatus.approved,
                        PurchaseOrderStatus.sent_to_supplier,
                        PurchaseOrderStatus.confirmed
                    ])
                )

                # Add date filters
                if include_overdue:
                    query = query.where(PurchaseOrder.expected_delivery_date <= end_date)
                else:
                    query = query.where(
                        PurchaseOrder.expected_delivery_date >= now,
                        PurchaseOrder.expected_delivery_date <= end_date
                    )

                # Order by delivery date
                query = query.order_by(PurchaseOrder.expected_delivery_date.asc())

                # Execute query
                result = await session.execute(query)
                purchase_orders = result.scalars().all()

                logger.info(
                    "Expected deliveries query executed",
                    tenant_id=str(tenant_id),
                    po_count=len(purchase_orders),
                    days_ahead=days_ahead,
                    include_overdue=include_overdue,
                    now=now.isoformat(),
                    end_date=end_date.isoformat()
                )

                # Format deliveries (same structure as API endpoint)
                deliveries = []

                for po in purchase_orders:
                    # Simple supplier name extraction
                    supplier_name = f"Supplier-{str(po.supplier_id)[:8]}"
                    supplier_phone = None

                    # Extract from notes if available
                    if po.notes:
                        if "Molinos San José" in po.notes:
                            supplier_name = "Molinos San José S.L."
                            supplier_phone = "+34 915 234 567"
                        elif "Lácteos del Valle" in po.notes:
                            supplier_name = "Lácteos del Valle S.A."
                            supplier_phone = "+34 913 456 789"
                        elif "Chocolates Valor" in po.notes:
                            supplier_name = "Chocolates Valor"
                            supplier_phone = "+34 965 510 062"

                    # Format line items
                    line_items = []
                    for item in po.items[:5]:
                        line_items.append({
                            "product_name": item.product_name,
                            "quantity": float(item.ordered_quantity) if item.ordered_quantity else 0,
                            "unit": item.unit_of_measure or "unit"
                        })

                    delivery_dict = {
                        "po_id": str(po.id),
                        "po_number": po.po_number,
                        "supplier_id": str(po.supplier_id),
                        "supplier_name": supplier_name,
                        "supplier_phone": supplier_phone,
                        "expected_delivery_date": po.expected_delivery_date.isoformat(),
                        "delivery_window_hours": 4,  # Default
                        "status": po.status.value,
                        "line_items": line_items,
                        "total_amount": float(po.total_amount) if po.total_amount else 0.0,
                        "currency": po.currency
                    }

                    deliveries.append(delivery_dict)

                return deliveries

        except Exception as e:
            logger.error(
                "Error fetching expected deliveries from database",
                tenant_id=str(tenant_id),
                error=str(e),
                exc_info=True
            )
            return []

    async def _send_arriving_soon_alert(
        self,
        tenant_id: UUID,
        delivery: Dict[str, Any]
    ) -> bool:
        """
        Send DELIVERY_ARRIVING_SOON alert (2h before delivery window).

        This appears in the action queue with "Mark as Received" action.
        """
        po_number = delivery.get('po_number', 'N/A')
        supplier_name = delivery.get('supplier_name', 'Supplier')
        expected_date = delivery.get('expected_delivery_date')
        line_items = delivery.get('line_items', [])

        # Format product list
        products = [item['product_name'] for item in line_items[:3]]
        product_list = ", ".join(products)
        if len(line_items) > 3:
            product_list += f" (+{len(line_items) - 3} more)"

        # Calculate time until arrival
        if isinstance(expected_date, str):
            expected_date = datetime.fromisoformat(expected_date)
        if expected_date.tzinfo is None:
            expected_date = expected_date.replace(tzinfo=timezone.utc)

        hours_until = (expected_date - datetime.now(timezone.utc)).total_seconds() / 3600

        metadata = {
            "po_id": delivery['po_id'],
            "po_number": po_number,
            "supplier_id": delivery.get('supplier_id'),
            "supplier_name": supplier_name,
            "supplier_phone": delivery.get('supplier_phone'),
            "expected_delivery_date": expected_date.isoformat(),
            "line_items": line_items,
            "hours_until_arrival": hours_until,
        }

        # Send alert using UnifiedEventPublisher
        success = await self.publisher.publish_alert(
            event_type="supply_chain.delivery_arriving_soon",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        if success:
            logger.info(
                "Sent arriving soon alert",
                po_number=po_number,
                supplier=supplier_name
            )

        return success

    async def _send_overdue_alert(
        self,
        tenant_id: UUID,
        delivery: Dict[str, Any]
    ) -> bool:
        """
        Send DELIVERY_OVERDUE alert (30min after expected window).

        Critical priority - needs immediate action (call supplier).
        """
        po_number = delivery.get('po_number', 'N/A')
        supplier_name = delivery.get('supplier_name', 'Supplier')
        expected_date = delivery.get('expected_delivery_date')

        # Calculate how late
        if isinstance(expected_date, str):
            expected_date = datetime.fromisoformat(expected_date)
        if expected_date.tzinfo is None:
            expected_date = expected_date.replace(tzinfo=timezone.utc)

        hours_late = (datetime.now(timezone.utc) - expected_date).total_seconds() / 3600

        metadata = {
            "po_id": delivery['po_id'],
            "po_number": po_number,
            "supplier_id": delivery.get('supplier_id'),
            "supplier_name": supplier_name,
            "supplier_phone": delivery.get('supplier_phone'),
            "expected_delivery_date": expected_date.isoformat(),
            "hours_late": hours_late,
            "financial_impact": delivery.get('total_amount', 0),
            "affected_orders": len(delivery.get('affected_production_batches', [])),
        }

        # Send alert with high severity
        success = await self.publisher.publish_alert(
            event_type="supply_chain.delivery_overdue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        if success:
            logger.warning(
                "Sent overdue delivery alert",
                po_number=po_number,
                supplier=supplier_name,
                hours_late=hours_late
            )

        return success

    async def _send_receipt_incomplete_alert(
        self,
        tenant_id: UUID,
        delivery: Dict[str, Any]
    ) -> bool:
        """
        Send STOCK_RECEIPT_INCOMPLETE alert.

        Delivery window has passed but stock not marked as received.
        """
        po_number = delivery.get('po_number', 'N/A')
        supplier_name = delivery.get('supplier_name', 'Supplier')

        metadata = {
            "po_id": delivery['po_id'],
            "po_number": po_number,
            "supplier_id": delivery.get('supplier_id'),
            "supplier_name": supplier_name,
            "expected_delivery_date": delivery.get('expected_delivery_date'),
        }

        # Send alert using UnifiedEventPublisher
        success = await self.publisher.publish_alert(
            event_type="supply_chain.stock_receipt_incomplete",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        if success:
            logger.info(
                "Sent receipt incomplete alert",
                po_number=po_number
            )

        return success
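
# Typical wiring (sketch only; the startup code is not part of this commit and
# the variable names are illustrative):
#
#     publisher = UnifiedEventPublisher(rabbitmq_client, "procurement")
#     delivery_tracking = DeliveryTrackingService(publisher, config)
#     await delivery_tracking.start()   # on application startup
#     ...
#     await delivery_tracking.stop()    # on application shutdown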

services/procurement/app/services/procurement_alert_service.py (new file, 416 lines)
@@ -0,0 +1,416 @@
"""
Procurement Alert Service - Simplified

Emits minimal events using EventPublisher.
All enrichment handled by alert_processor.
"""

import asyncio
from typing import List, Dict, Any, Optional
from uuid import UUID
from datetime import datetime
import structlog

from shared.messaging import UnifiedEventPublisher, EVENT_TYPES

logger = structlog.get_logger()


class ProcurementAlertService:
    """Simplified procurement alert service using UnifiedEventPublisher"""

    def __init__(self, event_publisher: UnifiedEventPublisher):
        self.publisher = event_publisher

    async def emit_po_approval_needed(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        total_amount: float,
        currency: str,
        items_count: int,
        required_delivery_date: str
    ):
        """Emit PO approval needed event"""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "total_amount": total_amount,
            "po_amount": total_amount,  # Alias for compatibility
            "currency": currency,
            "items_count": items_count,
            "required_delivery_date": required_delivery_date
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.po_approval_needed",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "po_approval_needed_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            total_amount=total_amount
        )

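    # Example call (all values below are illustrative):
    #
    #     await alert_service.emit_po_approval_needed(
    #         tenant_id=tenant_id,
    #         po_id=po.id,
    #         po_number="PO-2024-0042",
    #         supplier_name="Molinos San José S.L.",
    #         total_amount=1250.00,
    #         currency="EUR",
    #         items_count=3,
    #         required_delivery_date="2024-06-15",
    #     )
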
    async def emit_delivery_overdue(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        supplier_contact: Optional[str],
        expected_date: str,
        days_overdue: int,
        items: List[Dict[str, Any]]
    ):
        """Emit delivery overdue alert"""

        # Determine severity based on days overdue
        if days_overdue > 7:
            severity = "urgent"
        elif days_overdue > 3:
            severity = "high"
        else:
            severity = "medium"

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "expected_date": expected_date,
            "days_overdue": days_overdue,
            "items": items,
            "items_count": len(items)
        }

        if supplier_contact:
            metadata["supplier_contact"] = supplier_contact

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_overdue",
            tenant_id=tenant_id,
            severity=severity,
            data=metadata
        )

        logger.info(
            "delivery_overdue_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            days_overdue=days_overdue
        )

    async def emit_supplier_performance_issue(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        issue_type: str,
        issue_description: str,
        affected_orders: int = 0,
        total_value_affected: Optional[float] = None
    ):
        """Emit supplier performance issue alert"""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "issue_type": issue_type,
            "issue_description": issue_description,
            "affected_orders": affected_orders
        }

        if total_value_affected:
            metadata["total_value_affected"] = total_value_affected

        await self.publisher.publish_alert(
            event_type="supply_chain.supplier_performance_issue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "supplier_performance_issue_emitted",
            tenant_id=str(tenant_id),
            supplier_name=supplier_name,
            issue_type=issue_type
        )

    async def emit_price_increase_alert(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        ingredient_name: str,
        old_price: float,
        new_price: float,
        increase_percent: float
    ):
        """Emit price increase alert"""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "ingredient_name": ingredient_name,
            "old_price": old_price,
            "new_price": new_price,
            "increase_percent": increase_percent
        }

        # Determine severity based on increase
        if increase_percent > 20:
            severity = "high"
        elif increase_percent > 10:
            severity = "medium"
        else:
            severity = "low"

        await self.publisher.publish_alert(
            event_type="supply_chain.price_increase",
            tenant_id=tenant_id,
            severity=severity,
            data=metadata
        )

        logger.info(
            "price_increase_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name,
            increase_percent=increase_percent
        )

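    # Example: old_price=2.00 and new_price=2.30 give increase_percent=15.0,
    # which lands in the 10-20 band above and is published with severity "medium".
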
    async def emit_partial_delivery(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        ordered_quantity: float,
        delivered_quantity: float,
        missing_quantity: float,
        ingredient_name: str
    ):
        """Emit partial delivery alert"""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "ordered_quantity": ordered_quantity,
            "delivered_quantity": delivered_quantity,
            "missing_quantity": missing_quantity,
            "ingredient_name": ingredient_name
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.partial_delivery",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "partial_delivery_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            missing_quantity=missing_quantity
        )

    async def emit_delivery_quality_issue(
        self,
        tenant_id: UUID,
        po_id: UUID,
        po_number: str,
        supplier_name: str,
        issue_description: str,
        affected_items: List[Dict[str, Any]],
        requires_return: bool = False
    ):
        """Emit delivery quality issue alert"""

        metadata = {
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_name": supplier_name,
            "issue_description": issue_description,
            "affected_items": affected_items,
            "requires_return": requires_return,
            "affected_items_count": len(affected_items)
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.delivery_quality_issue",
            tenant_id=tenant_id,
            severity="high",
            data=metadata
        )

        logger.info(
            "delivery_quality_issue_emitted",
            tenant_id=str(tenant_id),
            po_number=po_number,
            requires_return=requires_return
        )

    async def emit_low_supplier_rating(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        supplier_name: str,
        current_rating: float,
        issues_count: int,
        recommendation: str
    ):
        """Emit low supplier rating alert"""

        metadata = {
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "current_rating": current_rating,
            "issues_count": issues_count,
            "recommendation": recommendation
        }

        await self.publisher.publish_alert(
            event_type="supply_chain.low_supplier_rating",
            tenant_id=tenant_id,
            severity="medium",
            data=metadata
        )

        logger.info(
            "low_supplier_rating_emitted",
            tenant_id=str(tenant_id),
            supplier_name=supplier_name,
            current_rating=current_rating
        )

    # Recommendation methods

    async def emit_supplier_consolidation(
        self,
        tenant_id: UUID,
        current_suppliers_count: int,
        suggested_suppliers: List[str],
        potential_savings_eur: float
    ):
        """Emit supplier consolidation recommendation"""

        metadata = {
            "current_suppliers_count": current_suppliers_count,
            "suggested_suppliers": suggested_suppliers,
            "potential_savings_eur": potential_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.supplier_consolidation",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "supplier_consolidation_emitted",
            tenant_id=str(tenant_id),
            potential_savings=potential_savings_eur
        )

    async def emit_bulk_purchase_opportunity(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_order_frequency: int,
        suggested_bulk_size: float,
        potential_discount_percent: float,
        estimated_savings_eur: float
    ):
        """Emit bulk purchase opportunity recommendation"""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_order_frequency": current_order_frequency,
            "suggested_bulk_size": suggested_bulk_size,
            "potential_discount_percent": potential_discount_percent,
            "estimated_savings_eur": estimated_savings_eur
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.bulk_purchase_opportunity",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "bulk_purchase_opportunity_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name,
            estimated_savings=estimated_savings_eur
        )

    async def emit_alternative_supplier_suggestion(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_supplier: str,
        alternative_supplier: str,
        price_difference_eur: float,
        quality_rating: float
    ):
        """Emit alternative supplier suggestion"""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_supplier": current_supplier,
            "alternative_supplier": alternative_supplier,
            "price_difference_eur": price_difference_eur,
            "quality_rating": quality_rating
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.alternative_supplier_suggestion",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "alternative_supplier_suggestion_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )

    async def emit_reorder_point_optimization(
        self,
        tenant_id: UUID,
        ingredient_name: str,
        current_reorder_point: float,
        suggested_reorder_point: float,
        rationale: str
    ):
        """Emit reorder point optimization recommendation"""

        metadata = {
            "ingredient_name": ingredient_name,
            "current_reorder_point": current_reorder_point,
            "suggested_reorder_point": suggested_reorder_point,
            "rationale": rationale
        }

        await self.publisher.publish_recommendation(
            event_type="supply_chain.reorder_point_optimization",
            tenant_id=tenant_id,
            data=metadata
        )

        logger.info(
            "reorder_point_optimization_emitted",
            tenant_id=str(tenant_id),
            ingredient_name=ingredient_name
        )
@@ -1,16 +1,15 @@
|
||||
"""
|
||||
Procurement Event Service
|
||||
Procurement Event Service - Simplified
|
||||
|
||||
Emits both ALERTS and NOTIFICATIONS for procurement/supply chain events:
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
|
||||
ALERTS (actionable):
|
||||
- po_approval_needed: Purchase order requires approval
|
||||
- po_approval_escalation: PO pending approval too long
|
||||
- delivery_overdue: Delivery past expected date
|
||||
|
||||
NOTIFICATIONS (informational):
|
||||
- po_approved: Purchase order approved
|
||||
- po_rejected: Purchase order rejected
|
||||
- po_sent_to_supplier: PO sent to supplier
|
||||
- delivery_scheduled: Delivery confirmed
|
||||
- delivery_arriving_soon: Delivery arriving within hours
|
||||
@@ -20,25 +19,23 @@ This service demonstrates the mixed event model where a single domain
|
||||
emits both actionable alerts and informational notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List
|
||||
from sqlalchemy.orm import Session
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
|
||||
from shared.alerts.base_service import BaseAlertService
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProcurementEventService(BaseAlertService):
|
||||
class ProcurementEventService:
|
||||
"""
|
||||
Service for emitting procurement/supply chain events (both alerts and notifications).
|
||||
Service for emitting procurement/supply chain events using EventPublisher.
|
||||
"""
|
||||
|
||||
def __init__(self, rabbitmq_url: str = None):
|
||||
super().__init__(service_name="procurement", rabbitmq_url=rabbitmq_url)
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher):
|
||||
self.publisher = event_publisher
|
||||
|
||||
# ============================================================
|
||||
# ALERTS (Actionable)
|
||||
@@ -46,112 +43,93 @@ class ProcurementEventService(BaseAlertService):
|
||||
|
||||
async def emit_po_approval_needed_alert(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
total_amount_eur: float,
|
||||
items_count: int,
|
||||
urgency_reason: str,
|
||||
delivery_needed_by: Optional[datetime] = None,
|
||||
delivery_needed_by: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit ALERT when purchase order requires approval.
|
||||
|
||||
This is an ALERT (not notification) because it requires user action.
|
||||
"""
|
||||
try:
|
||||
message = f"Purchase order from {supplier_name} needs approval (€{total_amount_eur:.2f}, {items_count} items)"
|
||||
if delivery_needed_by:
|
||||
days_until_needed = (delivery_needed_by - datetime.now(timezone.utc)).days
|
||||
message += f" - Needed in {days_until_needed} days"
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"po_number": po_id, # Add po_number for template compatibility
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": float(total_amount_eur),
|
||||
"total_amount": float(total_amount_eur), # Add total_amount for template compatibility
|
||||
"currency": "EUR", # Add currency for template compatibility
|
||||
"items_count": items_count,
|
||||
"urgency_reason": urgency_reason,
|
||||
"delivery_needed_by": delivery_needed_by,
|
||||
"required_delivery_date": delivery_needed_by, # Add for template compatibility
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.ALERT,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_approval_needed",
|
||||
title=f"Approval Required: PO from {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
actions=["approve_po", "reject_po", "view_po_details"],
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": total_amount_eur,
|
||||
"items_count": items_count,
|
||||
"urgency_reason": urgency_reason,
|
||||
"delivery_needed_by": delivery_needed_by.isoformat() if delivery_needed_by else None,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
# Determine severity based on amount and urgency
|
||||
if total_amount_eur > 1000 or "expedited" in urgency_reason.lower():
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="alert")
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.po_approval_needed",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"PO approval alert emitted: {po_id} (€{total_amount_eur})",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO approval alert: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_approval_needed_alert_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id,
|
||||
total_amount_eur=total_amount_eur
|
||||
)
|
||||
|
||||
async def emit_delivery_overdue_alert(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
expected_date: datetime,
|
||||
expected_date: str,
|
||||
days_overdue: int,
|
||||
items_affected: List[Dict[str, Any]],
|
||||
) -> None:
|
||||
"""
|
||||
Emit ALERT when delivery is overdue.
|
||||
|
||||
This is an ALERT because it may require contacting supplier or adjusting plans.
|
||||
"""
|
||||
try:
|
||||
message = f"Delivery from {supplier_name} is {days_overdue} days overdue (expected {expected_date.strftime('%Y-%m-%d')})"
|
||||
# Determine severity based on days overdue
|
||||
if days_overdue > 7:
|
||||
severity = "urgent"
|
||||
elif days_overdue > 3:
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.ALERT,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_overdue",
|
||||
title=f"Delivery Overdue: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
actions=["call_supplier", "adjust_production", "find_alternative"],
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_date": expected_date.isoformat(),
|
||||
"days_overdue": days_overdue,
|
||||
"items_affected": items_affected,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_date": expected_date,
|
||||
"days_overdue": days_overdue,
|
||||
"items_affected": items_affected,
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="alert")
|
||||
await self.publisher.publish_alert(
|
||||
event_type="supply_chain.delivery_overdue",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery overdue alert emitted: {delivery_id} ({days_overdue} days)",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery overdue alert: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_overdue_alert_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id,
|
||||
days_overdue=days_overdue
|
||||
)
|
||||
|
||||
# ============================================================
|
||||
# NOTIFICATIONS (Informational)
|
||||
@@ -159,61 +137,40 @@ class ProcurementEventService(BaseAlertService):
|
||||
|
||||
async def emit_po_approved_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
total_amount_eur: float,
|
||||
approved_by: str,
|
||||
expected_delivery_date: Optional[datetime] = None,
|
||||
expected_delivery_date: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when purchase order is approved.
|
||||
|
||||
This is a NOTIFICATION (not alert) - informational only, no action needed.
|
||||
"""
|
||||
try:
|
||||
message = f"Purchase order to {supplier_name} approved by {approved_by} (€{total_amount_eur:.2f})"
|
||||
if expected_delivery_date:
|
||||
message += f" - Expected delivery: {expected_delivery_date.strftime('%Y-%m-%d')}"
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": float(total_amount_eur),
|
||||
"approved_by": approved_by,
|
||||
"expected_delivery_date": expected_delivery_date,
|
||||
"approved_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_approved",
|
||||
title=f"PO Approved: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"total_amount_eur": total_amount_eur,
|
||||
"approved_by": approved_by,
|
||||
"expected_delivery_date": expected_delivery_date.isoformat() if expected_delivery_date else None,
|
||||
"approved_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.po_approved",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
|
||||
logger.info(
|
||||
f"PO approved notification emitted: {po_id}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO approved notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_approved_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id
|
||||
)
|
||||
|
||||
async def emit_po_sent_to_supplier_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
supplier_email: str,
|
||||
@@ -221,136 +178,90 @@ class ProcurementEventService(BaseAlertService):
|
||||
"""
|
||||
Emit NOTIFICATION when PO is sent to supplier.
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="po_sent_to_supplier",
|
||||
title=f"PO Sent: {supplier_name}",
|
||||
message=f"Purchase order sent to {supplier_name} ({supplier_email})",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_email": supplier_email,
|
||||
"sent_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"supplier_email": supplier_email,
|
||||
"sent_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.po_sent_to_supplier",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"PO sent notification emitted: {po_id}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit PO sent notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "po_id": po_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"po_sent_to_supplier_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
po_id=po_id
|
||||
)
|
||||
|
||||
async def emit_delivery_scheduled_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
expected_delivery_date: datetime,
|
||||
expected_delivery_date: str,
|
||||
tracking_number: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is scheduled/confirmed.
|
||||
"""
|
||||
try:
|
||||
message = f"Delivery from {supplier_name} scheduled for {expected_delivery_date.strftime('%Y-%m-%d %H:%M')}"
|
||||
if tracking_number:
|
||||
message += f" (Tracking: {tracking_number})"
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": expected_delivery_date,
|
||||
"tracking_number": tracking_number,
|
||||
}
|
||||
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_scheduled",
|
||||
title=f"Delivery Scheduled: {supplier_name}",
|
||||
message=message,
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_delivery_date": expected_delivery_date.isoformat(),
|
||||
"tracking_number": tracking_number,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_scheduled",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
|
||||
logger.info(
|
||||
f"Delivery scheduled notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery scheduled notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_scheduled_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
|
||||
async def emit_delivery_arriving_soon_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
supplier_name: str,
|
||||
expected_arrival_time: datetime,
|
||||
expected_arrival_time: str,
|
||||
hours_until_arrival: int,
|
||||
) -> None:
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is arriving soon (within hours).
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_arriving_soon",
|
||||
title=f"Delivery Arriving Soon: {supplier_name}",
|
||||
message=f"Delivery from {supplier_name} arriving in {hours_until_arrival} hours",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_arrival_time": expected_arrival_time.isoformat(),
|
||||
"hours_until_arrival": hours_until_arrival,
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"supplier_name": supplier_name,
|
||||
"expected_arrival_time": expected_arrival_time,
|
||||
"hours_until_arrival": hours_until_arrival,
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_arriving_soon",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery arriving soon notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery arriving soon notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_arriving_soon_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
|
||||
async def emit_delivery_received_notification(
|
||||
self,
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
tenant_id: UUID,
|
||||
delivery_id: str,
|
||||
po_id: str,
|
||||
supplier_name: str,
|
||||
@@ -360,36 +271,23 @@ class ProcurementEventService(BaseAlertService):
|
||||
"""
|
||||
Emit NOTIFICATION when delivery is received.
|
||||
"""
|
||||
try:
|
||||
event = RawEvent(
|
||||
tenant_id=tenant_id,
|
||||
event_class=EventClass.NOTIFICATION,
|
||||
event_domain=EventDomain.SUPPLY_CHAIN,
|
||||
event_type="delivery_received",
|
||||
title=f"Delivery Received: {supplier_name}",
|
||||
message=f"Received {items_received} items from {supplier_name} - Checked by {received_by}",
|
||||
service="procurement",
|
||||
event_metadata={
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"items_received": items_received,
|
||||
"received_by": received_by,
|
||||
"received_at": datetime.now(timezone.utc).isoformat(),
|
||||
},
|
||||
timestamp=datetime.now(timezone.utc),
|
||||
)
|
||||
metadata = {
|
||||
"delivery_id": delivery_id,
|
||||
"po_id": po_id,
|
||||
"supplier_name": supplier_name,
|
||||
"items_received": items_received,
|
||||
"received_by": received_by,
|
||||
"received_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publish_item(tenant_id, event.dict(), item_type="notification")
|
||||
await self.publisher.publish_notification(
|
||||
event_type="supply_chain.delivery_received",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Delivery received notification emitted: {delivery_id}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to emit delivery received notification: {e}",
|
||||
extra={"tenant_id": tenant_id, "delivery_id": delivery_id},
|
||||
exc_info=True,
|
||||
)
|
||||
logger.info(
|
||||
"delivery_received_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
delivery_id=delivery_id
|
||||
)
|
||||
@@ -31,7 +31,7 @@ from shared.clients.forecast_client import ForecastServiceClient
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
from shared.clients.recipes_client import RecipesServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging import RabbitMQClient
|
||||
from shared.monitoring.decorators import monitor_performance
|
||||
from shared.utils.tenant_settings_client import TenantSettingsClient
|
||||
|
||||
|
||||
@@ -30,9 +30,10 @@ from app.schemas.purchase_order_schemas import (
|
||||
)
|
||||
from app.core.config import settings
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.messaging.event_publisher import ProcurementEventPublisher
|
||||
from shared.messaging import RabbitMQClient, UnifiedEventPublisher, EVENT_TYPES
|
||||
from app.utils.cache import delete_cached, make_cache_key
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -46,7 +47,8 @@ class PurchaseOrderService:
|
||||
config: BaseServiceSettings,
|
||||
suppliers_client: Optional[SuppliersServiceClient] = None,
|
||||
rabbitmq_client: Optional[RabbitMQClient] = None,
|
||||
event_publisher: Optional[ProcurementEventPublisher] = None
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None,
|
||||
inventory_client: Optional[InventoryServiceClient] = None
|
||||
):
|
||||
self.db = db
|
||||
self.config = config
|
||||
@@ -58,9 +60,16 @@ class PurchaseOrderService:
|
||||
# Initialize suppliers client for supplier validation
|
||||
self.suppliers_client = suppliers_client or SuppliersServiceClient(config)
|
||||
|
||||
# Initialize inventory client for stock information
|
||||
self.inventory_client = inventory_client or InventoryServiceClient(config)
|
||||
|
||||
# Initialize event publisher for RabbitMQ events
|
||||
self.rabbitmq_client = rabbitmq_client
|
||||
self.event_publisher = event_publisher or ProcurementEventPublisher(rabbitmq_client)
|
||||
self.event_publisher = event_publisher or UnifiedEventPublisher(rabbitmq_client, "procurement")
|
||||
|
||||
# Request-scoped cache for supplier data to avoid redundant API calls
|
||||
# When enriching multiple POs with the same supplier, cache prevents duplicate calls
|
||||
self._supplier_cache: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER CRUD
|
||||
@@ -210,9 +219,24 @@ class PurchaseOrderService:
|
||||
skip: int = 0,
|
||||
limit: int = 50,
|
||||
supplier_id: Optional[uuid.UUID] = None,
|
||||
status: Optional[str] = None
|
||||
status: Optional[str] = None,
|
||||
enrich_supplier: bool = True
|
||||
) -> List[PurchaseOrder]:
|
||||
"""List purchase orders with filters"""
|
||||
"""
|
||||
List purchase orders with filters
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
skip: Number of records to skip
|
||||
limit: Maximum number of records
|
||||
supplier_id: Optional supplier filter
|
||||
status: Optional status filter
|
||||
enrich_supplier: Whether to fetch and attach supplier details (default: True)
|
||||
Set to False for faster queries when supplier data isn't needed
|
||||
|
||||
Returns:
|
||||
List of purchase orders
|
||||
"""
|
||||
try:
|
||||
# Convert status string to enum if provided
|
||||
status_enum = None
|
||||
@@ -233,9 +257,14 @@ class PurchaseOrderService:
|
||||
status=status_enum
|
||||
)
|
||||
|
||||
# Enrich with supplier information
|
||||
for po in pos:
|
||||
await self._enrich_po_with_supplier(tenant_id, po)
|
||||
# Only enrich with supplier information if requested
|
||||
# When enrich_supplier=False, returns POs with just supplier_id for client-side matching
|
||||
if pos and enrich_supplier:
|
||||
import asyncio
|
||||
# Enrich with supplier information in parallel (Fix #9: Avoid N+1 query pattern)
|
||||
# This fetches all supplier data concurrently instead of sequentially
|
||||
enrichment_tasks = [self._enrich_po_with_supplier(tenant_id, po) for po in pos]
|
||||
await asyncio.gather(*enrichment_tasks, return_exceptions=True)
|
||||
|
||||
return pos
|
||||
except Exception as e:
|
||||
@@ -366,6 +395,25 @@ class PurchaseOrderService:
|
||||
po = await self.po_repo.update_po(po_id, tenant_id, update_data)
|
||||
await self.db.commit()
|
||||
|
||||
# PHASE 2: Invalidate purchase orders cache
|
||||
cache_key = make_cache_key("purchase_orders", str(tenant_id))
|
||||
await delete_cached(cache_key)
|
||||
logger.debug("Invalidated purchase orders cache", cache_key=cache_key, tenant_id=str(tenant_id))
|
||||
|
||||
# Acknowledge PO approval alerts (non-blocking)
|
||||
try:
|
||||
from shared.clients.alert_processor_client import get_alert_processor_client
|
||||
alert_client = get_alert_processor_client(self.config, "procurement")
|
||||
await alert_client.acknowledge_alerts_by_metadata(
|
||||
tenant_id=tenant_id,
|
||||
alert_type="po_approval_needed",
|
||||
metadata_filter={"po_id": str(po_id)}
|
||||
)
|
||||
logger.debug("Acknowledged PO approval alerts", po_id=po_id)
|
||||
except Exception as e:
|
||||
# Log but don't fail the approval process
|
||||
logger.warning("Failed to acknowledge PO approval alerts", po_id=po_id, error=str(e))
|
||||
|
||||
logger.info("Purchase order approved successfully", po_id=po_id)
|
||||
|
||||
# Publish PO approved event (non-blocking, fire-and-forget)
|
||||
@@ -384,20 +432,25 @@ class PurchaseOrderService:
|
||||
for item in items
|
||||
]
|
||||
|
||||
await self.event_publisher.publish_po_approved_event(
|
||||
event_data = {
|
||||
"po_id": str(po_id),
|
||||
"po_number": po.po_number,
|
||||
"supplier_id": str(po.supplier_id),
|
||||
"supplier_name": supplier.get('name', ''),
|
||||
"supplier_email": supplier.get('email'),
|
||||
"supplier_phone": supplier.get('phone'),
|
||||
"total_amount": float(po.total_amount),
|
||||
"currency": po.currency,
|
||||
"required_delivery_date": po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
"items": items_data,
|
||||
"approved_by": str(approved_by),
|
||||
"approved_at": po.approved_at.isoformat()
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.PO_APPROVED,
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
po_number=po.po_number,
|
||||
supplier_id=po.supplier_id,
|
||||
supplier_name=supplier.get('name', ''),
|
||||
supplier_email=supplier.get('email'),
|
||||
supplier_phone=supplier.get('phone'),
|
||||
total_amount=po.total_amount,
|
||||
currency=po.currency,
|
||||
required_delivery_date=po.required_delivery_date.isoformat() if po.required_delivery_date else None,
|
||||
items=items_data,
|
||||
approved_by=approved_by,
|
||||
approved_at=po.approved_at.isoformat()
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the approval if event publishing fails
|
||||
@@ -449,15 +502,20 @@ class PurchaseOrderService:
|
||||
|
||||
# Publish PO rejected event (non-blocking, fire-and-forget)
|
||||
try:
|
||||
await self.event_publisher.publish_po_rejected_event(
|
||||
event_data = {
|
||||
"po_id": str(po_id),
|
||||
"po_number": po.po_number,
|
||||
"supplier_id": str(po.supplier_id),
|
||||
"supplier_name": supplier.get('name', ''),
|
||||
"rejection_reason": rejection_reason,
|
||||
"rejected_by": str(rejected_by),
|
||||
"rejected_at": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.PO_REJECTED,
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
po_number=po.po_number,
|
||||
supplier_id=po.supplier_id,
|
||||
supplier_name=supplier.get('name', ''),
|
||||
rejection_reason=rejection_reason,
|
||||
rejected_by=rejected_by,
|
||||
rejected_at=datetime.utcnow().isoformat()
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the rejection if event publishing fails
|
||||
@@ -600,13 +658,18 @@ class PurchaseOrderService:
|
||||
"rejection_reason": item_data.rejection_reason
|
||||
})
|
||||
|
||||
await self.event_publisher.publish_delivery_received_event(
|
||||
event_data = {
|
||||
"delivery_id": str(delivery.id),
|
||||
"po_id": str(delivery_data.purchase_order_id),
|
||||
"items": items_data,
|
||||
"received_at": datetime.utcnow().isoformat(),
|
||||
"received_by": str(created_by)
|
||||
}
|
||||
|
||||
await self.event_publisher.publish_business_event(
|
||||
event_type=EVENT_TYPES.PROCUREMENT.DELIVERY_RECEIVED,
|
||||
tenant_id=tenant_id,
|
||||
delivery_id=delivery.id,
|
||||
po_id=delivery_data.purchase_order_id,
|
||||
items=items_data,
|
||||
received_at=datetime.utcnow().isoformat(),
|
||||
received_by=created_by
|
||||
data=event_data
|
||||
)
|
||||
except Exception as event_error:
|
||||
# Log but don't fail the delivery creation if event publishing fails
|
||||
@@ -728,6 +791,19 @@ class PurchaseOrderService:
    ) -> None:
        """Emit raw alert for PO approval needed with structured parameters"""
        try:
            # Calculate urgency fields based on required delivery date
            now = datetime.utcnow()
            hours_until_consequence = None
            deadline = None

            if purchase_order.required_delivery_date:
                # Deadline for approval is the required delivery date minus supplier lead time
                # We need to approve it early enough for supplier to deliver on time
                supplier_lead_time_days = supplier.get('standard_lead_time', 7)
                approval_deadline = purchase_order.required_delivery_date - timedelta(days=supplier_lead_time_days)
                deadline = approval_deadline
                hours_until_consequence = (approval_deadline - now).total_seconds() / 3600

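            # Worked example (editor's illustration, hypothetical values):
            #   required_delivery_date = 2025-06-10 00:00, standard_lead_time = 7 days
            #   -> approval_deadline   = 2025-06-03 00:00
            #   now                    = 2025-06-01 12:00
            #   -> hours_until_consequence = (approval_deadline - now).total_seconds() / 3600 = 36.0
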
            # Prepare alert payload matching RawAlert schema
            alert_data = {
                'id': str(uuid.uuid4()), # Generate unique alert ID
@@ -753,8 +829,13 @@ class PurchaseOrderService:
                # Add urgency context for dashboard prioritization
                'financial_impact': float(purchase_order.total_amount),
                'urgency_score': 85, # Default high urgency for pending approvals
                # Include reasoning data from orchestrator (if available)
                'reasoning_data': purchase_order.reasoning_data if purchase_order.reasoning_data else None
                # CRITICAL: Add deadline and hours_until_consequence for enrichment service
                'deadline': deadline.isoformat() if deadline else None,
                'hours_until_consequence': round(hours_until_consequence, 1) if hours_until_consequence else None,
                # Include reasoning data from orchestrator OR build from inventory service
                'reasoning_data': purchase_order.reasoning_data or await self._build_reasoning_data_fallback(
                    tenant_id, purchase_order, supplier
                )
            },
            'message_params': {
                'po_number': purchase_order.po_number,
@@ -792,6 +873,147 @@ class PurchaseOrderService:
            )
            raise

    async def _build_reasoning_data_fallback(
        self,
        tenant_id: uuid.UUID,
        purchase_order: PurchaseOrder,
        supplier: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Build rich reasoning data by querying inventory service for actual stock levels

        This method is called when a PO doesn't have reasoning_data (e.g., manually created POs).
        It queries the inventory service to get real stock levels and builds structured reasoning
        that can be translated via i18n on the frontend.
        """
        try:
            # Query inventory service for actual stock levels
            critical_products = []
            min_depletion_hours = float('inf')
            product_names = []

            # Get items from PO - handle both relationship and explicit loading
            items = purchase_order.items if hasattr(purchase_order, 'items') else []

            for item in items:
                product_names.append(item.product_name)

                # Only query if we have ingredient_id
                if not hasattr(item, 'ingredient_id') or not item.ingredient_id:
                    continue

                try:
                    # Call inventory service to get current stock - with 2 second timeout
                    stock_entries = await self.inventory_client.get_ingredient_stock(
                        ingredient_id=item.ingredient_id,
                        tenant_id=str(tenant_id)
                    )

                    if stock_entries:
                        # Calculate total available stock
                        total_stock = sum(entry.get('quantity', 0) for entry in stock_entries)

                        # Estimate daily usage (this would ideally come from forecast service)
                        # For now, use a simple heuristic: if PO quantity is X, daily usage might be X/7
                        estimated_daily_usage = item.quantity / 7.0 if item.quantity else 1.0

                        if estimated_daily_usage > 0:
                            hours_until_depletion = (total_stock / estimated_daily_usage) * 24

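                            # Worked example (editor's illustration, hypothetical numbers):
                            #   PO line quantity = 70 units -> estimated_daily_usage = 70 / 7.0 = 10 units/day
                            #   total_stock      = 15 units -> hours_until_depletion = (15 / 10) * 24 = 36.0 h
                            #   36.0 h < 48 h, so the product is flagged as critical below
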
                            # Mark as critical if less than 48 hours (2 days)
                            if hours_until_depletion < 48:
                                critical_products.append(item.product_name)
                                min_depletion_hours = min(min_depletion_hours, hours_until_depletion)

                            logger.info(
                                "Calculated stock depletion for PO item",
                                tenant_id=str(tenant_id),
                                product=item.product_name,
                                current_stock=total_stock,
                                hours_until_depletion=round(hours_until_depletion, 1)
                            )

                except Exception as item_error:
                    logger.warning(
                        "Failed to get stock for PO item",
                        error=str(item_error),
                        product=item.product_name,
                        tenant_id=str(tenant_id)
                    )
                    # Continue with other items even if one fails
                    continue

            # Build rich reasoning data based on what we found
            if critical_products:
                # Use detailed reasoning type when we have critical products
                return {
                    "type": "low_stock_detection_detailed",
                    "parameters": {
                        "supplier_name": supplier.get('name', 'Supplier'),
                        "product_names": product_names,
                        "product_count": len(product_names),
                        "critical_products": critical_products,
                        "critical_product_count": len(critical_products),
                        "min_depletion_hours": round(min_depletion_hours, 1) if min_depletion_hours != float('inf') else 48,
                        "potential_loss_eur": float(purchase_order.total_amount * 1.5), # Estimated opportunity cost
                    },
                    "consequence": {
                        "type": "stockout_risk",
                        "severity": "high",
                        "impact_days": 2
                    },
                    "metadata": {
                        "trigger_source": "manual_with_inventory_check",
                        "ai_assisted": False,
                        "enhanced_mode": True
                    }
                }
            else:
                # Use basic reasoning type when stock levels are not critical
                return {
                    "type": "low_stock_detection",
                    "parameters": {
                        "supplier_name": supplier.get('name', 'Supplier'),
                        "product_names": product_names,
                        "product_count": len(product_names),
                    },
                    "consequence": {
                        "type": "stockout_risk",
                        "severity": "medium",
                        "impact_days": 5
                    },
                    "metadata": {
                        "trigger_source": "manual_with_inventory_check",
                        "ai_assisted": False,
                        "enhanced_mode": False
                    }
                }

        except Exception as e:
            logger.warning(
                "Failed to build enhanced reasoning data, using basic fallback",
                error=str(e),
                tenant_id=str(tenant_id),
                po_id=str(purchase_order.id)
            )
            # Return basic fallback if inventory service is unavailable
            return {
                "type": "low_stock_detection",
                "parameters": {
                    "supplier_name": supplier.get('name', 'Supplier'),
                    "product_names": [item.product_name for item in (purchase_order.items if hasattr(purchase_order, 'items') else [])],
                    "product_count": len(purchase_order.items) if hasattr(purchase_order, 'items') else 0,
                },
                "consequence": {
                    "type": "stockout_risk",
                    "severity": "medium",
                    "impact_days": 5
                },
                "metadata": {
                    "trigger_source": "fallback_basic",
                    "ai_assisted": False
                }
            }

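# --- Editorial sketch (not part of this commit) --------------------------------
# The reasoning_data returned above is deliberately structured ("type" plus
# "parameters") so the frontend can translate it via i18n. A minimal illustration
# of how a consumer might turn it into a message; the MESSAGES table and the
# render_reasoning helper are hypothetical, only the "type"/"parameters"
# structure comes from this diff.
MESSAGES = {
    "low_stock_detection": "{supplier_name}: {product_count} product(s) running low",
    "low_stock_detection_detailed": "{supplier_name}: {critical_product_count} product(s) deplete in ~{min_depletion_hours} h",
}


def render_reasoning(reasoning: dict) -> str:
    """Resolve a reasoning_data payload to a human-readable string."""
    template = MESSAGES.get(reasoning.get("type"), "{supplier_name}: review purchase order")
    try:
        return template.format(**reasoning.get("parameters", {}))
    except KeyError:
        # Missing parameters: fall back to the raw type rather than crashing
        return reasoning.get("type", "po_approval_needed")

# Example: render_reasoning({"type": "low_stock_detection",
#                            "parameters": {"supplier_name": "Acme", "product_names": ["Flour"], "product_count": 1}})
# -> "Acme: 1 product(s) running low"
# --------------------------------------------------------------------------------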
    async def _get_and_validate_supplier(self, tenant_id: uuid.UUID, supplier_id: uuid.UUID) -> Dict[str, Any]:
        """Get and validate supplier from Suppliers Service"""
        try:
@@ -809,14 +1031,40 @@ class PurchaseOrderService:
            logger.error("Error validating supplier", error=str(e), supplier_id=supplier_id)
            raise

    async def _get_supplier_cached(self, tenant_id: uuid.UUID, supplier_id: uuid.UUID) -> Optional[Dict[str, Any]]:
        """
        Get supplier with request-scoped caching to avoid redundant API calls.

        When enriching multiple POs that share suppliers, this cache prevents
        duplicate calls to the suppliers service (Fix #11).

        Args:
            tenant_id: Tenant ID
            supplier_id: Supplier ID

        Returns:
            Supplier data dict or None
        """
        cache_key = f"{tenant_id}:{supplier_id}"

        if cache_key not in self._supplier_cache:
            supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(supplier_id))
            self._supplier_cache[cache_key] = supplier
            logger.debug("Supplier cache MISS", tenant_id=str(tenant_id), supplier_id=str(supplier_id))
        else:
            logger.debug("Supplier cache HIT", tenant_id=str(tenant_id), supplier_id=str(supplier_id))

        return self._supplier_cache[cache_key]

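    # --- Editorial sketch (not part of this commit) ------------------------------
    # _supplier_cache is read and written above, but its setup is outside this
    # hunk. For the cache to stay request-scoped, one plausible arrangement
    # (names other than _supplier_cache and _enrich_po_with_supplier are
    # hypothetical) is to reset the dict at the start of each batch enrichment:
    #
    #     async def enrich_purchase_orders(self, tenant_id, purchase_orders):
    #         self._supplier_cache = {}  # fresh cache per request/batch
    #         for po in purchase_orders:
    #             await self._enrich_po_with_supplier(tenant_id, po)
    #
    # so repeated POs from the same supplier hit the suppliers service only once.
    # ------------------------------------------------------------------------------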
    async def _enrich_po_with_supplier(self, tenant_id: uuid.UUID, po: PurchaseOrder) -> None:
        """Enrich purchase order with supplier information"""
        try:
            supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(po.supplier_id))
            # Use cached supplier lookup to avoid redundant API calls
            supplier = await self._get_supplier_cached(tenant_id, po.supplier_id)
            if supplier:
                # Set supplier_name as a dynamic attribute on the model instance
                po.supplier_name = supplier.get('name', 'Unknown Supplier')


                # Create a supplier summary object with the required fields for the frontend
                # Using the same structure as the suppliers service SupplierSummary schema
                supplier_summary = {
@@ -840,7 +1088,7 @@ class PurchaseOrderService:
                    'total_orders': supplier.get('total_orders', 0),
                    'total_amount': supplier.get('total_amount', 0)
                }


                # Set the full supplier object as a dynamic attribute
                po.supplier = supplier_summary
        except Exception as e: