Add overdue PO detection and improve the overall backend implementation
@@ -13,6 +13,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.core.config import settings
from app.services.purchase_order_service import PurchaseOrderService
from app.services.overdue_po_detector import OverduePODetector
from app.schemas.purchase_order_schemas import (
    PurchaseOrderCreate,
    PurchaseOrderUpdate,
@@ -25,6 +26,7 @@ from app.schemas.purchase_order_schemas import (
    SupplierInvoiceResponse,
)
from shared.routing import RouteBuilder
from shared.auth.decorators import get_current_user_dep
import structlog

logger = structlog.get_logger()
@@ -368,6 +370,7 @@ async def create_delivery(
    po_id: str,
    delivery_data: DeliveryCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """
@@ -391,7 +394,7 @@ async def create_delivery(
        delivery = await service.create_delivery(
            tenant_id=uuid.UUID(tenant_id),
            delivery_data=delivery_data,
-           created_by=uuid.uuid4()  # TODO: Get from auth context
+           created_by=uuid.UUID(current_user.get("user_id"))
        )

        return DeliveryResponse.model_validate(delivery)
@@ -411,6 +414,7 @@ async def update_delivery_status(
    delivery_id: str,
    status: str = Query(..., description="New delivery status"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """
@@ -431,7 +435,7 @@ async def update_delivery_status(
            tenant_id=uuid.UUID(tenant_id),
            delivery_id=uuid.UUID(delivery_id),
            status=status,
-           updated_by=uuid.uuid4()  # TODO: Get from auth context
+           updated_by=uuid.UUID(current_user.get("user_id"))
        )

        if not delivery:
@@ -461,6 +465,7 @@ async def create_invoice(
    po_id: str,
    invoice_data: SupplierInvoiceCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    service: PurchaseOrderService = Depends(get_po_service)
):
    """
@@ -482,7 +487,7 @@ async def create_invoice(
        invoice = await service.create_invoice(
            tenant_id=uuid.UUID(tenant_id),
            invoice_data=invoice_data,
-           created_by=uuid.uuid4()  # TODO: Get from auth context
+           created_by=uuid.UUID(current_user.get("user_id"))
        )

        return SupplierInvoiceResponse.model_validate(invoice)
@@ -492,3 +497,77 @@ async def create_invoice(
    except Exception as e:
        logger.error("Error creating invoice", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))


# ================================================================
# OVERDUE PO DETECTION
# ================================================================

@router.get(
    route_builder.build_base_route("purchase-orders/overdue"),
    response_model=List[dict]
)
async def get_overdue_purchase_orders(
    tenant_id: str = Path(..., description="Tenant ID"),
    limit: int = Query(10, ge=1, le=100, description="Max results")
):
    """
    Get overdue purchase orders for dashboard display.

    Returns POs that are past their estimated delivery date
    but not yet marked as delivered.

    Args:
        tenant_id: Tenant UUID
        limit: Maximum number of results (default: 10)

    Returns:
        List of overdue PO summaries with severity and days overdue
    """
    try:
        detector = OverduePODetector()
        overdue_pos = await detector.get_overdue_pos_for_dashboard(
            tenant_id=uuid.UUID(tenant_id),
            limit=limit
        )

        return overdue_pos

    except Exception as e:
        logger.error("Error getting overdue POs", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=str(e))


@router.get(
    route_builder.build_resource_action_route("purchase-orders", "po_id", "overdue-status"),
    response_model=dict
)
async def check_po_overdue_status(
    po_id: str,
    tenant_id: str = Path(..., description="Tenant ID")
):
    """
    Check if a specific PO is overdue.

    Args:
        tenant_id: Tenant UUID
        po_id: Purchase order UUID

    Returns:
        Overdue status info or null if not overdue
    """
    try:
        detector = OverduePODetector()
        overdue_info = await detector.check_single_po_overdue(
            po_id=uuid.UUID(po_id),
            tenant_id=uuid.UUID(tenant_id)
        )

        if overdue_info:
            return overdue_info
        else:
            return {"overdue": False}

    except Exception as e:
        logger.error("Error checking PO overdue status", error=str(e), po_id=po_id)
        raise HTTPException(status_code=500, detail=str(e))
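
The two endpoints above take only path and query parameters, so they can be smoke-tested directly once the service is up. A minimal client sketch, assuming the RouteBuilder helpers expand to tenant-scoped paths such as /api/v1/tenants/{tenant_id}/purchase-orders/overdue (the actual prefix comes from RouteBuilder, which is not shown in this diff):

# Hypothetical smoke test for the new overdue endpoints; the base URL, path
# shape, and tenant UUID are placeholders, not values taken from this commit.
import asyncio
import uuid

import httpx

BASE_URL = "http://localhost:8000"  # assumed service address
TENANT_ID = uuid.UUID("11111111-1111-1111-1111-111111111111")  # placeholder tenant


async def main() -> None:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # Dashboard list: POs past their estimated delivery date, capped by `limit`.
        overdue = await client.get(
            f"/api/v1/tenants/{TENANT_ID}/purchase-orders/overdue",
            params={"limit": 5},
        )
        overdue.raise_for_status()
        for po in overdue.json():
            print(po["po_number"], po["days_overdue"], po["severity"])

        # Single-PO check: returns the overdue summary or {"overdue": false}.
        po_id = uuid.uuid4()  # placeholder PO id
        status = await client.get(
            f"/api/v1/tenants/{TENANT_ID}/purchase-orders/{po_id}/overdue-status"
        )
        print(status.json())


if __name__ == "__main__":
    asyncio.run(main())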
services/procurement/app/jobs/__init__.py  (new file, 6 lines)
@@ -0,0 +1,6 @@
"""
Procurement background jobs and schedulers
"""
from .overdue_po_scheduler import OverduePOScheduler

__all__ = ["OverduePOScheduler"]
services/procurement/app/jobs/overdue_po_scheduler.py  (new file, 216 lines)
@@ -0,0 +1,216 @@
"""
Overdue PO Scheduler

Background scheduler that periodically checks for overdue purchase orders
and publishes alerts for them.
"""

import asyncio
from typing import Optional
from datetime import datetime, timezone
import structlog

from app.services.overdue_po_detector import OverduePODetector
from shared.messaging.rabbitmq import RabbitMQClient
from shared.messaging.events import BaseEvent

logger = structlog.get_logger()


class OverduePOScheduler:
    """
    Overdue PO Scheduler

    Background task that periodically checks for overdue POs
    and publishes alerts.
    """

    def __init__(
        self,
        rabbitmq_client: Optional[RabbitMQClient] = None,
        check_interval_seconds: int = 3600,  # 1 hour default
    ):
        """
        Initialize overdue PO scheduler.

        Args:
            rabbitmq_client: RabbitMQ client for publishing events
            check_interval_seconds: Seconds between checks (default: 3600 = 1 hour)
        """
        self.detector = OverduePODetector()
        self.rabbitmq_client = rabbitmq_client
        self.check_interval_seconds = check_interval_seconds

        self._task: Optional[asyncio.Task] = None
        self._running = False

        logger.info(
            "Overdue PO Scheduler initialized",
            check_interval_seconds=check_interval_seconds
        )

    async def start(self):
        """Start the scheduler background task"""
        if self._running:
            logger.warning("Overdue PO Scheduler already running")
            return

        self._running = True
        self._task = asyncio.create_task(self._run_scheduler())

        logger.info("Overdue PO Scheduler started")

    async def stop(self):
        """Stop the scheduler background task"""
        if not self._running:
            return

        self._running = False

        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass

        logger.info("Overdue PO Scheduler stopped")

    async def _run_scheduler(self):
        """Main scheduler loop"""
        logger.info("Overdue PO Scheduler loop started")

        while self._running:
            try:
                await self._process_cycle()
            except Exception as e:
                logger.error(
                    "Overdue PO scheduler cycle failed",
                    error=str(e),
                    exc_info=True
                )

            # Wait for next cycle
            try:
                await asyncio.sleep(self.check_interval_seconds)
            except asyncio.CancelledError:
                break

        logger.info("Overdue PO Scheduler loop ended")

    async def _process_cycle(self):
        """Process one scheduler cycle - detect and alert on overdue POs"""
        logger.info("Starting overdue PO detection cycle")

        try:
            # Detect all overdue POs across all tenants
            overdue_pos = await self.detector.detect_overdue_pos()

            if not overdue_pos:
                logger.info("No overdue POs detected in this cycle")
                return

            # Group by severity
            by_severity = {
                'critical': [],
                'high': [],
                'medium': [],
                'low': []
            }

            for po in overdue_pos:
                severity = po.get('severity', 'medium')
                by_severity[severity].append(po)

            # Log summary
            logger.warning(
                "Overdue POs detected",
                total=len(overdue_pos),
                critical=len(by_severity['critical']),
                high=len(by_severity['high']),
                medium=len(by_severity['medium']),
                low=len(by_severity['low'])
            )

            # Publish events for critical and high severity
            if self.rabbitmq_client and self.rabbitmq_client.connected:
                critical_and_high = by_severity['critical'] + by_severity['high']

                for po in critical_and_high:
                    await self._publish_overdue_alert(po)

                logger.info(
                    "Published overdue alerts",
                    count=len(critical_and_high)
                )
            else:
                logger.warning(
                    "RabbitMQ not available, skipping alert publishing",
                    overdue_count=len(by_severity['critical'] + by_severity['high'])
                )

        except Exception as e:
            logger.error(
                "Error in overdue PO detection cycle",
                error=str(e),
                exc_info=True
            )

    async def _publish_overdue_alert(self, po_summary: dict):
        """
        Publish an overdue PO alert event.

        Args:
            po_summary: Overdue PO summary from detector
        """
        try:
            event_data = {
                'po_id': po_summary['po_id'],
                'tenant_id': po_summary['tenant_id'],
                'po_number': po_summary['po_number'],
                'supplier_id': po_summary['supplier_id'],
                'status': po_summary['status'],
                'total_amount': po_summary['total_amount'],
                'currency': po_summary['currency'],
                'estimated_delivery_date': po_summary['estimated_delivery_date'],
                'days_overdue': po_summary['days_overdue'],
                'severity': po_summary['severity'],
                'priority': po_summary['priority'],
                'detected_at': datetime.now(timezone.utc).isoformat()
            }

            # Create event
            event = BaseEvent(
                service_name='procurement',
                data=event_data,
                event_type='po.overdue_detected'
            )

            # Publish to RabbitMQ
            success = await self.rabbitmq_client.publish_event(
                exchange_name='procurement.events',
                routing_key='po.overdue',
                event_data=event.to_dict(),
                persistent=True
            )

            if success:
                logger.info(
                    "Published overdue alert",
                    po_number=po_summary['po_number'],
                    days_overdue=po_summary['days_overdue'],
                    severity=po_summary['severity']
                )
            else:
                logger.error(
                    "Failed to publish overdue alert",
                    po_number=po_summary['po_number']
                )

        except Exception as e:
            logger.error(
                "Error publishing overdue alert",
                error=str(e),
                po_number=po_summary.get('po_number'),
                exc_info=True
            )
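
For reference, a minimal lifecycle sketch for running the scheduler outside the service process, assuming a reachable RabbitMQ broker; the AMQP URL and the shortened interval are illustrative placeholders:

# Illustrative only: run the scheduler for a few cycles, then shut it down cleanly.
import asyncio

from app.jobs.overdue_po_scheduler import OverduePOScheduler
from shared.messaging.rabbitmq import RabbitMQClient


async def main() -> None:
    client = RabbitMQClient("amqp://guest:guest@localhost/", service_name="procurement-service")  # assumed broker URL
    await client.connect()

    scheduler = OverduePOScheduler(rabbitmq_client=client, check_interval_seconds=60)
    await scheduler.start()       # spawns _run_scheduler() as a background asyncio task
    try:
        await asyncio.sleep(300)  # let a handful of detection cycles run
    finally:
        await scheduler.stop()    # cancels the task and waits for it to exit
        await client.disconnect()


if __name__ == "__main__":
    asyncio.run(main())

In the service itself the same start()/stop() calls are made from on_startup and on_shutdown, as the service startup hunk further down shows.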
@@ -11,12 +11,13 @@ from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
from app.jobs.overdue_po_scheduler import OverduePOScheduler


class ProcurementService(StandardFastAPIService):
    """Procurement Service with standardized setup"""

-   expected_migration_version = "00001"
+   expected_migration_version = "001_unified_initial_schema"

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations"""
@@ -49,6 +50,10 @@ class ProcurementService(StandardFastAPIService):
            'supplier_selection_history'
        ]

        # Initialize scheduler and rabbitmq client
        self.overdue_po_scheduler = None
        self.rabbitmq_client = None

        super().__init__(
            service_name="procurement-service",
            app_name=settings.APP_NAME,
@@ -56,18 +61,58 @@
            version=settings.VERSION,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
-           expected_tables=procurement_expected_tables
+           expected_tables=procurement_expected_tables,
+           enable_messaging=True  # Enable RabbitMQ for event publishing
        )

    async def _setup_messaging(self):
        """Setup messaging for procurement service"""
        from shared.messaging.rabbitmq import RabbitMQClient
        try:
            self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="procurement-service")
            await self.rabbitmq_client.connect()
            self.logger.info("Procurement service messaging setup completed")
        except Exception as e:
            self.logger.error("Failed to setup procurement messaging", error=str(e))
            raise

    async def _cleanup_messaging(self):
        """Cleanup messaging for procurement service"""
        try:
            if self.rabbitmq_client:
                await self.rabbitmq_client.disconnect()
                self.logger.info("Procurement service messaging cleanup completed")
        except Exception as e:
            self.logger.error("Error during procurement messaging cleanup", error=str(e))

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for procurement service"""
        await super().on_startup(app)

        self.logger.info("Procurement Service starting up...")
        # Future: Initialize any background services if needed

        # Start overdue PO scheduler
        if self.rabbitmq_client and self.rabbitmq_client.connected:
            self.overdue_po_scheduler = OverduePOScheduler(
                rabbitmq_client=self.rabbitmq_client,
                check_interval_seconds=3600  # Check every hour
            )
            await self.overdue_po_scheduler.start()
            self.logger.info("Overdue PO scheduler started")
        else:
            self.logger.warning("RabbitMQ not available, overdue PO scheduler not started")

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for procurement service"""
        self.logger.info("Procurement Service shutting down...")

        # Stop overdue PO scheduler
        if self.overdue_po_scheduler:
            await self.overdue_po_scheduler.stop()
            self.logger.info("Overdue PO scheduler stopped")

        await super().on_shutdown(app)

    def get_service_features(self):
        """Return procurement-specific features"""
        return [
services/procurement/app/messaging/__init__.py  (new file, 6 lines)
@@ -0,0 +1,6 @@
"""
Procurement messaging module
"""
from .event_publisher import ProcurementEventPublisher

__all__ = ["ProcurementEventPublisher"]
services/procurement/app/messaging/event_publisher.py  (new file, 275 lines)
@@ -0,0 +1,275 @@
"""
Procurement Service Event Publisher
Publishes procurement-related events to RabbitMQ
"""
import uuid
from typing import Optional, Dict, Any
from decimal import Decimal
import structlog
from shared.messaging.rabbitmq import RabbitMQClient
from shared.messaging.events import (
    PurchaseOrderApprovedEvent,
    PurchaseOrderRejectedEvent,
    PurchaseOrderSentToSupplierEvent,
    DeliveryReceivedEvent
)

logger = structlog.get_logger()


class ProcurementEventPublisher:
    """Handles publishing of procurement-related events"""

    def __init__(self, rabbitmq_client: Optional[RabbitMQClient] = None):
        self.rabbitmq_client = rabbitmq_client
        self.service_name = "procurement"

    async def publish_po_approved_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        supplier_email: Optional[str],
        supplier_phone: Optional[str],
        total_amount: Decimal,
        currency: str,
        required_delivery_date: Optional[str],
        items: list,
        approved_by: Optional[uuid.UUID],
        approved_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """
        Publish purchase order approved event

        This event triggers:
        - Email/WhatsApp notification to supplier (notification service)
        - Dashboard refresh (frontend)
        - Analytics update (reporting service)
        """
        if not self.rabbitmq_client:
            logger.warning("RabbitMQ client not available, event not published", event="po.approved")
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "supplier_email": supplier_email,
            "supplier_phone": supplier_phone,
            "total_amount": float(total_amount),
            "currency": currency,
            "required_delivery_date": required_delivery_date,
            "items": [
                {
                    "inventory_product_id": str(item.get("inventory_product_id")),
                    "product_name": item.get("product_name"),
                    "ordered_quantity": float(item.get("ordered_quantity")),
                    "unit_of_measure": item.get("unit_of_measure"),
                    "unit_price": float(item.get("unit_price")),
                    "line_total": float(item.get("line_total"))
                }
                for item in items
            ],
            "approved_by": str(approved_by) if approved_by else None,
            "approved_at": approved_at,
        }

        event = PurchaseOrderApprovedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        # Publish to procurement.events exchange with routing key po.approved
        success = await self.rabbitmq_client.publish_event(
            exchange_name="procurement.events",
            routing_key="po.approved",
            event_data=event.to_dict(),
            persistent=True
        )

        if success:
            logger.info(
                "Published PO approved event",
                tenant_id=str(tenant_id),
                po_id=str(po_id),
                po_number=po_number,
                supplier_name=supplier_name
            )

        return success

    async def publish_po_rejected_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        rejection_reason: str,
        rejected_by: Optional[uuid.UUID],
        rejected_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """Publish purchase order rejected event"""
        if not self.rabbitmq_client:
            logger.warning("RabbitMQ client not available, event not published", event="po.rejected")
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "rejection_reason": rejection_reason,
            "rejected_by": str(rejected_by) if rejected_by else None,
            "rejected_at": rejected_at,
        }

        event = PurchaseOrderRejectedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        success = await self.rabbitmq_client.publish_event(
            exchange_name="procurement.events",
            routing_key="po.rejected",
            event_data=event.to_dict(),
            persistent=True
        )

        if success:
            logger.info(
                "Published PO rejected event",
                tenant_id=str(tenant_id),
                po_id=str(po_id),
                po_number=po_number
            )

        return success

    async def publish_po_sent_to_supplier_event(
        self,
        tenant_id: uuid.UUID,
        po_id: uuid.UUID,
        po_number: str,
        supplier_id: uuid.UUID,
        supplier_name: str,
        supplier_email: Optional[str],
        supplier_phone: Optional[str],
        total_amount: Decimal,
        currency: str,
        sent_at: str,
        correlation_id: Optional[str] = None
    ) -> bool:
        """Publish purchase order sent to supplier event"""
        if not self.rabbitmq_client:
            logger.warning("RabbitMQ client not available, event not published", event="po.sent_to_supplier")
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "po_id": str(po_id),
            "po_number": po_number,
            "supplier_id": str(supplier_id),
            "supplier_name": supplier_name,
            "supplier_email": supplier_email,
            "supplier_phone": supplier_phone,
            "total_amount": float(total_amount),
            "currency": currency,
            "sent_at": sent_at,
        }

        event = PurchaseOrderSentToSupplierEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        success = await self.rabbitmq_client.publish_event(
            exchange_name="procurement.events",
            routing_key="po.sent_to_supplier",
            event_data=event.to_dict(),
            persistent=True
        )

        if success:
            logger.info(
                "Published PO sent to supplier event",
                tenant_id=str(tenant_id),
                po_id=str(po_id),
                po_number=po_number
            )

        return success

    async def publish_delivery_received_event(
        self,
        tenant_id: uuid.UUID,
        delivery_id: uuid.UUID,
        po_id: uuid.UUID,
        items: list,
        received_at: str,
        received_by: Optional[uuid.UUID],
        correlation_id: Optional[str] = None
    ) -> bool:
        """
        Publish delivery received event

        This event triggers:
        - Automatic stock update (inventory service)
        - PO status update to 'completed'
        - Supplier performance metrics update
        """
        if not self.rabbitmq_client:
            logger.warning("RabbitMQ client not available, event not published", event="delivery.received")
            return False

        event_data = {
            "tenant_id": str(tenant_id),
            "delivery_id": str(delivery_id),
            "po_id": str(po_id),
            "items": [
                {
                    "inventory_product_id": str(item.get("inventory_product_id")),
                    "accepted_quantity": float(item.get("accepted_quantity")),
                    "rejected_quantity": float(item.get("rejected_quantity", 0)),
                    "batch_lot_number": item.get("batch_lot_number"),
                    "expiry_date": item.get("expiry_date"),
                    "unit_of_measure": item.get("unit_of_measure")
                }
                for item in items
            ],
            "received_at": received_at,
            "received_by": str(received_by) if received_by else None,
        }

        event = DeliveryReceivedEvent(
            service_name=self.service_name,
            data=event_data,
            correlation_id=correlation_id
        )

        success = await self.rabbitmq_client.publish_event(
            exchange_name="procurement.events",
            routing_key="delivery.received",
            event_data=event.to_dict(),
            persistent=True
        )

        if success:
            logger.info(
                "Published delivery received event",
                tenant_id=str(tenant_id),
                delivery_id=str(delivery_id),
                po_id=str(po_id)
            )

        return success
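
A short usage sketch for the publisher, assuming an already-connected RabbitMQClient; all identifiers and supplier details below are placeholders, not values from this commit:

# Illustrative wiring of ProcurementEventPublisher with a RabbitMQ client.
import asyncio
import uuid
from datetime import datetime, timezone

from app.messaging.event_publisher import ProcurementEventPublisher
from shared.messaging.rabbitmq import RabbitMQClient


async def main() -> None:
    client = RabbitMQClient("amqp://guest:guest@localhost/", service_name="procurement-service")  # assumed broker URL
    await client.connect()
    publisher = ProcurementEventPublisher(rabbitmq_client=client)

    # Publish a rejection event; routing key "po.rejected" on "procurement.events".
    ok = await publisher.publish_po_rejected_event(
        tenant_id=uuid.uuid4(),        # placeholder tenant
        po_id=uuid.uuid4(),            # placeholder PO
        po_number="PO-2024-0001",      # placeholder number
        supplier_id=uuid.uuid4(),
        supplier_name="Acme Produce",
        rejection_reason="Budget exceeded for this period",
        rejected_by=uuid.uuid4(),
        rejected_at=datetime.now(timezone.utc).isoformat(),
    )
    print("published:", ok)

    await client.disconnect()


if __name__ == "__main__":
    asyncio.run(main())

Note that every publish_* method degrades gracefully: if no RabbitMQ client was injected it logs a warning and returns False instead of raising.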
services/procurement/app/services/overdue_po_detector.py  (new file, 266 lines)
@@ -0,0 +1,266 @@
"""
Overdue Purchase Order Detector

Detects POs that are past their estimated delivery date and triggers alerts.
"""

import uuid
from datetime import datetime, timezone
from typing import List, Dict, Any, Optional
import structlog
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.purchase_order import PurchaseOrder, PurchaseOrderStatus
from app.core.database import database_manager

logger = structlog.get_logger()


class OverduePODetector:
    """
    Detects and reports overdue purchase orders.

    A PO is considered overdue if:
    - Status is 'approved' or 'sent_to_supplier' (not yet delivered)
    - estimated_delivery_date is in the past
    - Has not been marked as completed or cancelled
    """

    def __init__(self):
        """Initialize overdue PO detector"""
        self.overdue_threshold_hours = 24  # Grace period before marking overdue

    async def detect_overdue_pos(
        self,
        tenant_id: Optional[uuid.UUID] = None
    ) -> List[Dict[str, Any]]:
        """
        Detect all overdue POs.

        Args:
            tenant_id: Optional tenant filter. If None, checks all tenants.

        Returns:
            List of overdue PO summaries
        """
        try:
            now = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                # Build query for overdue POs
                query = select(PurchaseOrder).where(
                    and_(
                        # Only check POs that are in-flight (approved or sent to supplier)
                        PurchaseOrder.status.in_([
                            PurchaseOrderStatus.approved,
                            PurchaseOrderStatus.sent_to_supplier,
                            PurchaseOrderStatus.confirmed
                        ]),
                        # Must have an estimated delivery date
                        PurchaseOrder.estimated_delivery_date.isnot(None),
                        # Delivery date is in the past
                        PurchaseOrder.estimated_delivery_date < now
                    )
                )

                # Add tenant filter if provided
                if tenant_id:
                    query = query.where(PurchaseOrder.tenant_id == tenant_id)

                result = await session.execute(query)
                overdue_pos = result.scalars().all()

                # Calculate days overdue for each PO
                overdue_summaries = []
                for po in overdue_pos:
                    days_overdue = (now - po.estimated_delivery_date).days
                    hours_overdue = (now - po.estimated_delivery_date).total_seconds() / 3600

                    overdue_summaries.append({
                        'po_id': str(po.id),
                        'tenant_id': str(po.tenant_id),
                        'po_number': po.po_number,
                        'supplier_id': str(po.supplier_id),
                        'status': po.status.value,
                        'total_amount': float(po.total_amount),
                        'currency': po.currency,
                        'approved_at': po.approved_at.isoformat() if po.approved_at else None,
                        'estimated_delivery_date': po.estimated_delivery_date.isoformat(),
                        'days_overdue': days_overdue,
                        'hours_overdue': round(hours_overdue, 1),
                        'severity': self._calculate_severity(days_overdue),
                        'priority': po.priority
                    })

                if overdue_summaries:
                    logger.warning(
                        "Detected overdue purchase orders",
                        count=len(overdue_summaries),
                        tenant_id=str(tenant_id) if tenant_id else "all"
                    )
                else:
                    logger.info(
                        "No overdue purchase orders detected",
                        tenant_id=str(tenant_id) if tenant_id else "all"
                    )

                return overdue_summaries

        except Exception as e:
            logger.error(
                "Error detecting overdue POs",
                error=str(e),
                tenant_id=str(tenant_id) if tenant_id else "all",
                exc_info=True
            )
            return []

    async def get_overdue_count_by_tenant(self) -> Dict[str, int]:
        """
        Get count of overdue POs grouped by tenant.

        Returns:
            Dictionary mapping tenant_id to overdue count
        """
        try:
            now = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                query = select(
                    PurchaseOrder.tenant_id,
                    PurchaseOrder.id
                ).where(
                    and_(
                        PurchaseOrder.status.in_([
                            PurchaseOrderStatus.approved,
                            PurchaseOrderStatus.sent_to_supplier,
                            PurchaseOrderStatus.confirmed
                        ]),
                        PurchaseOrder.estimated_delivery_date.isnot(None),
                        PurchaseOrder.estimated_delivery_date < now
                    )
                )

                result = await session.execute(query)
                rows = result.all()

                # Count by tenant
                tenant_counts: Dict[str, int] = {}
                for tenant_id, _ in rows:
                    tenant_id_str = str(tenant_id)
                    tenant_counts[tenant_id_str] = tenant_counts.get(tenant_id_str, 0) + 1

                return tenant_counts

        except Exception as e:
            logger.error("Error getting overdue counts", error=str(e), exc_info=True)
            return {}

    def _calculate_severity(self, days_overdue: int) -> str:
        """
        Calculate severity level based on days overdue.

        Args:
            days_overdue: Number of days past delivery date

        Returns:
            Severity level: 'low', 'medium', 'high', 'critical'
        """
        if days_overdue <= 1:
            return 'low'
        elif days_overdue <= 3:
            return 'medium'
        elif days_overdue <= 7:
            return 'high'
        else:
            return 'critical'

    async def get_overdue_pos_for_dashboard(
        self,
        tenant_id: uuid.UUID,
        limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        Get overdue POs formatted for dashboard display.

        Args:
            tenant_id: Tenant ID
            limit: Max number of results

        Returns:
            List of overdue POs with dashboard-friendly format
        """
        overdue_pos = await self.detect_overdue_pos(tenant_id)

        # Sort by severity and days overdue (most critical first)
        severity_order = {'critical': 0, 'high': 1, 'medium': 2, 'low': 3}
        overdue_pos.sort(
            key=lambda x: (severity_order.get(x['severity'], 999), -x['days_overdue'])
        )

        # Limit results
        return overdue_pos[:limit]

    async def check_single_po_overdue(
        self,
        po_id: uuid.UUID,
        tenant_id: uuid.UUID
    ) -> Optional[Dict[str, Any]]:
        """
        Check if a single PO is overdue.

        Args:
            po_id: PO ID
            tenant_id: Tenant ID

        Returns:
            Overdue info if PO is overdue, None otherwise
        """
        try:
            now = datetime.now(timezone.utc)

            async with database_manager.get_session() as session:
                query = select(PurchaseOrder).where(
                    and_(
                        PurchaseOrder.id == po_id,
                        PurchaseOrder.tenant_id == tenant_id
                    )
                )

                result = await session.execute(query)
                po = result.scalar_one_or_none()

                if not po:
                    return None

                # Check if overdue
                if (
                    po.status in [
                        PurchaseOrderStatus.approved,
                        PurchaseOrderStatus.sent_to_supplier,
                        PurchaseOrderStatus.confirmed
                    ] and
                    po.estimated_delivery_date and
                    po.estimated_delivery_date < now
                ):
                    days_overdue = (now - po.estimated_delivery_date).days

                    return {
                        'po_id': str(po.id),
                        'po_number': po.po_number,
                        'days_overdue': days_overdue,
                        'severity': self._calculate_severity(days_overdue),
                        'estimated_delivery_date': po.estimated_delivery_date.isoformat()
                    }

                return None

        except Exception as e:
            logger.error(
                "Error checking single PO overdue status",
                error=str(e),
                po_id=str(po_id),
                exc_info=True
            )
            return None
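
The detector can also be used directly, for example from a maintenance script or a test, assuming database_manager is already configured for the target environment; the tenant UUID below is a placeholder:

# Illustrative standalone use of OverduePODetector.
import asyncio
import uuid

from app.services.overdue_po_detector import OverduePODetector


async def main() -> None:
    detector = OverduePODetector()
    tenant_id = uuid.UUID("11111111-1111-1111-1111-111111111111")  # placeholder tenant

    # Dashboard view: most critical first, capped at 10 entries.
    rows = await detector.get_overdue_pos_for_dashboard(tenant_id=tenant_id, limit=10)
    for row in rows:
        print(row["po_number"], row["days_overdue"], row["severity"])

    # Severity thresholds used above: <=1 day low, <=3 medium, <=7 high, otherwise critical.
    assert detector._calculate_severity(2) == "medium"
    assert detector._calculate_severity(10) == "critical"


if __name__ == "__main__":
    asyncio.run(main())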
@@ -31,6 +31,8 @@ from app.schemas.purchase_order_schemas import (
from app.core.config import settings
from shared.clients.suppliers_client import SuppliersServiceClient
from shared.config.base import BaseServiceSettings
from shared.messaging.rabbitmq import RabbitMQClient
from app.messaging.event_publisher import ProcurementEventPublisher

logger = structlog.get_logger()

@@ -42,7 +44,9 @@ class PurchaseOrderService:
        self,
        db: AsyncSession,
        config: BaseServiceSettings,
-       suppliers_client: Optional[SuppliersServiceClient] = None
+       suppliers_client: Optional[SuppliersServiceClient] = None,
+       rabbitmq_client: Optional[RabbitMQClient] = None,
+       event_publisher: Optional[ProcurementEventPublisher] = None
    ):
        self.db = db
        self.config = config
@@ -54,6 +58,10 @@ class PurchaseOrderService:
        # Initialize suppliers client for supplier validation
        self.suppliers_client = suppliers_client or SuppliersServiceClient(config)

        # Initialize event publisher for RabbitMQ events
        self.rabbitmq_client = rabbitmq_client
        self.event_publisher = event_publisher or ProcurementEventPublisher(rabbitmq_client)

    # ================================================================
    # PURCHASE ORDER CRUD
    # ================================================================
@@ -311,7 +319,7 @@ class PurchaseOrderService:
        approved_by: uuid.UUID,
        approval_notes: Optional[str] = None
    ) -> Optional[PurchaseOrder]:
-       """Approve a purchase order"""
+       """Approve a purchase order and publish approval event"""
        try:
            logger.info("Approving purchase order", po_id=po_id)

@@ -322,18 +330,67 @@ class PurchaseOrderService:
            if po.status not in ['draft', 'pending_approval']:
                raise ValueError(f"Cannot approve order with status {po.status}")

            # Get supplier details for event and delivery calculation
            supplier = await self._get_and_validate_supplier(tenant_id, po.supplier_id)

            # Calculate estimated delivery date based on supplier lead time
            approved_at = datetime.utcnow()
            standard_lead_time = supplier.get('standard_lead_time', 7)  # Default 7 days
            estimated_delivery_date = approved_at + timedelta(days=standard_lead_time)

            update_data = {
                'status': 'approved',
                'approved_by': approved_by,
-               'approved_at': datetime.utcnow(),
+               'approved_at': approved_at,
+               'estimated_delivery_date': estimated_delivery_date,
                'updated_by': approved_by,
-               'updated_at': datetime.utcnow()
+               'updated_at': approved_at
            }

            po = await self.po_repo.update_po(po_id, tenant_id, update_data)
            await self.db.commit()

            logger.info("Purchase order approved successfully", po_id=po_id)

            # Publish PO approved event (non-blocking, fire-and-forget)
            try:
                # Get PO items for event
                items = await self.item_repo.get_items_by_po(po_id)
                items_data = [
                    {
                        "inventory_product_id": item.inventory_product_id,
                        "product_name": item.product_name or item.product_code,
                        "ordered_quantity": item.ordered_quantity,
                        "unit_of_measure": item.unit_of_measure,
                        "unit_price": item.unit_price,
                        "line_total": item.line_total
                    }
                    for item in items
                ]

                await self.event_publisher.publish_po_approved_event(
                    tenant_id=tenant_id,
                    po_id=po_id,
                    po_number=po.po_number,
                    supplier_id=po.supplier_id,
                    supplier_name=supplier.get('name', ''),
                    supplier_email=supplier.get('email'),
                    supplier_phone=supplier.get('phone'),
                    total_amount=po.total_amount,
                    currency=po.currency,
                    required_delivery_date=po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                    items=items_data,
                    approved_by=approved_by,
                    approved_at=po.approved_at.isoformat()
                )
            except Exception as event_error:
                # Log but don't fail the approval if event publishing fails
                logger.warning(
                    "Failed to publish PO approved event",
                    po_id=str(po_id),
                    error=str(event_error)
                )

            return po

        except Exception as e:
@@ -348,7 +405,7 @@ class PurchaseOrderService:
        rejected_by: uuid.UUID,
        rejection_reason: str
    ) -> Optional[PurchaseOrder]:
-       """Reject a purchase order"""
+       """Reject a purchase order and publish rejection event"""
        try:
            logger.info("Rejecting purchase order", po_id=po_id)

@@ -359,6 +416,9 @@ class PurchaseOrderService:
            if po.status not in ['draft', 'pending_approval']:
                raise ValueError(f"Cannot reject order with status {po.status}")

            # Get supplier details for event
            supplier = await self._get_and_validate_supplier(tenant_id, po.supplier_id)

            update_data = {
                'status': 'rejected',
                'rejection_reason': rejection_reason,
@@ -370,6 +430,27 @@ class PurchaseOrderService:
            await self.db.commit()

            logger.info("Purchase order rejected", po_id=po_id)

            # Publish PO rejected event (non-blocking, fire-and-forget)
            try:
                await self.event_publisher.publish_po_rejected_event(
                    tenant_id=tenant_id,
                    po_id=po_id,
                    po_number=po.po_number,
                    supplier_id=po.supplier_id,
                    supplier_name=supplier.get('name', ''),
                    rejection_reason=rejection_reason,
                    rejected_by=rejected_by,
                    rejected_at=datetime.utcnow().isoformat()
                )
            except Exception as event_error:
                # Log but don't fail the rejection if event publishing fails
                logger.warning(
                    "Failed to publish PO rejected event",
                    po_id=str(po_id),
                    error=str(event_error)
                )

            return po

        except Exception as e:
@@ -485,6 +566,40 @@ class PurchaseOrderService:
                        delivery_id=delivery.id,
                        delivery_number=delivery_number)

            # Publish delivery received event (non-blocking, fire-and-forget)
            try:
                # Get all delivery items for the event
                items_data = []
                for item_data in delivery_data.items:
                    items_data.append({
                        "inventory_product_id": str(item_data.inventory_product_id),
                        "ordered_quantity": float(item_data.ordered_quantity),
                        "delivered_quantity": float(item_data.delivered_quantity),
                        "accepted_quantity": float(item_data.accepted_quantity),
                        "rejected_quantity": float(item_data.rejected_quantity),
                        "batch_lot_number": item_data.batch_lot_number,
                        "expiry_date": item_data.expiry_date.isoformat() if item_data.expiry_date else None,
                        "quality_grade": item_data.quality_grade,
                        "quality_issues": item_data.quality_issues,
                        "rejection_reason": item_data.rejection_reason
                    })

                await self.event_publisher.publish_delivery_received_event(
                    tenant_id=tenant_id,
                    delivery_id=delivery.id,
                    po_id=delivery_data.purchase_order_id,
                    items=items_data,
                    received_at=datetime.utcnow().isoformat(),
                    received_by=created_by
                )
            except Exception as event_error:
                # Log but don't fail the delivery creation if event publishing fails
                logger.warning(
                    "Failed to publish delivery received event",
                    delivery_id=str(delivery.id),
                    error=str(event_error)
                )

            return delivery

        except Exception as e: