Initial commit - production deployment
This commit is contained in:
1
services/suppliers/app/__init__.py
Normal file
1
services/suppliers/app/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/suppliers/app/__init__.py
|
||||
1
services/suppliers/app/api/__init__.py
Normal file
1
services/suppliers/app/api/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/suppliers/app/api/__init__.py
|
||||
575
services/suppliers/app/api/analytics.py
Normal file
575
services/suppliers/app/api/analytics.py
Normal file
@@ -0,0 +1,575 @@
|
||||
# services/suppliers/app/api/analytics.py
|
||||
"""
|
||||
Supplier Analytics API endpoints (ANALYTICS)
|
||||
Consolidates performance metrics, delivery stats, and all analytics operations
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
import structlog
|
||||
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role, analytics_tier_required
|
||||
from shared.routing import RouteBuilder
|
||||
from app.core.database import get_db
|
||||
from app.services.performance_service import PerformanceTrackingService, AlertService
|
||||
from app.services.dashboard_service import DashboardService
|
||||
from app.schemas.performance import (
|
||||
PerformanceMetric, Alert, PerformanceDashboardSummary,
|
||||
SupplierPerformanceInsights, PerformanceAnalytics, BusinessModelInsights,
|
||||
AlertSummary, PerformanceReportRequest, ExportDataResponse
|
||||
)
|
||||
from app.models.performance import PerformancePeriod, PerformanceMetricType, AlertType, AlertSeverity
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('suppliers')
|
||||
|
||||
router = APIRouter(tags=["analytics"])
|
||||
|
||||
|
||||
# ===== Dependency Injection =====
|
||||
|
||||
async def get_performance_service() -> PerformanceTrackingService:
    """Dependency provider: construct a PerformanceTrackingService per request."""
    service = PerformanceTrackingService()
    return service
|
||||
|
||||
async def get_alert_service() -> AlertService:
    """Dependency provider: construct an AlertService per request."""
    service = AlertService()
    return service
|
||||
|
||||
async def get_dashboard_service() -> DashboardService:
    """Dependency provider: construct a DashboardService per request."""
    service = DashboardService()
    return service
|
||||
|
||||
|
||||
# ===== Performance Metrics =====
|
||||
|
||||
@router.post(
    route_builder.build_analytics_route("performance/{supplier_id}/calculate"),
    response_model=PerformanceMetric
)
async def calculate_supplier_performance(
    tenant_id: UUID = Path(...),
    supplier_id: UUID = Path(...),
    period: PerformancePeriod = Query(...),
    period_start: datetime = Query(...),
    period_end: datetime = Query(...),
    current_user: dict = Depends(get_current_user_dep),
    performance_service: PerformanceTrackingService = Depends(get_performance_service),
    db: AsyncSession = Depends(get_db)
):
    """Calculate performance metrics for a supplier over a period window.

    Delegates to PerformanceTrackingService.calculate_supplier_performance.

    Returns:
        PerformanceMetric: the calculated metric for the supplier/period.

    Raises:
        HTTPException 404: when the service returns no metric.
        HTTPException 500: on unexpected calculation failures.
    """
    try:
        metric = await performance_service.calculate_supplier_performance(
            db, supplier_id, tenant_id, period, period_start, period_end
        )

        if not metric:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Unable to calculate performance metrics"
            )

        logger.info("Performance metrics calculated",
                    tenant_id=str(tenant_id),
                    supplier_id=str(supplier_id),
                    period=period.value)

        return metric

    except HTTPException:
        # Fix: the broad handler below previously caught the intentional 404
        # raised above and converted it into a 500. Re-raise HTTP errors as-is.
        raise
    except Exception as e:
        logger.error("Error calculating performance metrics",
                     tenant_id=str(tenant_id),
                     supplier_id=str(supplier_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to calculate performance metrics"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/{supplier_id}/metrics"),
    response_model=List[PerformanceMetric]
)
async def get_supplier_performance_metrics(
    tenant_id: UUID = Path(...),
    supplier_id: UUID = Path(...),
    metric_type: Optional[PerformanceMetricType] = Query(None),
    period: Optional[PerformancePeriod] = Query(None),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    limit: int = Query(50, ge=1, le=500),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stored performance metrics for a supplier, newest first.

    Optional filters: metric_type and a calculated_at date window.
    NOTE(review): the ``period`` query parameter is accepted but never used
    in the query below — confirm whether a period filter was intended.
    """
    try:
        # Local imports keep the model/SQL helpers out of module import time.
        from app.models.performance import SupplierPerformanceMetric
        from sqlalchemy import select, and_, desc

        # Build query for performance metrics scoped to tenant + supplier
        query = select(SupplierPerformanceMetric).where(
            and_(
                SupplierPerformanceMetric.supplier_id == supplier_id,
                SupplierPerformanceMetric.tenant_id == tenant_id
            )
        )

        # Apply filters
        if metric_type:
            query = query.where(SupplierPerformanceMetric.metric_type == metric_type)

        if date_from:
            query = query.where(SupplierPerformanceMetric.calculated_at >= date_from)

        if date_to:
            query = query.where(SupplierPerformanceMetric.calculated_at <= date_to)

        # Order by most recent and apply limit (limit bounded 1..500 by Query)
        query = query.order_by(desc(SupplierPerformanceMetric.calculated_at)).limit(limit)

        result = await db.execute(query)
        metrics = result.scalars().all()

        logger.info("Retrieved performance metrics",
                   tenant_id=str(tenant_id),
                   supplier_id=str(supplier_id),
                   count=len(metrics))

        return metrics

    except Exception as e:
        # Any failure (query build or execution) is masked as a generic 500.
        logger.error("Error getting performance metrics",
                    tenant_id=str(tenant_id),
                    supplier_id=str(supplier_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve performance metrics"
        )
|
||||
|
||||
|
||||
# ===== Alert Management =====
|
||||
|
||||
@router.post(
    route_builder.build_analytics_route("performance/alerts/evaluate"),
    response_model=List[Alert]
)
@require_user_role(['admin', 'owner'])
async def evaluate_performance_alerts(
    tenant_id: UUID = Path(...),
    supplier_id: Optional[UUID] = Query(None, description="Specific supplier to evaluate"),
    current_user: dict = Depends(get_current_user_dep),
    alert_service: AlertService = Depends(get_alert_service),
    db: AsyncSession = Depends(get_db)
):
    """Evaluate and create performance-based alerts.

    Restricted to admin/owner roles. When supplier_id is omitted the whole
    tenant is presumably evaluated — confirm against
    AlertService.evaluate_performance_alerts.

    Returns the list of alerts created by this evaluation run.
    """
    try:
        alerts = await alert_service.evaluate_performance_alerts(db, tenant_id, supplier_id)

        logger.info("Performance alerts evaluated",
                   tenant_id=str(tenant_id),
                   alerts_created=len(alerts))

        return alerts

    except Exception as e:
        logger.error("Error evaluating performance alerts",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to evaluate performance alerts"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/alerts"),
    response_model=List[Alert]
)
async def get_supplier_alerts(
    tenant_id: UUID = Path(...),
    supplier_id: Optional[UUID] = Query(None),
    alert_type: Optional[AlertType] = Query(None),
    severity: Optional[AlertSeverity] = Query(None),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    limit: int = Query(50, ge=1, le=500),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get supplier alerts for a tenant with optional filtering.

    Filters: supplier, alert type, severity, and a created_at date window.
    Results are ordered newest-first and capped at ``limit`` (1..500).
    """
    try:
        # Local imports keep the model/SQL helpers out of module import time.
        from app.models.performance import SupplierAlert
        from sqlalchemy import select, and_, desc

        # Build query for alerts, always scoped to the tenant
        query = select(SupplierAlert).where(
            SupplierAlert.tenant_id == tenant_id
        )

        # Apply filters (each one narrows the query only when provided)
        if supplier_id:
            query = query.where(SupplierAlert.supplier_id == supplier_id)

        if alert_type:
            query = query.where(SupplierAlert.alert_type == alert_type)

        if severity:
            query = query.where(SupplierAlert.severity == severity)

        if date_from:
            query = query.where(SupplierAlert.created_at >= date_from)

        if date_to:
            query = query.where(SupplierAlert.created_at <= date_to)

        # Order by most recent and apply limit
        query = query.order_by(desc(SupplierAlert.created_at)).limit(limit)

        result = await db.execute(query)
        alerts = result.scalars().all()

        logger.info("Retrieved supplier alerts",
                   tenant_id=str(tenant_id),
                   count=len(alerts))

        return alerts

    except Exception as e:
        logger.error("Error getting supplier alerts",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve supplier alerts"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/alerts/summary"),
    response_model=List[AlertSummary]
)
async def get_alert_summary(
    tenant_id: UUID = Path(...),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Aggregate alert counts by type and severity for the tenant."""
    try:
        return await dashboard_service.get_alert_summary(db, tenant_id, date_from, date_to)
    except Exception as exc:
        # Hide internal details from the client; log the underlying cause.
        logger.error(
            "Error getting alert summary",
            tenant_id=str(tenant_id),
            error=str(exc),
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve alert summary",
        )
|
||||
|
||||
|
||||
# ===== Dashboard Analytics =====
|
||||
|
||||
@router.get(
    route_builder.build_dashboard_route("performance/summary"),
    response_model=PerformanceDashboardSummary
)
async def get_performance_dashboard_summary(
    tenant_id: UUID = Path(...),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Return the tenant-wide performance dashboard summary."""
    try:
        payload = await dashboard_service.get_performance_dashboard_summary(
            db, tenant_id, date_from, date_to
        )
        logger.info("Performance dashboard summary retrieved",
                    tenant_id=str(tenant_id))
        return payload
    except Exception as exc:
        logger.error("Error getting dashboard summary",
                     tenant_id=str(tenant_id),
                     error=str(exc))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve dashboard summary",
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/{supplier_id}/insights"),
    response_model=SupplierPerformanceInsights
)
async def get_supplier_performance_insights(
    tenant_id: UUID = Path(...),
    supplier_id: UUID = Path(...),
    days_back: int = Query(30, ge=1, le=365),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Return detailed performance insights for one supplier.

    days_back bounds the look-back window (1..365, default 30).
    """
    try:
        supplier_insights = await dashboard_service.get_supplier_performance_insights(
            db, tenant_id, supplier_id, days_back
        )
        logger.info("Supplier performance insights retrieved",
                    tenant_id=str(tenant_id),
                    supplier_id=str(supplier_id))
        return supplier_insights
    except Exception as exc:
        logger.error("Error getting supplier insights",
                     tenant_id=str(tenant_id),
                     supplier_id=str(supplier_id),
                     error=str(exc))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve supplier insights",
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/performance"),
    response_model=PerformanceAnalytics
)
@analytics_tier_required
async def get_performance_analytics(
    tenant_id: UUID = Path(...),
    period_days: int = Query(90, ge=1, le=365),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Return advanced performance analytics (analytics-tier tenants only)."""
    try:
        analytics_payload = await dashboard_service.get_performance_analytics(
            db, tenant_id, period_days
        )
        logger.info("Performance analytics retrieved",
                    tenant_id=str(tenant_id),
                    period_days=period_days)
        return analytics_payload
    except Exception as exc:
        logger.error("Error getting performance analytics",
                     tenant_id=str(tenant_id),
                     error=str(exc))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve performance analytics",
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/business-model"),
    response_model=BusinessModelInsights
)
@analytics_tier_required
async def get_business_model_insights(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Return business-model detection results (analytics-tier tenants only)."""
    try:
        model_insights = await dashboard_service.get_business_model_insights(db, tenant_id)
        logger.info("Business model insights retrieved",
                    tenant_id=str(tenant_id),
                    detected_model=model_insights.detected_model)
        return model_insights
    except Exception as exc:
        logger.error("Error getting business model insights",
                     tenant_id=str(tenant_id),
                     error=str(exc))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve business model insights",
        )
|
||||
|
||||
|
||||
# ===== Export and Reporting =====
|
||||
|
||||
@router.post(
    route_builder.build_analytics_route("performance/reports/generate"),
    response_model=ExportDataResponse
)
@require_user_role(['admin', 'owner'])
async def generate_performance_report(
    report_request: PerformanceReportRequest,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Generate a performance report (admin/owner only).

    Raises:
        HTTPException 501: report generation is not yet implemented.
        HTTPException 500: on unexpected failures.
    """
    try:
        # TODO: Implement report generation
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail="Report generation not yet implemented"
        )

    except HTTPException:
        # Fix: the broad handler below previously converted the deliberate
        # 501 into a 500. Propagate intentional HTTP errors unchanged.
        raise
    except Exception as e:
        logger.error("Error generating performance report",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to generate performance report"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/export")
)
async def export_performance_data(
    tenant_id: UUID = Path(...),
    format: str = Query("json", description="Export format: json, csv, excel"),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    supplier_ids: Optional[List[UUID]] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Export performance data in the requested format.

    Raises:
        HTTPException 400: unsupported export format.
        HTTPException 501: export is not yet implemented.
        HTTPException 500: on unexpected failures.
    """
    try:
        if format.lower() not in ["json", "csv", "excel"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Unsupported export format. Use: json, csv, excel"
            )

        # TODO: Implement data export
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail="Data export not yet implemented"
        )

    except HTTPException:
        # Fix: the broad handler below previously converted the deliberate
        # 400/501 responses into a 500. Propagate HTTP errors unchanged.
        raise
    except Exception as e:
        logger.error("Error exporting performance data",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to export performance data"
        )
|
||||
|
||||
|
||||
# ===== Configuration and Health =====
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/config")
)
async def get_performance_config(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """Get performance tracking configuration.

    Returns a snapshot of service-level settings (tracking, thresholds,
    alerts, dashboard, business-model detection). NOTE: the response is
    built entirely from global settings — tenant_id is only used in error
    logging, so the config is identical for every tenant.
    """
    try:
        # Imported lazily so the settings module loads only when needed.
        from app.core.config import settings

        config = {
            "performance_tracking": {
                "enabled": settings.PERFORMANCE_TRACKING_ENABLED,
                "calculation_interval_minutes": settings.PERFORMANCE_CALCULATION_INTERVAL_MINUTES,
                "cache_ttl_seconds": settings.PERFORMANCE_CACHE_TTL
            },
            "thresholds": {
                "excellent_delivery_rate": settings.EXCELLENT_DELIVERY_RATE,
                "good_delivery_rate": settings.GOOD_DELIVERY_RATE,
                "acceptable_delivery_rate": settings.ACCEPTABLE_DELIVERY_RATE,
                "poor_delivery_rate": settings.POOR_DELIVERY_RATE,
                "excellent_quality_rate": settings.EXCELLENT_QUALITY_RATE,
                "good_quality_rate": settings.GOOD_QUALITY_RATE,
                "acceptable_quality_rate": settings.ACCEPTABLE_QUALITY_RATE,
                "poor_quality_rate": settings.POOR_QUALITY_RATE
            },
            "alerts": {
                "enabled": settings.ALERTS_ENABLED,
                "evaluation_interval_minutes": settings.ALERT_EVALUATION_INTERVAL_MINUTES,
                "retention_days": settings.ALERT_RETENTION_DAYS,
                "critical_delivery_delay_hours": settings.CRITICAL_DELIVERY_DELAY_HOURS,
                "critical_quality_rejection_rate": settings.CRITICAL_QUALITY_REJECTION_RATE
            },
            "dashboard": {
                "cache_ttl_seconds": settings.DASHBOARD_CACHE_TTL,
                "refresh_interval_seconds": settings.DASHBOARD_REFRESH_INTERVAL,
                "default_analytics_period_days": settings.DEFAULT_ANALYTICS_PERIOD_DAYS
            },
            "business_model": {
                "detection_enabled": settings.ENABLE_BUSINESS_MODEL_DETECTION,
                "central_bakery_threshold": settings.CENTRAL_BAKERY_THRESHOLD_SUPPLIERS,
                "individual_bakery_threshold": settings.INDIVIDUAL_BAKERY_THRESHOLD_SUPPLIERS
            }
        }

        return config

    except Exception as e:
        # A missing settings attribute would surface here as a 500.
        logger.error("Error getting performance config",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve performance configuration"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("performance/health")
)
async def get_performance_health(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """Report static health/feature status for the performance subsystem."""
    try:
        health = {
            "service": "suppliers-performance",
            "status": "healthy",
            "timestamp": datetime.now().isoformat(),
            "tenant_id": str(tenant_id),
            "features": {
                "performance_tracking": "enabled",
                "alerts": "enabled",
                "dashboard_analytics": "enabled",
                "business_model_detection": "enabled",
            },
        }
        return health
    except Exception as exc:
        logger.error("Error getting performance health",
                     tenant_id=str(tenant_id),
                     error=str(exc))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get performance health status",
        )
|
||||
237
services/suppliers/app/api/audit.py
Normal file
237
services/suppliers/app/api/audit.py
Normal file
@@ -0,0 +1,237 @@
|
||||
# services/suppliers/app/api/audit.py
|
||||
"""
|
||||
Audit Logs API - Retrieve audit trail for suppliers service
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
|
||||
from typing import Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
from sqlalchemy import select, func, and_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.models.audit_log_schemas import (
|
||||
AuditLogResponse,
|
||||
AuditLogListResponse,
|
||||
AuditLogStatsResponse
|
||||
)
|
||||
from app.core.database import database_manager
|
||||
|
||||
route_builder = RouteBuilder('suppliers')
|
||||
router = APIRouter(tags=["audit-logs"])
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
async def get_db():
    """Database session dependency.

    Yields an AsyncSession from the shared database_manager; the async
    context manager handles cleanup when the request finishes.
    """
    async with database_manager.get_session() as session:
        yield session
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for suppliers service.

    Requires admin or owner role. Supports optional date/user/action/
    resource/severity filters plus a case-insensitive description search,
    with limit/offset pagination ordered newest-first.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )

        # Build query filters; tenant scoping is always applied.
        filters = [AuditLog.tenant_id == tenant_id]

        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            # ilike = case-insensitive substring match on the description
            filters.append(AuditLog.description.ilike(f"%{search}%"))

        # Count total matching records (separate query for pagination metadata)
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0

        # Fetch paginated results
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )

        result = await db.execute(query)
        audit_logs = result.scalars().all()

        # Convert to response models
        # NOTE(review): from_orm is the Pydantic v1 API; if this project is
        # on Pydantic v2, model_validate is the replacement — confirm version.
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]

        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )

        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        # NOTE(review): str(e) is echoed to the client here — this can leak
        # internal details; consider a generic message as in analytics.py.
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for suppliers service.

    Requires admin or owner role. Returns total event count, per-action /
    per-severity / per-resource-type breakdowns, and the min/max created_at
    range, all within the optional start_date..end_date window.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Build base filters shared by every aggregate query below
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)

        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0

        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}

        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}

        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}

        # Date range (min/max may be None when no rows match)
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()

        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )

        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        # NOTE(review): str(e) is echoed to the client — potential detail leak.
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )
|
||||
45
services/suppliers/app/api/internal.py
Normal file
45
services/suppliers/app/api/internal.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""
|
||||
Internal API for Suppliers Service
|
||||
Handles internal service-to-service operations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.models.suppliers import Supplier, SupplierStatus
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(prefix="/internal", tags=["internal"])
|
||||
|
||||
|
||||
@router.get("/count")
async def get_supplier_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Get count of active suppliers for onboarding status check.
    Internal endpoint for tenant service.

    Raises:
        HTTPException 400: when tenant_id is not a valid UUID.
        HTTPException 500: on database failures.
    """
    # Fix: validate the UUID up front so a malformed tenant_id yields a 400
    # instead of being swallowed by the generic handler below as a 500.
    try:
        tenant_uuid = UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id: must be a UUID")

    try:
        count = await db.scalar(
            select(func.count()).select_from(Supplier)
            .where(
                Supplier.tenant_id == tenant_uuid,
                Supplier.status == SupplierStatus.active
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get supplier count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get supplier count: {str(e)}")
|
||||
401
services/suppliers/app/api/internal_demo.py
Normal file
401
services/suppliers/app/api/internal_demo.py
Normal file
@@ -0,0 +1,401 @@
|
||||
"""
|
||||
Internal Demo Cloning API for Suppliers Service
|
||||
Service-to-service endpoint for cloning supplier data
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, delete
|
||||
import structlog
|
||||
import uuid
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.suppliers import Supplier
|
||||
from app.core.config import settings
|
||||
|
||||
# Import demo_dates utilities at the top level
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
||||
|
||||
# Base demo tenant IDs
|
||||
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
||||
|
||||
|
||||
def parse_date_field(
    field_value: object,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value (BASE_TS marker string, ISO 8601
            string, or None). Annotated ``object`` because any JSON value may
            arrive here; the previous annotation ``any`` was the builtin
            function, not a type.
        session_time: Session creation time (timezone-aware UTC).
        field_name: Name of the field (for logging).

    Returns:
        Timezone-aware UTC datetime, or None when the value is absent or
        cannot be interpreted. Parse failures are logged, never raised.
    """
    if field_value is None:
        return None

    # Relative markers (e.g. "BASE_TS-3d") are resolved against session_time.
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Legacy absolute ISO timestamps: parse, then shift relative to session time.
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    # Anything else (plain dates without T/Z, numbers, ...) is unsupported.
    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None
|
||||
|
||||
|
||||
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone suppliers service data for a virtual demo tenant.

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference).
        virtual_tenant_id: Target virtual tenant UUID.
        demo_account_type: Type of demo account
            ("professional", "enterprise" or "enterprise_child").
        session_id: Originating session ID for tracing.
        session_created_at: Session creation timestamp for date adjustment.

    Returns:
        Cloning status and record counts. On failure the transaction is
        rolled back and a ``status: "failed"`` payload is returned (the
        orchestrator inspects the body), except for an invalid
        ``virtual_tenant_id`` which raises a 400.
    """
    # Hoisted out of the per-supplier loop: these imports are loop-invariant.
    from shared.utils.demo_id_transformer import transform_id
    from app.models.suppliers import SupplierType, SupplierStatus, PaymentTerms

    start_time = datetime.now(timezone.utc)

    try:
        # Validate the target tenant UUID before doing any work.
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment; fall back to "now"
        # when absent or unparseable.
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting suppliers data cloning",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_created_at=session_created_at
        )

        # Resolve the seed data file for the requested demo account type.
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "05-suppliers.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "05-suppliers.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "05-suppliers.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Track cloning statistics
        stats = {
            "suppliers": 0
        }

        # Fallback author for records that lack created_by/updated_by.
        system_user_id = uuid.UUID('00000000-0000-0000-0000-000000000000')

        for supplier_data in seed_data.get('suppliers', []):
            # Transform supplier ID using XOR; the explicit uuid.UUID() call is
            # a validation-only parse that surfaces malformed seed IDs early.
            try:
                uuid.UUID(supplier_data['id'])
                transformed_id = transform_id(supplier_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse supplier UUID",
                             supplier_id=supplier_data['id'],
                             error=str(e))
                # NOTE(review): this HTTPException is raised inside the outer
                # try and is therefore caught by the broad `except Exception`
                # below, returning a "failed" payload rather than a 400 —
                # preserved as-is; confirm whether a real 400 was intended.
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in supplier data: {str(e)}"
                )

            # Parse date fields (supports BASE_TS markers and ISO timestamps).
            adjusted_created_at = parse_date_field(
                supplier_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                supplier_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at  # Fallback to created_at if not provided

            # Map supplier_type to enum; default/fallback is `multi`.
            supplier_type_value = supplier_data.get('supplier_type')
            if supplier_type_value is None:
                supplier_type_value = SupplierType.multi
            elif isinstance(supplier_type_value, str):
                try:
                    supplier_type_value = SupplierType[supplier_type_value]
                except KeyError:
                    supplier_type_value = SupplierType.multi

            # Map payment_terms to enum; default/fallback is `net_30`.
            payment_terms_value = supplier_data.get('payment_terms', 'net_30')
            if isinstance(payment_terms_value, str):
                try:
                    payment_terms_value = PaymentTerms[payment_terms_value]
                except KeyError:
                    payment_terms_value = PaymentTerms.net_30

            # Map status to enum; default/fallback is `active`.
            status_value = supplier_data.get('status', 'active')
            if isinstance(status_value, str):
                try:
                    status_value = SupplierStatus[status_value]
                except KeyError:
                    status_value = SupplierStatus.active

            created_by = supplier_data.get('created_by', str(system_user_id))
            updated_by = supplier_data.get('updated_by', str(system_user_id))

            new_supplier = Supplier(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=supplier_data['name'],
                supplier_code=supplier_data.get('supplier_code'),
                tax_id=supplier_data.get('tax_id'),
                registration_number=supplier_data.get('registration_number'),
                supplier_type=supplier_type_value,
                status=status_value,
                contact_person=supplier_data.get('contact_person'),
                email=supplier_data.get('email'),
                phone=supplier_data.get('phone'),
                mobile=supplier_data.get('mobile'),
                website=supplier_data.get('website'),
                address_line1=supplier_data.get('address_line1'),
                address_line2=supplier_data.get('address_line2'),
                city=supplier_data.get('city'),
                state_province=supplier_data.get('state_province'),
                postal_code=supplier_data.get('postal_code'),
                country=supplier_data.get('country'),
                payment_terms=payment_terms_value,
                credit_limit=supplier_data.get('credit_limit', 0.0),
                currency=supplier_data.get('currency', 'EUR'),
                standard_lead_time=supplier_data.get('standard_lead_time', 3),
                minimum_order_amount=supplier_data.get('minimum_order_amount'),
                delivery_area=supplier_data.get('delivery_area'),
                quality_rating=supplier_data.get('quality_rating', 0.0),
                delivery_rating=supplier_data.get('delivery_rating', 0.0),
                total_orders=supplier_data.get('total_orders', 0),
                total_amount=supplier_data.get('total_amount', 0.0),
                trust_score=supplier_data.get('trust_score', 0.0),
                is_preferred_supplier=supplier_data.get('is_preferred_supplier', False),
                auto_approve_enabled=supplier_data.get('auto_approve_enabled', False),
                total_pos_count=supplier_data.get('total_pos_count', 0),
                approved_pos_count=supplier_data.get('approved_pos_count', 0),
                on_time_delivery_rate=supplier_data.get('on_time_delivery_rate', 0.0),
                fulfillment_rate=supplier_data.get('fulfillment_rate', 0.0),
                last_performance_update=parse_date_field(
                    supplier_data.get('last_performance_update'),
                    session_time,
                    "last_performance_update"
                ),
                approved_by=supplier_data.get('approved_by'),
                approved_at=parse_date_field(
                    supplier_data.get('approved_at'),
                    session_time,
                    "approved_at"
                ),
                rejection_reason=supplier_data.get('rejection_reason'),
                notes=supplier_data.get('notes'),
                certifications=supplier_data.get('certifications'),
                business_hours=supplier_data.get('business_hours'),
                specializations=supplier_data.get('specializations'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at,
                created_by=created_by,
                updated_by=updated_by
            )
            db.add(new_supplier)
            stats["suppliers"] += 1

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Suppliers data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            suppliers_cloned=stats["suppliers"],
            duration_ms=duration_ms
        )

        return {
            "service": "suppliers",
            "status": "completed",
            "records_cloned": stats["suppliers"],
            "duration_ms": duration_ms,
            "details": {
                "suppliers": stats["suppliers"],
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone suppliers data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "suppliers",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
||||
|
||||
|
||||
@router.get("/clone/health")
async def clone_health_check():
    """Report availability of the internal cloning endpoint.

    The demo orchestrator polls this route to verify the suppliers
    service is reachable before dispatching clone requests.
    """
    return {"service": "suppliers", "clone_endpoint": "available", "version": "2.0.0"}
|
||||
|
||||
|
||||
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Delete all demo data for a virtual tenant.

    Idempotent: repeat calls for the same tenant simply report zero
    deleted rows.
    """
    started = datetime.now(timezone.utc)
    deleted = {"suppliers": 0, "total": 0}

    try:
        # Suppliers are the only table this service owns for demo tenants.
        outcome = await db.execute(
            delete(Supplier).where(Supplier.tenant_id == virtual_tenant_id)
        )
        deleted["suppliers"] = outcome.rowcount
        deleted["total"] = deleted["suppliers"]

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="suppliers",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=deleted
        )

        return {
            "service": "suppliers",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": deleted,
            "duration_ms": int((datetime.now(timezone.utc) - started).total_seconds() * 1000)
        }

    except Exception as e:
        # Keep the session usable for the caller even after a failure.
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="suppliers",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
|
||||
|
||||
|
||||
276
services/suppliers/app/api/supplier_operations.py
Normal file
276
services/suppliers/app/api/supplier_operations.py
Normal file
@@ -0,0 +1,276 @@
|
||||
# services/suppliers/app/api/supplier_operations.py
|
||||
"""
|
||||
Supplier Business Operations API endpoints (BUSINESS)
|
||||
Handles approvals, status updates, active/top suppliers, and delivery/PO operations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.database import get_db
|
||||
from app.services.supplier_service import SupplierService
|
||||
from app.schemas.suppliers import (
|
||||
SupplierApproval, SupplierResponse, SupplierSummary, SupplierStatistics
|
||||
)
|
||||
from app.models.suppliers import SupplierType
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('suppliers')
|
||||
|
||||
|
||||
router = APIRouter(tags=["supplier-operations"])
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("suppliers-service", AuditLog)
|
||||
|
||||
|
||||
# ===== Supplier Operations =====
|
||||
|
||||
@router.get(route_builder.build_operations_route("statistics"), response_model=SupplierStatistics)
async def get_supplier_statistics(
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Return aggregate supplier statistics for the tenant dashboard."""
    try:
        raw_stats = await SupplierService(db).get_supplier_statistics(UUID(tenant_id))
        return SupplierStatistics(**raw_stats)
    except Exception as e:
        logger.error("Error getting supplier statistics", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve statistics")
|
||||
|
||||
|
||||
@router.get(route_builder.build_operations_route("suppliers/active"), response_model=List[SupplierSummary])
async def get_active_suppliers(
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Return every supplier currently in the active state."""
    try:
        active = await SupplierService(db).get_active_suppliers(UUID(tenant_id))
        return [SupplierSummary.from_orm(s) for s in active]
    except Exception as e:
        logger.error("Error getting active suppliers", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve active suppliers")
|
||||
|
||||
|
||||
@router.get(route_builder.build_operations_route("suppliers/top"), response_model=List[SupplierSummary])
async def get_top_suppliers(
    tenant_id: str = Path(..., description="Tenant ID"),
    limit: int = Query(10, ge=1, le=50, description="Number of top suppliers to return"),
    db: AsyncSession = Depends(get_db)
):
    """Return the best-performing suppliers, capped at `limit` entries."""
    try:
        ranked = await SupplierService(db).get_top_suppliers(UUID(tenant_id), limit)
        return [SupplierSummary.from_orm(s) for s in ranked]
    except Exception as e:
        logger.error("Error getting top suppliers", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve top suppliers")
|
||||
|
||||
|
||||
@router.get(route_builder.build_operations_route("suppliers/pending-review"), response_model=List[SupplierSummary])
async def get_suppliers_needing_review(
    tenant_id: str = Path(..., description="Tenant ID"),
    days_since_last_order: int = Query(30, ge=1, le=365, description="Days since last order"),
    db: AsyncSession = Depends(get_db)
):
    """Return suppliers that may warrant a performance review.

    A supplier qualifies when it has had no order within the given
    number of days (service-side criterion).
    """
    try:
        stale = await SupplierService(db).get_suppliers_needing_review(
            UUID(tenant_id), days_since_last_order
        )
        return [SupplierSummary.from_orm(s) for s in stale]
    except Exception as e:
        logger.error("Error getting suppliers needing review", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve suppliers needing review")
|
||||
|
||||
|
||||
@router.post(route_builder.build_resource_action_route("", "supplier_id", "approve"), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def approve_supplier(
    approval_data: SupplierApproval,
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Approve or reject a supplier that is awaiting approval."""
    try:
        supplier_service = SupplierService(db)

        # The supplier must exist before any decision can be recorded.
        if not await supplier_service.get_supplier(supplier_id):
            raise HTTPException(status_code=404, detail="Supplier not found")

        action = approval_data.action
        if action == "approve":
            updated = await supplier_service.approve_supplier(
                supplier_id=supplier_id,
                approved_by=current_user["user_id"],
                notes=approval_data.notes
            )
        elif action == "reject":
            # A rejection must always carry an explanation.
            if not approval_data.notes:
                raise HTTPException(status_code=400, detail="Rejection reason is required")
            updated = await supplier_service.reject_supplier(
                supplier_id=supplier_id,
                rejection_reason=approval_data.notes,
                rejected_by=current_user["user_id"]
            )
        else:
            raise HTTPException(status_code=400, detail="Invalid action")

        # The service signals a non-pending supplier by returning a falsy value.
        if not updated:
            raise HTTPException(status_code=400, detail="Supplier is not in pending approval status")

        return SupplierResponse.from_orm(updated)
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error processing supplier approval", supplier_id=str(supplier_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to process supplier approval")
|
||||
|
||||
|
||||
@router.get(route_builder.build_resource_detail_route("types", "supplier_type"), response_model=List[SupplierSummary])
async def get_suppliers_by_type(
    supplier_type: str = Path(..., description="Supplier type"),
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Return all suppliers matching the given supplier type."""
    try:
        # Validate the path segment against the enum before querying.
        # NOTE(review): this looks members up by *value* after upper-casing,
        # while other call sites index SupplierType by lowercase member name
        # (e.g. SupplierType[...] / SupplierType.multi) — confirm the enum
        # values are actually upper-case.
        try:
            type_enum = SupplierType(supplier_type.upper())
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid supplier type")

        matches = await SupplierService(db).get_suppliers_by_type(UUID(tenant_id), type_enum)
        return [SupplierSummary.from_orm(s) for s in matches]
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting suppliers by type", supplier_type=supplier_type, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve suppliers by type")
|
||||
|
||||
|
||||
@router.get(route_builder.build_operations_route("count"))
async def get_supplier_count(
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get total count of suppliers for a tenant.

    Used for subscription usage tracking and dashboard metrics.

    Returns:
        Dict with a single ``count`` key (number of active suppliers).
    """
    try:
        # Fixed: the session was annotated as the sync `Session` although the
        # dependency yields an AsyncSession and the service is awaited — every
        # sibling endpoint in this module already uses AsyncSession.
        service = SupplierService(db)
        # NOTE(review): the count is scoped to the authenticated user's tenant,
        # not the `tenant_id` path parameter — confirm this is intentional.
        suppliers = await service.get_active_suppliers(tenant_id=UUID(current_user["tenant_id"]))
        count = len(suppliers)

        return {"count": count}

    except Exception as e:
        logger.error("Error getting supplier count", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve supplier count")
|
||||
|
||||
# ============================================================================
|
||||
# Tenant Data Deletion Operations (Internal Service Only)
|
||||
# ============================================================================
|
||||
|
||||
from shared.auth.access_control import service_only_access
|
||||
from shared.services.tenant_deletion import TenantDataDeletionResult
|
||||
from app.services.tenant_deletion_service import SuppliersTenantDeletionService
|
||||
|
||||
|
||||
@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete all suppliers data for a tenant (Internal service only)."""
    try:
        logger.info("suppliers.tenant_deletion.api_called", tenant_id=tenant_id)

        outcome = await SuppliersTenantDeletionService(db).safe_delete_tenant_data(tenant_id)

        # Surface partial/complete failures collected by the deletion service.
        if not outcome.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(outcome.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": outcome.to_dict()
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("suppliers.tenant_deletion.api_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to delete tenant data: {str(e)}")
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run).

    Returns:
        Per-table record counts and their total, without deleting anything.
    """
    try:
        logger.info("suppliers.tenant_deletion.preview_called", tenant_id=tenant_id)

        deletion_service = SuppliersTenantDeletionService(db)
        preview_data = await deletion_service.get_tenant_data_preview(tenant_id)

        # Removed dead code: a TenantDataDeletionResult was built only to hold
        # preview_data, had success forced to True, and was then checked with
        # `if not result.success` — an unreachable branch.
        return {
            "tenant_id": tenant_id,
            "service": "suppliers-service",
            "data_counts": preview_data,
            "total_items": sum(preview_data.values())
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("suppliers.tenant_deletion.preview_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to preview tenant data deletion: {str(e)}")
|
||||
722
services/suppliers/app/api/suppliers.py
Normal file
722
services/suppliers/app/api/suppliers.py
Normal file
@@ -0,0 +1,722 @@
|
||||
# services/suppliers/app/api/suppliers.py
|
||||
"""
|
||||
Supplier CRUD API endpoints (ATOMIC)
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
import httpx
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from app.core.database import get_db
|
||||
from app.services.supplier_service import SupplierService
|
||||
from app.models.suppliers import SupplierPriceList
|
||||
from app.models import AuditLog
|
||||
from app.schemas.suppliers import (
|
||||
SupplierCreate, SupplierUpdate, SupplierResponse, SupplierSummary,
|
||||
SupplierSearchParams, SupplierDeletionSummary,
|
||||
SupplierPriceListCreate, SupplierPriceListUpdate, SupplierPriceListResponse
|
||||
)
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('suppliers')
|
||||
|
||||
|
||||
router = APIRouter(tags=["suppliers"])
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("suppliers-service", AuditLog)
|
||||
|
||||
@router.post(route_builder.build_base_route(""), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def create_supplier(
    supplier_data: SupplierCreate,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create a new supplier.

    First consults the tenant service's subscription-limit endpoint; a
    definitive "cannot add" answer aborts with HTTP 402 (payment required).
    Any failure of the limit check itself (non-200, timeout, transport
    error) is logged and creation proceeds — the check deliberately
    fails open so an unavailable tenant service does not block supplier
    creation.

    Raises:
        HTTPException: 402 when the supplier limit is exceeded, 400 on a
            validation error from the service layer, 500 otherwise.
    """
    try:
        # CRITICAL: Check subscription limit before creating
        from app.core.config import settings

        async with httpx.AsyncClient(timeout=5.0) as client:
            try:
                limit_check_response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/suppliers/can-add",
                    headers={
                        "x-user-id": str(current_user.get('user_id')),
                        "x-tenant-id": str(tenant_id)
                    }
                )

                if limit_check_response.status_code == 200:
                    limit_check = limit_check_response.json()

                    if not limit_check.get('can_add', False):
                        logger.warning(
                            "Supplier limit exceeded",
                            tenant_id=tenant_id,
                            current=limit_check.get('current_count'),
                            max=limit_check.get('max_allowed'),
                            reason=limit_check.get('reason')
                        )
                        # 402 propagates past the httpx handlers below and is
                        # re-raised by the outer `except HTTPException`.
                        raise HTTPException(
                            status_code=402,
                            detail={
                                "error": "supplier_limit_exceeded",
                                "message": limit_check.get('reason', 'Supplier limit exceeded'),
                                "current_count": limit_check.get('current_count'),
                                "max_allowed": limit_check.get('max_allowed'),
                                "upgrade_required": True
                            }
                        )
                else:
                    # Fail open: a broken limit endpoint must not block creation.
                    logger.warning(
                        "Failed to check supplier limit, allowing creation",
                        tenant_id=tenant_id,
                        status_code=limit_check_response.status_code
                    )
            except httpx.TimeoutException:
                logger.warning("Timeout checking supplier limit, allowing creation", tenant_id=tenant_id)
            except httpx.RequestError as e:
                logger.warning("Error checking supplier limit, allowing creation", tenant_id=tenant_id, error=str(e))

        service = SupplierService(db)

        # Get user role from current_user dict
        user_role = current_user.get("role", "member").lower()

        supplier = await service.create_supplier(
            tenant_id=UUID(tenant_id),
            supplier_data=supplier_data,
            created_by=current_user["user_id"],
            created_by_role=user_role
        )

        logger.info("Supplier created successfully", tenant_id=tenant_id, supplier_id=str(supplier.id), supplier_name=supplier.name)

        return SupplierResponse.from_orm(supplier)
    except HTTPException:
        raise
    except ValueError as e:
        # Service-layer validation errors surface as client errors.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error creating supplier", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to create supplier")
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route(""), response_model=List[SupplierSummary])
async def list_suppliers(
    tenant_id: str = Path(..., description="Tenant ID"),
    search_term: Optional[str] = Query(None, description="Search term"),
    supplier_type: Optional[str] = Query(None, description="Supplier type filter"),
    status: Optional[str] = Query(None, description="Status filter"),
    limit: int = Query(50, ge=1, le=1000, description="Number of results to return"),
    offset: int = Query(0, ge=0, description="Number of results to skip"),
    db: AsyncSession = Depends(get_db)
):
    """List suppliers matching the optional filter and pagination criteria."""
    try:
        # Bundle the query-string filters into the service's search object.
        params = SupplierSearchParams(
            search_term=search_term,
            supplier_type=supplier_type,
            status=status,
            limit=limit,
            offset=offset
        )
        matches = await SupplierService(db).search_suppliers(
            tenant_id=UUID(tenant_id),
            search_params=params
        )
        return [SupplierSummary.from_orm(s) for s in matches]
    except Exception as e:
        logger.error("Error listing suppliers", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve suppliers")
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route("batch"), response_model=List[SupplierSummary])
async def get_suppliers_batch(
    tenant_id: str = Path(..., description="Tenant ID"),
    ids: str = Query(..., description="Comma-separated supplier IDs"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get multiple suppliers in a single call for performance optimization.

    This endpoint is designed to eliminate N+1 query patterns when fetching
    supplier data for multiple purchase orders or other entities.

    Args:
        tenant_id: Tenant ID
        ids: Comma-separated supplier IDs (e.g., "abc123,def456,xyz789")

    Returns:
        List of supplier summaries for the requested IDs (missing IDs are
        silently omitted).

    Raises:
        HTTPException: 400 for more than 100 IDs or malformed UUIDs,
            500 on any other failure.
    """
    try:
        service = SupplierService(db)

        # Parse comma-separated IDs; `raw_id` avoids shadowing the `id` builtin.
        supplier_ids = [raw_id.strip() for raw_id in ids.split(",") if raw_id.strip()]

        if not supplier_ids:
            return []

        # Cap batch size so a single request cannot hog the database.
        if len(supplier_ids) > 100:
            raise HTTPException(
                status_code=400,
                detail="Maximum 100 supplier IDs allowed per batch request"
            )

        # Convert to UUIDs; reject the whole batch on the first bad ID.
        try:
            uuid_ids = [UUID(raw_id) for raw_id in supplier_ids]
        except ValueError as e:
            raise HTTPException(status_code=400, detail=f"Invalid supplier ID format: {e}")

        # Fetch suppliers
        suppliers = await service.get_suppliers_batch(tenant_id=UUID(tenant_id), supplier_ids=uuid_ids)

        logger.info(
            "Batch retrieved suppliers",
            tenant_id=tenant_id,
            requested_count=len(supplier_ids),
            found_count=len(suppliers)
        )

        return [SupplierSummary.from_orm(supplier) for supplier in suppliers]

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error batch retrieving suppliers", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to retrieve suppliers")
|
||||
|
||||
|
||||
@router.get(route_builder.build_resource_detail_route("", "supplier_id"), response_model=SupplierResponse)
async def get_supplier(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Fetch a single supplier by ID; 404 when it does not exist.

    NOTE(review): the lookup uses supplier_id only — tenant_id from the
    path is not passed to the service. Confirm tenant scoping is enforced
    inside SupplierService.get_supplier or by upstream middleware.
    """
    try:
        record = await SupplierService(db).get_supplier(supplier_id)
        if not record:
            raise HTTPException(status_code=404, detail="Supplier not found")
        return SupplierResponse.from_orm(record)
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting supplier", supplier_id=str(supplier_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to retrieve supplier")
|
||||
|
||||
|
||||
@router.put(route_builder.build_resource_detail_route("", "supplier_id"), response_model=SupplierResponse)
@require_user_role(['admin', 'owner', 'member'])
async def update_supplier(
    supplier_data: SupplierUpdate,
    supplier_id: UUID = Path(..., description="Supplier ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Update an existing supplier.

    Returns the updated supplier; 404 when the supplier is unknown,
    400 when the service layer rejects the payload.
    """
    try:
        service = SupplierService(db)

        # Guard clause: bail out early when the supplier is unknown.
        if not await service.get_supplier(supplier_id):
            raise HTTPException(status_code=404, detail="Supplier not found")

        updated = await service.update_supplier(
            supplier_id=supplier_id,
            supplier_data=supplier_data,
            updated_by=current_user["user_id"],
        )

        # The service may also report a missing supplier (e.g. deleted
        # between the check above and the update).
        if not updated:
            raise HTTPException(status_code=404, detail="Supplier not found")

        return SupplierResponse.from_orm(updated)
    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error updating supplier", supplier_id=str(supplier_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to update supplier")
|
||||
|
||||
|
||||
@router.delete(route_builder.build_resource_detail_route("", "supplier_id"))
@require_user_role(['admin', 'owner'])
async def delete_supplier(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete supplier (soft delete, Admin+ only).

    Flow: verify the supplier exists, snapshot identifying fields for the
    audit trail, perform the soft delete, then best-effort log an audit
    event on a separate synchronous session. Audit failures are logged
    but never fail the request.

    NOTE(review): the existence check and delete use supplier_id only;
    tenant_id is not passed to the service — confirm tenant scoping is
    enforced elsewhere.
    """
    try:
        service = SupplierService(db)

        # Check supplier exists
        existing_supplier = await service.get_supplier(supplier_id)
        if not existing_supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        # Capture supplier data before deletion (the snapshot is what goes
        # into the audit record, since the row is gone/soft-deleted after).
        supplier_data = {
            "supplier_name": existing_supplier.name,
            "supplier_type": existing_supplier.supplier_type,
            "contact_person": existing_supplier.contact_person,
            "email": existing_supplier.email
        }

        success = await service.delete_supplier(supplier_id)
        if not success:
            raise HTTPException(status_code=404, detail="Supplier not found")

        # Log audit event for supplier deletion.
        # Best-effort: any failure here is logged and swallowed so the
        # already-committed deletion still returns success to the caller.
        try:
            # Get sync db session for audit logging — the audit logger uses
            # a synchronous session distinct from the request's async one.
            from app.core.database import SessionLocal
            sync_db = SessionLocal()
            try:
                await audit_logger.log_deletion(
                    db_session=sync_db,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"],
                    resource_type="supplier",
                    resource_id=str(supplier_id),
                    resource_data=supplier_data,
                    description=f"Admin {current_user.get('email', 'unknown')} deleted supplier",
                    endpoint=f"/suppliers/{supplier_id}",
                    method="DELETE"
                )
                sync_db.commit()
            finally:
                # Always release the ad-hoc sync session, committed or not.
                sync_db.close()
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted supplier",
                    supplier_id=str(supplier_id),
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        return {"message": "Supplier deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error deleting supplier", supplier_id=str(supplier_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to delete supplier")
|
||||
|
||||
|
||||
@router.delete(
    route_builder.build_resource_action_route("", "supplier_id", "hard"),
    response_model=SupplierDeletionSummary
)
@require_user_role(['admin', 'owner'])
async def hard_delete_supplier(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Hard delete supplier and all associated data (Admin/Owner only, permanent).

    Unlike the soft delete above, this permanently removes the supplier and
    its related records via the service layer, returning a summary of what
    was deleted. An audit record (including that summary as metadata) is
    written on a best-effort basis after the deletion.
    """
    try:
        service = SupplierService(db)

        # Check supplier exists
        existing_supplier = await service.get_supplier(supplier_id)
        if not existing_supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        # Capture supplier data before deletion — used only for the audit
        # record, since the row no longer exists afterwards.
        supplier_data = {
            "id": str(existing_supplier.id),
            "name": existing_supplier.name,
            "status": existing_supplier.status.value,
            "supplier_code": existing_supplier.supplier_code
        }

        # Perform hard deletion; the service returns a per-entity summary
        # that doubles as the response payload.
        deletion_summary = await service.hard_delete_supplier(supplier_id, UUID(tenant_id))

        # Log audit event for hard deletion.
        # Best-effort: failures are logged and swallowed so the completed
        # deletion still returns success to the caller.
        try:
            # Get sync db session for audit logging — the audit logger uses
            # a synchronous session distinct from the request's async one.
            from app.core.database import SessionLocal
            sync_db = SessionLocal()
            try:
                await audit_logger.log_deletion(
                    db_session=sync_db,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"],
                    resource_type="supplier",
                    resource_id=str(supplier_id),
                    resource_data=supplier_data,
                    description=f"Hard deleted supplier '{supplier_data['name']}' and all associated data",
                    endpoint=f"/suppliers/{supplier_id}/hard",
                    method="DELETE",
                    metadata=deletion_summary
                )
                sync_db.commit()
            finally:
                # Always release the ad-hoc sync session, committed or not.
                sync_db.close()
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Hard deleted supplier",
                    supplier_id=str(supplier_id),
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"],
                    deletion_summary=deletion_summary)

        return deletion_summary
    except ValueError as e:
        # e.g. a malformed tenant_id failing UUID() conversion.
        raise HTTPException(status_code=400, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error hard deleting supplier", supplier_id=str(supplier_id), error=str(e))
        raise HTTPException(status_code=500, detail="Failed to hard delete supplier")
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_base_route("count"),
    response_model=dict
)
async def count_suppliers(
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Get count of suppliers for a tenant.

    Pages through the search API (which caps each page at 1000 results)
    until a short page is returned, so tenants with more than 1000
    suppliers are counted correctly instead of the count being silently
    truncated at one page.

    Returns:
        {"count": <total number of suppliers>}
    """
    try:
        service = SupplierService(db)

        page_size = 1000  # maximum limit accepted by SupplierSearchParams
        count = 0
        offset = 0
        while True:
            search_params = SupplierSearchParams(limit=page_size, offset=offset)
            suppliers = await service.search_suppliers(
                tenant_id=UUID(tenant_id),
                search_params=search_params
            )
            count += len(suppliers)
            # A short page means we have seen every supplier.
            if len(suppliers) < page_size:
                break
            offset += page_size

        logger.info("Retrieved supplier count", tenant_id=tenant_id, count=count)

        return {"count": count}

    except Exception as e:
        logger.error("Error counting suppliers", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to count suppliers")
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_action_route("", "supplier_id", "products"),
    response_model=List[Dict[str, Any]]
)
async def get_supplier_products(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    is_active: bool = Query(True, description="Filter by active price lists"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get list of product IDs that a supplier provides.

    Returns the distinct inventory product IDs from the supplier's price
    list, in first-seen order (deterministic across calls, unlike the
    previous set-based dedup).
    """
    try:
        # Query supplier price lists scoped to the tenant.
        query = select(SupplierPriceList).where(
            SupplierPriceList.tenant_id == UUID(tenant_id),
            SupplierPriceList.supplier_id == supplier_id
        )

        if is_active:
            query = query.where(SupplierPriceList.is_active == True)

        result = await db.execute(query)
        price_lists = result.scalars().all()

        # Deduplicate while preserving first-seen order; dict.fromkeys keeps
        # insertion order, so repeated calls return a stable response.
        product_ids = list(dict.fromkeys(str(pl.inventory_product_id) for pl in price_lists))

        logger.info(
            "Retrieved supplier products",
            supplier_id=str(supplier_id),
            product_count=len(product_ids)
        )

        return [{"inventory_product_id": pid} for pid in product_ids]

    except Exception as e:
        logger.error(
            "Error getting supplier products",
            supplier_id=str(supplier_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve supplier products"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_action_route("", "supplier_id", "price-lists"),
    response_model=List[SupplierPriceListResponse]
)
async def get_supplier_price_lists(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    is_active: bool = Query(True, description="Filter by active price lists"),
    db: AsyncSession = Depends(get_db)
):
    """Return every price-list entry recorded for the given supplier."""
    try:
        entries = await SupplierService(db).get_supplier_price_lists(
            supplier_id=supplier_id,
            tenant_id=UUID(tenant_id),
            is_active=is_active,
        )

        logger.info(
            "Retrieved supplier price lists",
            supplier_id=str(supplier_id),
            count=len(entries),
        )

        return [SupplierPriceListResponse.from_orm(entry) for entry in entries]

    except Exception as e:
        logger.error(
            "Error getting supplier price lists",
            supplier_id=str(supplier_id),
            error=str(e),
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve supplier price lists",
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}"),
    response_model=SupplierPriceListResponse
)
async def get_supplier_price_list(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    price_list_id: UUID = Path(..., description="Price List ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Fetch one price-list entry by ID; 404 when it does not exist.

    NOTE(review): the entry is looked up by price_list_id + tenant_id only;
    supplier_id from the path is not checked against the record — confirm
    that mismatched supplier/price-list pairs should still resolve.
    """
    try:
        entry = await SupplierService(db).get_supplier_price_list(
            price_list_id=price_list_id,
            tenant_id=UUID(tenant_id),
        )

        if not entry:
            raise HTTPException(status_code=404, detail="Price list item not found")

        logger.info(
            "Retrieved supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id),
        )

        return SupplierPriceListResponse.from_orm(entry)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error getting supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id),
            error=str(e),
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve supplier price list item",
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_resource_action_route("", "supplier_id", "price-lists"),
    response_model=SupplierPriceListResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def create_supplier_price_list(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    price_list_data: SupplierPriceListCreate = None,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create a new price list item for a supplier.

    Returns 400 when the request body is missing or rejected by the
    service layer, 404 when the supplier does not exist, 500 on
    unexpected failure.
    """
    try:
        # The body parameter defaults to None; reject a missing body
        # explicitly instead of letting the service layer crash on it.
        if price_list_data is None:
            raise HTTPException(status_code=400, detail="Price list payload is required")

        service = SupplierService(db)

        # Verify supplier exists
        supplier = await service.get_supplier(supplier_id)
        if not supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        price_list = await service.create_supplier_price_list(
            supplier_id=supplier_id,
            price_list_data=price_list_data,
            tenant_id=UUID(tenant_id),
            created_by=UUID(current_user["user_id"])
        )

        logger.info(
            "Created supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list.id)
        )

        return SupplierPriceListResponse.from_orm(price_list)

    # Re-raise HTTPExceptions (e.g. the 404 above) unchanged; previously
    # they fell into the generic handler and surfaced as a 500.
    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(
            "Error creating supplier price list item",
            supplier_id=str(supplier_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to create supplier price list item"
        )
|
||||
|
||||
|
||||
@router.put(
    route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}"),
    response_model=SupplierPriceListResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_supplier_price_list(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    price_list_id: UUID = Path(..., description="Price List ID"),
    price_list_data: SupplierPriceListUpdate = None,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Update a price list item for a supplier.

    Returns 400 when the request body is missing or rejected by the
    service layer, 404 when the supplier or price-list item does not
    exist, 500 on unexpected failure.
    """
    try:
        # The body parameter defaults to None; reject a missing body
        # explicitly instead of letting the service layer crash on it.
        if price_list_data is None:
            raise HTTPException(status_code=400, detail="Price list payload is required")

        service = SupplierService(db)

        # Verify supplier and price list exist
        supplier = await service.get_supplier(supplier_id)
        if not supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        price_list = await service.get_supplier_price_list(
            price_list_id=price_list_id,
            tenant_id=UUID(tenant_id)
        )
        if not price_list:
            raise HTTPException(status_code=404, detail="Price list item not found")

        updated_price_list = await service.update_supplier_price_list(
            price_list_id=price_list_id,
            price_list_data=price_list_data,
            tenant_id=UUID(tenant_id),
            updated_by=UUID(current_user["user_id"])
        )

        logger.info(
            "Updated supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id)
        )

        return SupplierPriceListResponse.from_orm(updated_price_list)

    # Re-raise HTTPExceptions (the 404s above) unchanged; previously they
    # fell into the generic handler and surfaced as a 500.
    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(
            "Error updating supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to update supplier price list item"
        )
|
||||
|
||||
|
||||
@router.delete(
    route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}")
)
@require_user_role(['admin', 'owner'])
async def delete_supplier_price_list(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    price_list_id: UUID = Path(..., description="Price List ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete a price list item for a supplier.

    Returns 404 when the supplier or price-list item does not exist,
    500 on unexpected failure.
    """
    try:
        service = SupplierService(db)

        # Verify supplier and price list exist
        supplier = await service.get_supplier(supplier_id)
        if not supplier:
            raise HTTPException(status_code=404, detail="Supplier not found")

        price_list = await service.get_supplier_price_list(
            price_list_id=price_list_id,
            tenant_id=UUID(tenant_id)
        )
        if not price_list:
            raise HTTPException(status_code=404, detail="Price list item not found")

        success = await service.delete_supplier_price_list(
            price_list_id=price_list_id,
            tenant_id=UUID(tenant_id)
        )

        if not success:
            raise HTTPException(status_code=404, detail="Price list item not found")

        logger.info(
            "Deleted supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id)
        )

        return {"message": "Price list item deleted successfully"}

    # Re-raise HTTPExceptions (the 404s above) unchanged; previously this
    # handler was missing, so every 404 was converted into a 500.
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error deleting supplier price list item",
            supplier_id=str(supplier_id),
            price_list_id=str(price_list_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to delete supplier price list item"
        )
|
||||
790
services/suppliers/app/consumers/alert_event_consumer.py
Normal file
790
services/suppliers/app/consumers/alert_event_consumer.py
Normal file
@@ -0,0 +1,790 @@
|
||||
"""
|
||||
Alert Event Consumer
|
||||
Processes supplier alert events from RabbitMQ and sends notifications
|
||||
Handles email and Slack notifications for critical alerts
|
||||
"""
|
||||
import json
|
||||
import structlog
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from shared.messaging import RabbitMQClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class AlertEventConsumer:
|
||||
"""
|
||||
Consumes supplier alert events and sends notifications
|
||||
Handles email and Slack notifications for critical alerts
|
||||
"""
|
||||
|
||||
    def __init__(self, db_session: AsyncSession):
        """Create a consumer bound to one async DB session.

        Args:
            db_session: Session used by handlers that persist notification state.
        """
        self.db_session = db_session
        # Email/Slack/rate-limit settings resolved once from environment variables.
        self.notification_config = self._load_notification_config()
|
||||
|
||||
def _load_notification_config(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Load notification configuration from environment
|
||||
|
||||
Returns:
|
||||
Configuration dict with email/Slack settings
|
||||
"""
|
||||
import os
|
||||
|
||||
return {
|
||||
'enabled': os.getenv('ALERT_NOTIFICATION_ENABLED', 'true').lower() == 'true',
|
||||
'email': {
|
||||
'enabled': os.getenv('ALERT_EMAIL_ENABLED', 'true').lower() == 'true',
|
||||
'recipients': os.getenv('ALERT_EMAIL_RECIPIENTS', 'procurement@company.com').split(','),
|
||||
'from_address': os.getenv('ALERT_EMAIL_FROM', 'noreply@bakery-ia.com'),
|
||||
'smtp_host': os.getenv('SMTP_HOST', 'localhost'),
|
||||
'smtp_port': int(os.getenv('SMTP_PORT', '587')),
|
||||
'smtp_username': os.getenv('SMTP_USERNAME', ''),
|
||||
'smtp_password': os.getenv('SMTP_PASSWORD', ''),
|
||||
'use_tls': os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
|
||||
},
|
||||
'slack': {
|
||||
'enabled': os.getenv('ALERT_SLACK_ENABLED', 'false').lower() == 'true',
|
||||
'webhook_url': os.getenv('ALERT_SLACK_WEBHOOK_URL', ''),
|
||||
'channel': os.getenv('ALERT_SLACK_CHANNEL', '#procurement'),
|
||||
'username': os.getenv('ALERT_SLACK_USERNAME', 'Supplier Alert Bot')
|
||||
},
|
||||
'rate_limiting': {
|
||||
'enabled': os.getenv('ALERT_RATE_LIMITING_ENABLED', 'true').lower() == 'true',
|
||||
'max_per_hour': int(os.getenv('ALERT_MAX_PER_HOUR', '10')),
|
||||
'max_per_day': int(os.getenv('ALERT_MAX_PER_DAY', '50'))
|
||||
}
|
||||
}
|
||||
|
||||
    async def consume_alert_events(
        self,
        rabbitmq_client: RabbitMQClient
    ) -> None:
        """
        Start consuming alert events from RabbitMQ

        Binds a queue to the ``suppliers.events`` exchange for all
        ``suppliers.alert.*`` routing keys and dispatches each message to
        :meth:`process_alert_event`. Message-level failures are logged and
        never propagate out of the callback.

        Args:
            rabbitmq_client: Connected client used to register the consumer.
        """
        async def process_message(message):
            """Process a single alert event message"""
            try:
                # message.process() handles ack/nack semantics for us.
                async with message.process():
                    # Parse event data
                    event_data = json.loads(message.body.decode())
                    logger.info(
                        "Received alert event",
                        event_id=event_data.get('event_id'),
                        event_type=event_data.get('event_type'),
                        tenant_id=event_data.get('tenant_id')
                    )

                    # Process the event
                    await self.process_alert_event(event_data)

            except Exception as e:
                # Swallow per-message errors so one bad payload cannot
                # take down the consumer loop.
                logger.error(
                    "Error processing alert event",
                    error=str(e),
                    exc_info=True
                )

        # Start consuming events
        await rabbitmq_client.consume_events(
            exchange_name="suppliers.events",
            queue_name="suppliers.alerts.notifications",
            routing_key="suppliers.alert.*",
            callback=process_message
        )

        logger.info("Started consuming alert events")
|
||||
|
||||
async def process_alert_event(self, event_data: Dict[str, Any]) -> bool:
|
||||
"""
|
||||
Process an alert event based on type
|
||||
|
||||
Args:
|
||||
event_data: Full event payload from RabbitMQ
|
||||
|
||||
Returns:
|
||||
bool: True if processed successfully
|
||||
"""
|
||||
try:
|
||||
if not self.notification_config['enabled']:
|
||||
logger.info("Alert notifications disabled, skipping")
|
||||
return True
|
||||
|
||||
event_type = event_data.get('event_type')
|
||||
data = event_data.get('data', {})
|
||||
tenant_id = event_data.get('tenant_id')
|
||||
|
||||
if not tenant_id:
|
||||
logger.warning("Alert event missing tenant_id", event_data=event_data)
|
||||
return False
|
||||
|
||||
# Route to appropriate handler
|
||||
if event_type == 'suppliers.alert.cost_variance':
|
||||
success = await self._handle_cost_variance_alert(tenant_id, data)
|
||||
elif event_type == 'suppliers.alert.quality':
|
||||
success = await self._handle_quality_alert(tenant_id, data)
|
||||
elif event_type == 'suppliers.alert.delivery':
|
||||
success = await self._handle_delivery_alert(tenant_id, data)
|
||||
else:
|
||||
logger.warning("Unknown alert event type", event_type=event_type)
|
||||
success = True # Mark as processed to avoid retry
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
"Alert event processed successfully",
|
||||
event_type=event_type,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"Alert event processing failed",
|
||||
event_type=event_type,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error in process_alert_event",
|
||||
error=str(e),
|
||||
event_id=event_data.get('event_id'),
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
    async def _handle_cost_variance_alert(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle cost variance alert notification

        Flow: check the per-tenant rate limit, build the notification
        payload, then fan out by severity — email for 'critical' and
        'warning', Slack additionally for 'critical' — and finally record
        how many channels were notified.

        Args:
            tenant_id: Tenant ID
            data: Alert data

        Returns:
            bool: True if handled successfully (including when skipped
            due to rate limiting)
        """
        try:
            alert_id = data.get('alert_id')
            severity = data.get('severity', 'warning')
            supplier_name = data.get('supplier_name', 'Unknown Supplier')
            ingredient_name = data.get('ingredient_name', 'Unknown Ingredient')
            variance_percentage = data.get('variance_percentage', 0)
            old_price = data.get('old_price', 0)
            new_price = data.get('new_price', 0)
            recommendations = data.get('recommendations', [])

            # Check rate limiting — a rate-limited alert is intentionally
            # reported as handled so the broker does not retry it.
            if not await self._check_rate_limit(tenant_id, 'cost_variance'):
                logger.warning(
                    "Rate limit exceeded for cost variance alerts",
                    tenant_id=tenant_id
                )
                return True  # Don't fail, just skip

            # Format notification message shared by all channels.
            notification_data = {
                'alert_id': alert_id,
                'severity': severity,
                'supplier_name': supplier_name,
                'ingredient_name': ingredient_name,
                'variance_percentage': variance_percentage,
                'old_price': old_price,
                'new_price': new_price,
                'price_change': new_price - old_price,
                'recommendations': recommendations,
                'alert_url': self._generate_alert_url(tenant_id, alert_id)
            }

            # Send notifications based on severity
            notifications_sent = 0

            if severity in ['critical', 'warning']:
                # Send email for critical and warning alerts
                if await self._send_email_notification(
                    tenant_id,
                    'cost_variance',
                    notification_data
                ):
                    notifications_sent += 1

            if severity == 'critical':
                # Send Slack for critical alerts only
                if await self._send_slack_notification(
                    tenant_id,
                    'cost_variance',
                    notification_data
                ):
                    notifications_sent += 1

            # Record notification sent (how many channels succeeded).
            await self._record_notification(
                tenant_id=tenant_id,
                alert_id=alert_id,
                notification_type='cost_variance',
                channels_sent=notifications_sent
            )

            logger.info(
                "Cost variance alert notification sent",
                tenant_id=tenant_id,
                alert_id=alert_id,
                severity=severity,
                notifications_sent=notifications_sent
            )

            return True

        except Exception as e:
            logger.error(
                "Error handling cost variance alert",
                error=str(e),
                tenant_id=tenant_id,
                alert_id=data.get('alert_id'),
                exc_info=True
            )
            return False
|
||||
|
||||
async def _handle_quality_alert(
|
||||
self,
|
||||
tenant_id: str,
|
||||
data: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Handle quality alert notification
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
data: Alert data
|
||||
|
||||
Returns:
|
||||
bool: True if handled successfully
|
||||
"""
|
||||
try:
|
||||
alert_id = data.get('alert_id')
|
||||
severity = data.get('severity', 'warning')
|
||||
supplier_name = data.get('supplier_name', 'Unknown Supplier')
|
||||
|
||||
logger.info(
|
||||
"Processing quality alert",
|
||||
tenant_id=tenant_id,
|
||||
alert_id=alert_id,
|
||||
severity=severity,
|
||||
supplier=supplier_name
|
||||
)
|
||||
|
||||
# Check rate limiting
|
||||
if not await self._check_rate_limit(tenant_id, 'quality'):
|
||||
return True
|
||||
|
||||
# For now, just log quality alerts
|
||||
# In production, would implement email/Slack similar to cost variance
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error handling quality alert",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id,
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
async def _handle_delivery_alert(
|
||||
self,
|
||||
tenant_id: str,
|
||||
data: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Handle delivery alert notification
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
data: Alert data
|
||||
|
||||
Returns:
|
||||
bool: True if handled successfully
|
||||
"""
|
||||
try:
|
||||
alert_id = data.get('alert_id')
|
||||
severity = data.get('severity', 'warning')
|
||||
supplier_name = data.get('supplier_name', 'Unknown Supplier')
|
||||
|
||||
logger.info(
|
||||
"Processing delivery alert",
|
||||
tenant_id=tenant_id,
|
||||
alert_id=alert_id,
|
||||
severity=severity,
|
||||
supplier=supplier_name
|
||||
)
|
||||
|
||||
# Check rate limiting
|
||||
if not await self._check_rate_limit(tenant_id, 'delivery'):
|
||||
return True
|
||||
|
||||
# For now, just log delivery alerts
|
||||
# In production, would implement email/Slack similar to cost variance
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error handling delivery alert",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id,
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
async def _check_rate_limit(
    self,
    tenant_id: str,
    alert_type: str
) -> bool:
    """
    Check if notification rate limit has been exceeded using Redis.

    Keeps per-tenant, per-alert-type counters bucketed by the current UTC
    hour and day, and compares them with the configured ``max_per_hour`` /
    ``max_per_day`` limits before incrementing. Fails open (returns True)
    when Redis is unavailable or any error occurs.

    Args:
        tenant_id: Tenant ID
        alert_type: Type of alert

    Returns:
        bool: True if within rate limit, False if exceeded
    """
    try:
        # Rate limiting can be disabled entirely via configuration.
        if not self.notification_config['rate_limiting']['enabled']:
            return True

        # Redis-based rate limiting implementation
        try:
            import redis.asyncio as redis
            from datetime import datetime, timedelta  # NOTE(review): timedelta appears unused
            from app.core.config import Settings

            # Connect to Redis using proper configuration with TLS and auth
            settings = Settings()
            redis_url = settings.REDIS_URL
            # NOTE(review): a new client is created per check and is NOT
            # closed on exception paths — consider a shared client or
            # try/finally to avoid connection leaks.
            redis_client = await redis.from_url(redis_url, decode_responses=True)

            # Rate limit keys, bucketed by current UTC hour and day
            hour_key = f"alert_rate_limit:{tenant_id}:{alert_type}:hour:{datetime.utcnow().strftime('%Y%m%d%H')}"
            day_key = f"alert_rate_limit:{tenant_id}:{alert_type}:day:{datetime.utcnow().strftime('%Y%m%d')}"

            # Get current counts (missing keys count as zero)
            hour_count = await redis_client.get(hour_key)
            day_count = await redis_client.get(day_key)

            hour_count = int(hour_count) if hour_count else 0
            day_count = int(day_count) if day_count else 0

            # Check limits before incrementing.
            # NOTE(review): check-then-increment is not atomic; concurrent
            # callers can slightly exceed the configured limits.
            max_per_hour = self.notification_config['rate_limiting']['max_per_hour']
            max_per_day = self.notification_config['rate_limiting']['max_per_day']

            if hour_count >= max_per_hour:
                logger.warning(
                    "Hourly rate limit exceeded",
                    tenant_id=tenant_id,
                    alert_type=alert_type,
                    count=hour_count,
                    limit=max_per_hour
                )
                await redis_client.close()
                return False

            if day_count >= max_per_day:
                logger.warning(
                    "Daily rate limit exceeded",
                    tenant_id=tenant_id,
                    alert_type=alert_type,
                    count=day_count,
                    limit=max_per_day
                )
                await redis_client.close()
                return False

            # Increment both counters in one pipeline round-trip and refresh
            # TTLs so stale buckets expire on their own.
            pipe = redis_client.pipeline()
            pipe.incr(hour_key)
            pipe.expire(hour_key, 3600)  # 1 hour TTL
            pipe.incr(day_key)
            pipe.expire(day_key, 86400)  # 24 hour TTL
            await pipe.execute()

            await redis_client.close()

            logger.debug(
                "Rate limit check passed",
                tenant_id=tenant_id,
                alert_type=alert_type,
                hour_count=hour_count + 1,
                day_count=day_count + 1
            )
            return True

        except ImportError:
            # Redis client library not installed — fail open.
            logger.warning("Redis not available, skipping rate limiting")
            return True

    except Exception as e:
        logger.error(
            "Error checking rate limit",
            error=str(e),
            tenant_id=tenant_id,
            exc_info=True
        )
        # On error, allow notification
        return True
|
||||
|
||||
async def _send_email_notification(
    self,
    tenant_id: str,
    notification_type: str,
    data: Dict[str, Any]
) -> bool:
    """
    Send email notification via SMTP.

    Builds an HTML message from the alert data and delivers it to the
    configured recipient list using the SMTP settings found in
    ``self.notification_config['email']``.

    Args:
        tenant_id: Tenant ID
        notification_type: Type of notification
        data: Notification data

    Returns:
        bool: True if sent successfully; False when disabled or on error
    """
    try:
        if not self.notification_config['email']['enabled']:
            logger.debug("Email notifications disabled")
            return False

        # Imported lazily so module import does not require email support.
        import smtplib
        from email.mime.text import MIMEText
        from email.mime.multipart import MIMEMultipart

        # Build email content
        subject = self._format_email_subject(notification_type, data)
        body = self._format_email_body(notification_type, data)

        # Create message
        msg = MIMEMultipart('alternative')
        msg['Subject'] = subject
        msg['From'] = self.notification_config['email']['from_address']
        msg['To'] = ', '.join(self.notification_config['email']['recipients'])

        # Attach HTML body
        html_part = MIMEText(body, 'html')
        msg.attach(html_part)

        # Send email — the context manager closes the SMTP connection.
        smtp_config = self.notification_config['email']
        with smtplib.SMTP(smtp_config['smtp_host'], smtp_config['smtp_port']) as server:
            if smtp_config['use_tls']:
                server.starttls()

            # Authenticate only when credentials are configured.
            if smtp_config['smtp_username'] and smtp_config['smtp_password']:
                server.login(smtp_config['smtp_username'], smtp_config['smtp_password'])

            server.send_message(msg)

        logger.info(
            "Email notification sent",
            tenant_id=tenant_id,
            notification_type=notification_type,
            recipients=len(self.notification_config['email']['recipients'])
        )
        return True

    except Exception as e:
        logger.error(
            "Error sending email notification",
            error=str(e),
            tenant_id=tenant_id,
            notification_type=notification_type,
            exc_info=True
        )
        return False
|
||||
|
||||
async def _send_slack_notification(
    self,
    tenant_id: str,
    notification_type: str,
    data: Dict[str, Any]
) -> bool:
    """
    Send Slack notification via an incoming webhook.

    Formats the alert into a Slack attachment payload and POSTs it to the
    configured webhook URL.

    Args:
        tenant_id: Tenant ID
        notification_type: Type of notification
        data: Notification data

    Returns:
        bool: True if sent successfully; False when disabled,
        misconfigured, or on error
    """
    try:
        if not self.notification_config['slack']['enabled']:
            logger.debug("Slack notifications disabled")
            return False

        webhook_url = self.notification_config['slack']['webhook_url']
        if not webhook_url:
            logger.warning("Slack webhook URL not configured")
            return False

        # Imported lazily so module import does not require aiohttp.
        import aiohttp

        # Format Slack message
        message = self._format_slack_message(notification_type, data)

        # Send to Slack — webhook returns HTTP 200 on acceptance.
        async with aiohttp.ClientSession() as session:
            async with session.post(webhook_url, json=message) as response:
                if response.status == 200:
                    logger.info(
                        "Slack notification sent",
                        tenant_id=tenant_id,
                        notification_type=notification_type
                    )
                    return True
                else:
                    logger.error(
                        "Slack notification failed",
                        status=response.status,
                        response=await response.text()
                    )
                    return False

    except Exception as e:
        logger.error(
            "Error sending Slack notification",
            error=str(e),
            tenant_id=tenant_id,
            notification_type=notification_type,
            exc_info=True
        )
        return False
|
||||
|
||||
def _format_email_subject(
|
||||
self,
|
||||
notification_type: str,
|
||||
data: Dict[str, Any]
|
||||
) -> str:
|
||||
"""Format email subject line"""
|
||||
if notification_type == 'cost_variance':
|
||||
severity = data.get('severity', 'warning').upper()
|
||||
ingredient = data.get('ingredient_name', 'Unknown')
|
||||
variance = data.get('variance_percentage', 0)
|
||||
|
||||
return f"[{severity}] Price Alert: {ingredient} (+{variance:.1f}%)"
|
||||
|
||||
return f"Supplier Alert: {notification_type}"
|
||||
|
||||
def _format_email_body(
    self,
    notification_type: str,
    data: Dict[str, Any]
) -> str:
    """Format email body (HTML).

    For ``cost_variance`` alerts renders a styled summary with the old/new
    price, percentage change, recommended actions, and a deep link to the
    alert; all other notification types get a minimal placeholder page.
    """
    if notification_type == 'cost_variance':
        severity = data.get('severity', 'warning')
        # Red for critical, amber for everything else.
        severity_color = '#dc3545' if severity == 'critical' else '#ffc107'

        # Doubled braces ({{ }}) escape literal CSS braces inside the f-string.
        html = f"""
        <html>
        <head>
            <style>
                body {{ font-family: Arial, sans-serif; }}
                .alert-box {{
                    border-left: 4px solid {severity_color};
                    padding: 15px;
                    background-color: #f8f9fa;
                    margin: 20px 0;
                }}
                .metric {{
                    display: inline-block;
                    margin: 10px 20px 10px 0;
                }}
                .metric-label {{
                    color: #6c757d;
                    font-size: 12px;
                    text-transform: uppercase;
                }}
                .metric-value {{
                    font-size: 24px;
                    font-weight: bold;
                    color: #212529;
                }}
                .recommendations {{
                    background-color: #e7f3ff;
                    border: 1px solid #bee5eb;
                    padding: 15px;
                    margin: 20px 0;
                }}
                .btn {{
                    display: inline-block;
                    padding: 10px 20px;
                    background-color: #007bff;
                    color: white;
                    text-decoration: none;
                    border-radius: 4px;
                    margin-top: 15px;
                }}
            </style>
        </head>
        <body>
            <h2>Cost Variance Alert</h2>

            <div class="alert-box">
                <strong>{data.get('supplier_name')}</strong> - {data.get('ingredient_name')}
                <br><br>

                <div class="metric">
                    <div class="metric-label">Previous Price</div>
                    <div class="metric-value">${data.get('old_price', 0):.2f}</div>
                </div>

                <div class="metric">
                    <div class="metric-label">New Price</div>
                    <div class="metric-value">${data.get('new_price', 0):.2f}</div>
                </div>

                <div class="metric">
                    <div class="metric-label">Change</div>
                    <div class="metric-value" style="color: {severity_color};">
                        +{data.get('variance_percentage', 0):.1f}%
                    </div>
                </div>
            </div>

            <div class="recommendations">
                <strong>Recommended Actions:</strong>
                <ul>
                    {''.join(f'<li>{rec}</li>' for rec in data.get('recommendations', []))}
                </ul>
            </div>

            <a href="{data.get('alert_url', '#')}" class="btn">View Alert Details</a>

            <hr style="margin-top: 30px; border: none; border-top: 1px solid #dee2e6;">
            <p style="color: #6c757d; font-size: 12px;">
                This is an automated notification from the Bakery IA Supplier Management System.
            </p>
        </body>
        </html>
        """
        return html

    return "<html><body><p>Alert notification</p></body></html>"
|
||||
|
||||
def _format_slack_message(
    self,
    notification_type: str,
    data: Dict[str, Any]
) -> Dict[str, Any]:
    """Format Slack message payload.

    Builds a legacy Slack "attachments" payload for ``cost_variance``
    alerts (colored bar, price fields, recommendation list); other
    notification types get a plain-text fallback message.
    """
    if notification_type == 'cost_variance':
        severity = data.get('severity', 'warning')
        # Rotating light + red bar for critical, warning sign + amber otherwise.
        emoji = ':rotating_light:' if severity == 'critical' else ':warning:'
        color = 'danger' if severity == 'critical' else 'warning'

        message = {
            "username": self.notification_config['slack']['username'],
            "channel": self.notification_config['slack']['channel'],
            "icon_emoji": emoji,
            "attachments": [
                {
                    "color": color,
                    "title": f"Cost Variance Alert - {data.get('supplier_name')}",
                    "fields": [
                        {
                            "title": "Ingredient",
                            "value": data.get('ingredient_name'),
                            "short": True
                        },
                        {
                            "title": "Price Change",
                            "value": f"+{data.get('variance_percentage', 0):.1f}%",
                            "short": True
                        },
                        {
                            "title": "Previous Price",
                            "value": f"${data.get('old_price', 0):.2f}",
                            "short": True
                        },
                        {
                            "title": "New Price",
                            "value": f"${data.get('new_price', 0):.2f}",
                            "short": True
                        }
                    ],
                    "text": "*Recommendations:*\n" + "\n".join(
                        f"• {rec}" for rec in data.get('recommendations', [])
                    ),
                    "footer": "Bakery IA Supplier Management",
                    "ts": int(datetime.utcnow().timestamp())
                }
            ]
        }
        return message

    return {
        "username": self.notification_config['slack']['username'],
        "text": f"Alert: {notification_type}"
    }
|
||||
|
||||
def _generate_alert_url(self, tenant_id: str, alert_id: str) -> str:
|
||||
"""Generate URL to view alert in dashboard"""
|
||||
import os
|
||||
base_url = os.getenv('FRONTEND_BASE_URL', 'http://localhost:3000')
|
||||
return f"{base_url}/app/suppliers/alerts/{alert_id}"
|
||||
|
||||
async def _record_notification(
    self,
    tenant_id: str,
    alert_id: str,
    notification_type: str,
    channels_sent: int
):
    """
    Record that notification was sent.

    Currently this only emits a structured log entry — nothing is
    persisted, so rate limiting cannot rely on it yet.

    Args:
        tenant_id: Tenant ID
        alert_id: Alert ID
        notification_type: Type of notification
        channels_sent: Number of channels sent to
    """
    try:
        # In production, would store in database:
        # - notification_log table
        # - Used for rate limiting and audit trail

        logger.info(
            "Notification recorded",
            tenant_id=tenant_id,
            alert_id=alert_id,
            notification_type=notification_type,
            channels_sent=channels_sent
        )

    except Exception as e:
        logger.error(
            "Error recording notification",
            error=str(e),
            alert_id=alert_id
        )
|
||||
|
||||
|
||||
# Factory function for creating consumer instance
def create_alert_event_consumer(db_session: AsyncSession) -> AlertEventConsumer:
    """Create an alert event consumer bound to the given async DB session."""
    return AlertEventConsumer(db_session)
|
||||
1
services/suppliers/app/core/__init__.py
Normal file
1
services/suppliers/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/suppliers/app/core/__init__.py
|
||||
147
services/suppliers/app/core/config.py
Normal file
147
services/suppliers/app/core/config.py
Normal file
@@ -0,0 +1,147 @@
|
||||
# services/suppliers/app/core/config.py
|
||||
"""
|
||||
Supplier & Procurement Service Configuration
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import List
|
||||
from pydantic import Field
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
|
||||
class Settings(BaseServiceSettings):
    """Supplier service settings extending base configuration.

    Groups service identity, database/Redis wiring, validation bounds,
    cache TTLs, performance-tracking thresholds, alerting, dashboard,
    and notification configuration for the suppliers service.
    """

    # Override service-specific settings
    SERVICE_NAME: str = "suppliers-service"
    VERSION: str = "1.0.0"
    APP_NAME: str = "Bakery Supplier Service"
    DESCRIPTION: str = "Supplier and procurement management service"

    # API Configuration
    API_V1_STR: str = "/api/v1"

    # Database configuration (secure approach - build from components)
    @property
    def DATABASE_URL(self) -> str:
        """Build database URL from secure components.

        Prefers a complete SUPPLIERS_DATABASE_URL when set; otherwise
        assembles an asyncpg URL from individual env components.
        """
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("SUPPLIERS_DATABASE_URL")
        if complete_url:
            return complete_url

        # Build from components (secure approach)
        user = os.getenv("SUPPLIERS_DB_USER", "suppliers_user")
        # NOTE(review): hard-coded fallback password — ensure the env var is
        # always set in production deployments.
        password = os.getenv("SUPPLIERS_DB_PASSWORD", "suppliers_pass123")
        host = os.getenv("SUPPLIERS_DB_HOST", "localhost")
        port = os.getenv("SUPPLIERS_DB_PORT", "5432")
        name = os.getenv("SUPPLIERS_DB_NAME", "suppliers_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

    # Suppliers-specific Redis database
    REDIS_DB: int = Field(default=4, env="SUPPLIERS_REDIS_DB")

    # File upload configuration
    MAX_UPLOAD_SIZE: int = 10 * 1024 * 1024  # 10MB
    UPLOAD_PATH: str = Field(default="/tmp/uploads", env="SUPPLIERS_UPLOAD_PATH")
    ALLOWED_FILE_EXTENSIONS: List[str] = [".csv", ".xlsx", ".xls", ".pdf", ".png", ".jpg", ".jpeg"]

    # Pagination
    DEFAULT_PAGE_SIZE: int = 50
    MAX_PAGE_SIZE: int = 500

    # Price validation (bounds for unit prices and order totals)
    MIN_UNIT_PRICE: float = 0.01
    MAX_UNIT_PRICE: float = 10000.0
    MIN_ORDER_AMOUNT: float = 1.0
    MAX_ORDER_AMOUNT: float = 100000.0

    # Supplier-specific cache TTL (seconds)
    SUPPLIERS_CACHE_TTL: int = 900  # 15 minutes
    PURCHASE_ORDERS_CACHE_TTL: int = 300  # 5 minutes
    DELIVERIES_CACHE_TTL: int = 180  # 3 minutes
    PRICE_LIST_CACHE_TTL: int = 1800  # 30 minutes

    # Purchase order settings
    DEFAULT_PAYMENT_TERMS_DAYS: int = 30
    MAX_PAYMENT_TERMS_DAYS: int = 90
    DEFAULT_DELIVERY_DAYS: int = 3
    MAX_DELIVERY_DAYS: int = 30

    # Quality and rating settings (1-5 star scales)
    MIN_QUALITY_RATING: float = 1.0
    MAX_QUALITY_RATING: float = 5.0
    MIN_DELIVERY_RATING: float = 1.0
    MAX_DELIVERY_RATING: float = 5.0

    # Lead time settings (in days)
    DEFAULT_LEAD_TIME: int = 3
    MAX_LEAD_TIME: int = 30

    # Order approval thresholds
    AUTO_APPROVE_THRESHOLD: float = 500.0  # Amounts below this auto-approve
    MANAGER_APPROVAL_THRESHOLD: float = 2000.0  # Manager approval required

    # Communication settings
    ORDER_CONFIRMATION_EMAIL: bool = True
    DELIVERY_NOTIFICATION_EMAIL: bool = True
    QUALITY_ISSUE_EMAIL: bool = True

    # Business hours for supplier contact (24h format)
    BUSINESS_HOURS_START: int = 8
    BUSINESS_HOURS_END: int = 18

    # Performance Tracking Settings
    PERFORMANCE_TRACKING_ENABLED: bool = Field(default=True, env="PERFORMANCE_TRACKING_ENABLED")
    PERFORMANCE_CALCULATION_INTERVAL_MINUTES: int = Field(default=60, env="PERFORMANCE_CALCULATION_INTERVAL")
    PERFORMANCE_CACHE_TTL: int = Field(default=300, env="PERFORMANCE_CACHE_TTL")  # 5 minutes

    # Performance Thresholds (percentages used for tiered scoring)
    EXCELLENT_DELIVERY_RATE: float = 95.0
    GOOD_DELIVERY_RATE: float = 90.0
    ACCEPTABLE_DELIVERY_RATE: float = 85.0
    POOR_DELIVERY_RATE: float = 80.0

    EXCELLENT_QUALITY_RATE: float = 98.0
    GOOD_QUALITY_RATE: float = 95.0
    ACCEPTABLE_QUALITY_RATE: float = 90.0
    POOR_QUALITY_RATE: float = 85.0

    # Alert Settings
    ALERTS_ENABLED: bool = Field(default=True, env="SUPPLIERS_ALERTS_ENABLED")
    ALERT_EVALUATION_INTERVAL_MINUTES: int = Field(default=15, env="ALERT_EVALUATION_INTERVAL")
    ALERT_RETENTION_DAYS: int = Field(default=365, env="ALERT_RETENTION_DAYS")

    # Critical alert thresholds
    CRITICAL_DELIVERY_DELAY_HOURS: int = 24
    CRITICAL_QUALITY_REJECTION_RATE: float = 10.0
    HIGH_COST_VARIANCE_PERCENTAGE: float = 15.0

    # Dashboard Settings
    DASHBOARD_CACHE_TTL: int = Field(default=180, env="SUPPLIERS_DASHBOARD_CACHE_TTL")  # 3 minutes
    DASHBOARD_REFRESH_INTERVAL: int = Field(default=300, env="DASHBOARD_REFRESH_INTERVAL")  # 5 minutes

    # Performance Analytics
    DEFAULT_ANALYTICS_PERIOD_DAYS: int = 30
    MAX_ANALYTICS_PERIOD_DAYS: int = 365
    SCORECARD_GENERATION_DAY: int = 1  # Day of month to generate scorecards

    # Notification Settings
    NOTIFICATION_EMAIL_ENABLED: bool = Field(default=True, env="NOTIFICATION_EMAIL_ENABLED")
    NOTIFICATION_WEBHOOK_ENABLED: bool = Field(default=False, env="NOTIFICATION_WEBHOOK_ENABLED")
    NOTIFICATION_WEBHOOK_URL: str = Field(default="", env="NOTIFICATION_WEBHOOK_URL")

    # Business Model Detection (central vs individual bakery heuristics)
    ENABLE_BUSINESS_MODEL_DETECTION: bool = Field(default=True, env="ENABLE_BUSINESS_MODEL_DETECTION")
    CENTRAL_BAKERY_THRESHOLD_SUPPLIERS: int = Field(default=20, env="CENTRAL_BAKERY_THRESHOLD_SUPPLIERS")
    INDIVIDUAL_BAKERY_THRESHOLD_SUPPLIERS: int = Field(default=10, env="INDIVIDUAL_BAKERY_THRESHOLD_SUPPLIERS")

    # Performance Report Settings
    AUTO_GENERATE_MONTHLY_REPORTS: bool = Field(default=True, env="AUTO_GENERATE_MONTHLY_REPORTS")
    AUTO_GENERATE_QUARTERLY_REPORTS: bool = Field(default=True, env="AUTO_GENERATE_QUARTERLY_REPORTS")
    REPORT_EXPORT_FORMATS: List[str] = ["pdf", "excel", "csv"]


# Global settings instance
settings = Settings()
|
||||
86
services/suppliers/app/core/database.py
Normal file
86
services/suppliers/app/core/database.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# services/suppliers/app/core/database.py
|
||||
"""
|
||||
Supplier Service Database Configuration using shared database manager
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from app.core.config import settings
|
||||
from shared.database.base import DatabaseManager, Base
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create database manager instance.
# Pool sizing/recycling and echo behaviour come from shared service settings.
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="suppliers-service",
    pool_size=settings.DB_POOL_SIZE,
    max_overflow=settings.DB_MAX_OVERFLOW,
    pool_recycle=settings.DB_POOL_RECYCLE,
    echo=settings.DB_ECHO
)
|
||||
|
||||
|
||||
async def get_db():
    """
    FastAPI dependency that yields a database session.

    Delegates to the shared database manager so pooling, commit/rollback
    and cleanup behave identically across services.
    """
    async for db_session in database_manager.get_db():
        yield db_session
|
||||
|
||||
|
||||
async def init_db():
    """Initialize database tables using shared database manager.

    Imports the model modules so their tables are registered on the shared
    ``Base.metadata`` before ``create_tables`` runs. Re-raises on failure so
    startup aborts rather than running against a missing schema.
    """
    try:
        logger.info("Initializing Supplier Service database...")

        # Import all models to ensure they're registered
        from app.models import suppliers  # noqa: F401

        # Create all tables using database manager
        await database_manager.create_tables(Base.metadata)

        logger.info("Supplier Service database initialized successfully")

    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise
|
||||
|
||||
|
||||
async def close_db():
    """Close database connections using shared database manager.

    Errors are logged rather than raised so shutdown can proceed.
    """
    try:
        await database_manager.close_connections()
        logger.info("Database connections closed")
    except Exception as e:
        logger.error("Error closing database connections", error=str(e))
|
||||
|
||||
|
||||
@asynccontextmanager
async def get_db_transaction():
    """
    Context manager for database transactions using shared database manager.

    Wraps the yielded session in ``session.begin()`` so the whole block
    commits on success and rolls back on exception; the error is logged
    and re-raised.
    """
    async with database_manager.get_session() as session:
        try:
            async with session.begin():
                yield session
        except Exception as e:
            logger.error("Transaction error", error=str(e))
            raise
|
||||
|
||||
|
||||
@asynccontextmanager
async def get_background_session():
    """
    Context manager for background tasks using shared database manager.

    Use this instead of the request-scoped dependency for work outside
    the FastAPI request lifecycle (schedulers, consumers).
    """
    async with database_manager.get_background_session() as session:
        yield session
|
||||
|
||||
|
||||
async def health_check():
    """Database health check using shared database manager.

    Returns whatever status payload the shared manager reports.
    """
    return await database_manager.health_check()
|
||||
125
services/suppliers/app/main.py
Normal file
125
services/suppliers/app/main.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# services/suppliers/app/main.py
|
||||
"""
|
||||
Supplier & Procurement Service FastAPI Application
|
||||
"""
|
||||
|
||||
import os
|
||||
from fastapi import FastAPI
|
||||
from sqlalchemy import text
|
||||
from app.core.config import settings
|
||||
from app.core.database import database_manager
|
||||
from shared.service_base import StandardFastAPIService
|
||||
|
||||
# Import API routers
|
||||
from app.api import suppliers, supplier_operations, analytics, audit, internal_demo, internal
|
||||
# REMOVED: purchase_orders, deliveries - PO and delivery management moved to Procurement Service
|
||||
# from app.api import purchase_orders, deliveries
|
||||
|
||||
|
||||
class SuppliersService(StandardFastAPIService):
    """Suppliers Service with standardized setup.

    Extends the shared service base with migration verification at
    startup and a suppliers-specific feature list.
    """

    # Alembic revision the deployed schema must be at before serving.
    expected_migration_version = "00001"

    def __init__(self):
        # Define expected database tables for health checks
        # NOTE: PO, delivery, and invoice tables moved to Procurement Service
        suppliers_expected_tables = [
            'suppliers', 'supplier_price_lists',
            'supplier_quality_reviews',
            'supplier_performance_metrics', 'supplier_alerts', 'supplier_scorecards',
            'supplier_benchmarks', 'alert_rules'
        ]

        super().__init__(
            service_name="suppliers-service",
            app_name=settings.APP_NAME,
            description=settings.DESCRIPTION,
            version=settings.VERSION,
            cors_origins=settings.CORS_ORIGINS,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=suppliers_expected_tables
        )

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations.

        Reads ``alembic_version`` and raises RuntimeError on mismatch so the
        service refuses to start against a stale schema.
        """
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            self.logger.error(f"Migration verification failed: {e}")
            raise

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for suppliers service.

        BUG FIX: the class previously defined ``on_startup`` twice; the
        later no-op definition shadowed the first one, so migration
        verification never actually ran. The two definitions are merged
        here so verification happens on every startup.
        """
        await self.verify_migrations()
        await super().on_startup(app)

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for suppliers service"""
        # Database cleanup is handled by the base class
        pass

    def get_service_features(self):
        """Return suppliers-specific features"""
        return [
            "supplier_management",
            "vendor_onboarding",
            # REMOVED: "purchase_orders", "delivery_tracking", "invoice_tracking" - moved to Procurement Service
            "quality_reviews",
            "price_list_management",
            "supplier_ratings",
            "performance_tracking",
            "performance_analytics",
            "supplier_scorecards",
            "performance_alerts",
            "business_model_detection",
            "dashboard_analytics",
            "cost_optimization",
            "risk_assessment",
            "benchmarking"
        ]
|
||||
|
||||
|
||||
# Create service instance
service = SuppliersService()

# Create FastAPI app with standardized setup
app = service.create_app()

# Setup standard endpoints (health, readiness, etc. from the service base)
service.setup_standard_endpoints()

# Include API routers
# IMPORTANT: Order matters! More specific routes must come first
# to avoid path parameter matching issues
# REMOVED: purchase_orders.router, deliveries.router - PO and delivery management moved to Procurement Service
service.add_router(audit.router)  # /suppliers/audit-logs - must be FIRST
service.add_router(supplier_operations.router)  # /suppliers/operations/...
service.add_router(analytics.router)  # /suppliers/analytics/...
service.add_router(suppliers.router)  # /suppliers/{supplier_id} - catch-all, must be last
# NOTE(review): the internal routers below are registered AFTER the
# catch-all suppliers router — verify their path prefixes cannot be
# shadowed by /suppliers/{supplier_id}.
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(internal.router)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Local development entry point; production deployments run under an
    # external ASGI server. Set RELOAD=true for auto-reload during dev.
    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=os.getenv("RELOAD", "false").lower() == "true",
        log_level="info"
    )
|
||||
64
services/suppliers/app/models/__init__.py
Normal file
64
services/suppliers/app/models/__init__.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# services/suppliers/app/models/__init__.py
"""
Models package for the Supplier service.

Re-exports all SQLAlchemy models and enums so callers can import from
``app.models`` directly, and materialises this service's AuditLog model.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service, bound to this service's Base
AuditLog = create_audit_log_model(Base)

from .suppliers import (
    Supplier, SupplierPriceList, SupplierQualityReview,
    SupplierType, SupplierStatus, PaymentTerms, QualityRating,
    # Deprecated stubs for backward compatibility
    PurchaseOrder, PurchaseOrderItem, Delivery, DeliveryItem, SupplierInvoice,
    PurchaseOrderStatus, DeliveryStatus, DeliveryRating, InvoiceStatus
)

from .performance import (
    SupplierPerformanceMetric, SupplierAlert, SupplierScorecard,
    SupplierBenchmark, AlertRule, AlertSeverity, AlertType, AlertStatus,
    PerformanceMetricType, PerformancePeriod
)

# Explicit public API of the package
__all__ = [
    # Supplier Models
    'Supplier',
    'SupplierPriceList',
    'SupplierQualityReview',

    # Performance Models
    'SupplierPerformanceMetric',
    'SupplierAlert',
    'SupplierScorecard',
    'SupplierBenchmark',
    'AlertRule',

    # Supplier Enums
    'SupplierType',
    'SupplierStatus',
    'PaymentTerms',
    'QualityRating',

    # Performance Enums
    'AlertSeverity',
    'AlertType',
    'AlertStatus',
    'PerformanceMetricType',
    'PerformancePeriod',
    "AuditLog",

    # Deprecated stubs (backward compatibility only - DO NOT USE)
    'PurchaseOrder',
    'PurchaseOrderItem',
    'Delivery',
    'DeliveryItem',
    'SupplierInvoice',
    'PurchaseOrderStatus',
    'DeliveryStatus',
    'DeliveryRating',
    'InvoiceStatus',
]
|
||||
392
services/suppliers/app/models/performance.py
Normal file
392
services/suppliers/app/models/performance.py
Normal file
@@ -0,0 +1,392 @@
|
||||
# ================================================================
|
||||
# services/suppliers/app/models/performance.py
|
||||
# ================================================================
|
||||
"""
|
||||
Supplier Performance Tracking and Alert Models for Suppliers Service
|
||||
Comprehensive supplier performance metrics, KPIs, and alert management
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any, Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class AlertSeverity(enum.Enum):
    """Alert severity levels, ordered from most to least urgent."""
    CRITICAL = "critical"
    HIGH = "high"
    MEDIUM = "medium"
    LOW = "low"
    INFO = "info"
|
||||
|
||||
|
||||
class AlertType(enum.Enum):
    """Types of supplier alerts raised by performance tracking."""
    POOR_QUALITY = "poor_quality"
    LATE_DELIVERY = "late_delivery"
    PRICE_INCREASE = "price_increase"
    LOW_PERFORMANCE = "low_performance"
    CONTRACT_EXPIRY = "contract_expiry"
    COMPLIANCE_ISSUE = "compliance_issue"
    FINANCIAL_RISK = "financial_risk"
    COMMUNICATION_ISSUE = "communication_issue"
    CAPACITY_CONSTRAINT = "capacity_constraint"
    CERTIFICATION_EXPIRY = "certification_expiry"
|
||||
|
||||
|
||||
class AlertStatus(enum.Enum):
    """Alert processing status (lifecycle from creation to closure)."""
    ACTIVE = "active"
    ACKNOWLEDGED = "acknowledged"
    IN_PROGRESS = "in_progress"
    RESOLVED = "resolved"
    DISMISSED = "dismissed"
|
||||
|
||||
|
||||
class PerformanceMetricType(enum.Enum):
    """Types of performance metrics tracked per supplier."""
    DELIVERY_PERFORMANCE = "delivery_performance"
    QUALITY_SCORE = "quality_score"
    PRICE_COMPETITIVENESS = "price_competitiveness"
    COMMUNICATION_RATING = "communication_rating"
    ORDER_ACCURACY = "order_accuracy"
    RESPONSE_TIME = "response_time"
    COMPLIANCE_SCORE = "compliance_score"
    FINANCIAL_STABILITY = "financial_stability"
|
||||
|
||||
|
||||
class PerformancePeriod(enum.Enum):
    """Performance measurement aggregation periods."""
    DAILY = "daily"
    WEEKLY = "weekly"
    MONTHLY = "monthly"
    QUARTERLY = "quarterly"
    YEARLY = "yearly"
|
||||
|
||||
|
||||
class SupplierPerformanceMetric(Base):
    """Supplier performance metrics tracking.

    One row per measured (supplier, metric_type, period) window. Timestamps are
    timezone-aware and default to UTC via the lambda defaults below. The
    0-100 scaling of `metric_value` is per the inline comment — the enforcing
    code is elsewhere (presumably the performance service); verify before
    relying on it.
    """
    __tablename__ = "supplier_performance_metrics"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Multi-tenant scoping column; no FK here (tenant lives in another service).
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

    # Metric details
    metric_type = Column(SQLEnum(PerformanceMetricType), nullable=False, index=True)
    period = Column(SQLEnum(PerformancePeriod), nullable=False, index=True)
    period_start = Column(DateTime(timezone=True), nullable=False, index=True)
    period_end = Column(DateTime(timezone=True), nullable=False, index=True)

    # Performance values
    metric_value = Column(Float, nullable=False)  # Main metric value (0-100 scale)
    target_value = Column(Float, nullable=True)  # Target/benchmark value
    previous_value = Column(Float, nullable=True)  # Previous period value for comparison

    # Supporting data (raw counts backing the computed metric)
    total_orders = Column(Integer, nullable=False, default=0)
    total_deliveries = Column(Integer, nullable=False, default=0)
    on_time_deliveries = Column(Integer, nullable=False, default=0)
    late_deliveries = Column(Integer, nullable=False, default=0)
    quality_issues = Column(Integer, nullable=False, default=0)
    total_amount = Column(Numeric(12, 2), nullable=False, default=0.0)

    # Detailed metrics breakdown
    metrics_data = Column(JSONB, nullable=True)  # Detailed breakdown of calculations

    # Performance trends
    trend_direction = Column(String(20), nullable=True)  # improving, declining, stable
    trend_percentage = Column(Float, nullable=True)  # % change from previous period

    # Contextual information
    notes = Column(Text, nullable=True)
    external_factors = Column(JSONB, nullable=True)  # External factors affecting performance

    # Audit fields
    # Lambda default so "now" is evaluated per-insert, not at import time.
    calculated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    calculated_by = Column(UUID(as_uuid=True), nullable=True)  # System or user ID

    # Relationships
    supplier = relationship("Supplier")

    # Indexes
    __table_args__ = (
        Index('ix_performance_metrics_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_performance_metrics_type_period', 'metric_type', 'period'),
        Index('ix_performance_metrics_period_dates', 'period_start', 'period_end'),
        Index('ix_performance_metrics_value', 'metric_value'),
    )
|
||||
|
||||
|
||||
class SupplierAlert(Base):
    """Supplier-related alerts and notifications.

    Tracks the full alert lifecycle (triggered → acknowledged → resolved /
    dismissed) plus escalation and notification bookkeeping. Rows may link
    back to the performance metric, purchase order, or delivery that raised
    them; only `performance_metric_id` has an actual FK — the PO/delivery IDs
    reference another service's data (no constraint here).
    """
    __tablename__ = "supplier_alerts"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

    # Alert classification
    alert_type = Column(SQLEnum(AlertType), nullable=False, index=True)
    severity = Column(SQLEnum(AlertSeverity), nullable=False, index=True)
    status = Column(SQLEnum(AlertStatus), nullable=False, default=AlertStatus.ACTIVE, index=True)

    # Alert content
    title = Column(String(255), nullable=False)
    message = Column(Text, nullable=False)
    description = Column(Text, nullable=True)

    # Alert triggers and context
    trigger_value = Column(Float, nullable=True)  # The value that triggered the alert
    threshold_value = Column(Float, nullable=True)  # The threshold that was exceeded
    metric_type = Column(SQLEnum(PerformanceMetricType), nullable=True, index=True)

    # Related entities (cross-service IDs, hence no ForeignKey on the first two)
    purchase_order_id = Column(UUID(as_uuid=True), nullable=True, index=True)
    delivery_id = Column(UUID(as_uuid=True), nullable=True, index=True)
    performance_metric_id = Column(UUID(as_uuid=True), ForeignKey('supplier_performance_metrics.id'), nullable=True)

    # Alert lifecycle
    triggered_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    acknowledged_at = Column(DateTime(timezone=True), nullable=True)
    acknowledged_by = Column(UUID(as_uuid=True), nullable=True)
    resolved_at = Column(DateTime(timezone=True), nullable=True)
    resolved_by = Column(UUID(as_uuid=True), nullable=True)

    # Actions and resolution
    recommended_actions = Column(JSONB, nullable=True)  # Suggested actions
    actions_taken = Column(JSONB, nullable=True)  # Actions that were taken
    resolution_notes = Column(Text, nullable=True)

    # Auto-resolution
    auto_resolve = Column(Boolean, nullable=False, default=False)
    auto_resolve_condition = Column(JSONB, nullable=True)  # Conditions for auto-resolution

    # Escalation
    escalated = Column(Boolean, nullable=False, default=False)
    escalated_at = Column(DateTime(timezone=True), nullable=True)
    escalated_to = Column(UUID(as_uuid=True), nullable=True)  # User/role escalated to

    # Notification tracking
    notification_sent = Column(Boolean, nullable=False, default=False)
    notification_sent_at = Column(DateTime(timezone=True), nullable=True)
    notification_recipients = Column(JSONB, nullable=True)  # List of recipients

    # Additional metadata
    priority_score = Column(Integer, nullable=False, default=50)  # 1-100 priority scoring
    business_impact = Column(String(50), nullable=True)  # high, medium, low impact
    tags = Column(JSONB, nullable=True)  # Categorization tags

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    # Relationships
    supplier = relationship("Supplier")
    performance_metric = relationship("SupplierPerformanceMetric")

    # Indexes
    __table_args__ = (
        Index('ix_supplier_alerts_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_supplier_alerts_type_severity', 'alert_type', 'severity'),
        Index('ix_supplier_alerts_status_triggered', 'status', 'triggered_at'),
        Index('ix_supplier_alerts_metric_type', 'metric_type'),
        Index('ix_supplier_alerts_priority', 'priority_score'),
    )
|
||||
|
||||
|
||||
class SupplierScorecard(Base):
    """Comprehensive supplier scorecards for performance evaluation.

    One scorecard per (supplier, period window), aggregating the component
    scores (quality/delivery/cost/service) into `overall_score` plus rankings
    and trend data. A scorecard becomes immutable-by-convention once
    `is_final`/`approved_by` are set — nothing in this model enforces that;
    verify against the generating service.
    """
    __tablename__ = "supplier_scorecards"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

    # Scorecard details
    scorecard_name = Column(String(255), nullable=False)
    period = Column(SQLEnum(PerformancePeriod), nullable=False, index=True)
    period_start = Column(DateTime(timezone=True), nullable=False, index=True)
    period_end = Column(DateTime(timezone=True), nullable=False, index=True)

    # Overall performance scores
    overall_score = Column(Float, nullable=False)  # Weighted overall score (0-100)
    quality_score = Column(Float, nullable=False)  # Quality performance (0-100)
    delivery_score = Column(Float, nullable=False)  # Delivery performance (0-100)
    cost_score = Column(Float, nullable=False)  # Cost competitiveness (0-100)
    service_score = Column(Float, nullable=False)  # Service quality (0-100)

    # Performance rankings
    overall_rank = Column(Integer, nullable=True)  # Rank among all suppliers
    category_rank = Column(Integer, nullable=True)  # Rank within supplier category
    total_suppliers_evaluated = Column(Integer, nullable=True)

    # Detailed performance breakdown
    on_time_delivery_rate = Column(Float, nullable=False)  # % of on-time deliveries
    quality_rejection_rate = Column(Float, nullable=False)  # % of quality rejections
    order_accuracy_rate = Column(Float, nullable=False)  # % of accurate orders
    response_time_hours = Column(Float, nullable=False)  # Average response time
    cost_variance_percentage = Column(Float, nullable=False)  # Cost variance from budget

    # Business metrics
    total_orders_processed = Column(Integer, nullable=False, default=0)
    total_amount_processed = Column(Numeric(12, 2), nullable=False, default=0.0)
    average_order_value = Column(Numeric(10, 2), nullable=False, default=0.0)
    cost_savings_achieved = Column(Numeric(10, 2), nullable=False, default=0.0)

    # Performance trends
    score_trend = Column(String(20), nullable=True)  # improving, declining, stable
    score_change_percentage = Column(Float, nullable=True)  # % change from previous period

    # Recommendations and actions
    strengths = Column(JSONB, nullable=True)  # List of strengths
    improvement_areas = Column(JSONB, nullable=True)  # Areas for improvement
    recommended_actions = Column(JSONB, nullable=True)  # Recommended actions

    # Scorecard status
    is_final = Column(Boolean, nullable=False, default=False)
    approved_by = Column(UUID(as_uuid=True), nullable=True)
    approved_at = Column(DateTime(timezone=True), nullable=True)

    # Additional information
    notes = Column(Text, nullable=True)
    attachments = Column(JSONB, nullable=True)  # Supporting documents

    # Audit fields
    generated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    generated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    supplier = relationship("Supplier")

    # Indexes
    __table_args__ = (
        Index('ix_scorecards_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_scorecards_period_dates', 'period_start', 'period_end'),
        Index('ix_scorecards_overall_score', 'overall_score'),
        Index('ix_scorecards_period', 'period'),
        Index('ix_scorecards_final', 'is_final'),
    )
|
||||
|
||||
|
||||
class SupplierBenchmark(Base):
    """Supplier performance benchmarks and industry standards.

    Defines, per metric type, the threshold ladder (excellent / good /
    acceptable / poor) used to grade supplier performance. Benchmarks are
    tenant-scoped and time-bounded via `effective_date` / `expiry_date`; the
    comparison direction (>= vs <=) is not encoded here — it lives in the
    consuming code, verify there.
    """
    __tablename__ = "supplier_benchmarks"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Benchmark details
    benchmark_name = Column(String(255), nullable=False)
    benchmark_type = Column(String(50), nullable=False, index=True)  # industry, internal, custom
    supplier_category = Column(String(100), nullable=True, index=True)  # Target supplier category

    # Metric thresholds
    metric_type = Column(SQLEnum(PerformanceMetricType), nullable=False, index=True)
    excellent_threshold = Column(Float, nullable=False)  # Excellent performance threshold
    good_threshold = Column(Float, nullable=False)  # Good performance threshold
    acceptable_threshold = Column(Float, nullable=False)  # Acceptable performance threshold
    poor_threshold = Column(Float, nullable=False)  # Poor performance threshold

    # Benchmark context
    data_source = Column(String(255), nullable=True)  # Source of benchmark data
    sample_size = Column(Integer, nullable=True)  # Sample size for benchmark
    confidence_level = Column(Float, nullable=True)  # Statistical confidence level

    # Validity and updates
    effective_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    expiry_date = Column(DateTime(timezone=True), nullable=True)
    is_active = Column(Boolean, nullable=False, default=True)

    # Additional information
    description = Column(Text, nullable=True)
    methodology = Column(Text, nullable=True)
    notes = Column(Text, nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=False)

    # Indexes
    __table_args__ = (
        Index('ix_benchmarks_tenant_type', 'tenant_id', 'benchmark_type'),
        Index('ix_benchmarks_metric_type', 'metric_type'),
        Index('ix_benchmarks_category', 'supplier_category'),
        Index('ix_benchmarks_active', 'is_active'),
    )
|
||||
|
||||
|
||||
class AlertRule(Base):
    """Configurable alert rules for supplier performance monitoring.

    Each rule says: when `metric_type` violates `trigger_condition` against
    `threshold_value` for `consecutive_violations` evaluations over
    `evaluation_period`, raise an alert of `alert_type`/`severity`. Scope
    filters, auto-resolution, notification/escalation routing, and trigger
    statistics are all configured per rule. Rule *evaluation* happens in the
    alert service, not in this model.
    """
    __tablename__ = "alert_rules"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Rule identification
    rule_name = Column(String(255), nullable=False)
    rule_description = Column(Text, nullable=True)
    is_active = Column(Boolean, nullable=False, default=True)

    # Alert configuration
    alert_type = Column(SQLEnum(AlertType), nullable=False, index=True)
    severity = Column(SQLEnum(AlertSeverity), nullable=False)
    metric_type = Column(SQLEnum(PerformanceMetricType), nullable=True, index=True)

    # Trigger conditions
    trigger_condition = Column(String(50), nullable=False)  # greater_than, less_than, equals, etc.
    threshold_value = Column(Float, nullable=False)
    consecutive_violations = Column(Integer, nullable=False, default=1)  # How many consecutive violations before alert

    # Scope and filters
    supplier_categories = Column(JSONB, nullable=True)  # Which supplier categories this applies to
    supplier_ids = Column(JSONB, nullable=True)  # Specific suppliers (if applicable)
    exclude_suppliers = Column(JSONB, nullable=True)  # Suppliers to exclude

    # Time constraints
    evaluation_period = Column(SQLEnum(PerformancePeriod), nullable=False)
    time_window_hours = Column(Integer, nullable=True)  # Time window for evaluation
    business_hours_only = Column(Boolean, nullable=False, default=False)

    # Auto-resolution
    auto_resolve = Column(Boolean, nullable=False, default=False)
    auto_resolve_threshold = Column(Float, nullable=True)  # Value at which alert auto-resolves
    auto_resolve_duration_hours = Column(Integer, nullable=True)  # How long condition must be met

    # Notification settings
    notification_enabled = Column(Boolean, nullable=False, default=True)
    notification_recipients = Column(JSONB, nullable=True)  # List of recipients
    escalation_minutes = Column(Integer, nullable=True)  # Minutes before escalation
    escalation_recipients = Column(JSONB, nullable=True)  # Escalation recipients

    # Action triggers
    recommended_actions = Column(JSONB, nullable=True)  # Actions to recommend
    auto_actions = Column(JSONB, nullable=True)  # Actions to automatically trigger

    # Rule metadata
    priority = Column(Integer, nullable=False, default=50)  # Rule priority (1-100)
    tags = Column(JSONB, nullable=True)  # Classification tags

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=False)
    last_triggered = Column(DateTime(timezone=True), nullable=True)
    trigger_count = Column(Integer, nullable=False, default=0)

    # Indexes
    __table_args__ = (
        Index('ix_alert_rules_tenant_active', 'tenant_id', 'is_active'),
        Index('ix_alert_rules_type_severity', 'alert_type', 'severity'),
        Index('ix_alert_rules_metric_type', 'metric_type'),
        Index('ix_alert_rules_priority', 'priority'),
    )
|
||||
333
services/suppliers/app/models/suppliers.py
Normal file
333
services/suppliers/app/models/suppliers.py
Normal file
@@ -0,0 +1,333 @@
|
||||
# services/suppliers/app/models/suppliers.py
|
||||
"""
|
||||
Supplier management models for Suppliers Service
|
||||
Comprehensive supplier management and vendor relationships
|
||||
NOTE: Purchase orders, deliveries, and invoices have been moved to Procurement Service
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any, Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class SupplierType(enum.Enum):
    """What a supplier provides (stored as lowercase strings)."""

    ingredients = "ingredients"  # Raw materials supplier
    packaging = "packaging"      # Packaging materials
    equipment = "equipment"      # Bakery equipment
    services = "services"        # Service providers
    utilities = "utilities"      # Utilities (gas, electricity)
    multi = "multi"              # Multi-category supplier
|
||||
|
||||
|
||||
class SupplierStatus(enum.Enum):
    """Lifecycle state of a supplier record (stored as lowercase strings)."""

    active = "active"                       # usable supplier
    inactive = "inactive"                   # deactivated
    pending_approval = "pending_approval"   # awaiting onboarding approval
    suspended = "suspended"                 # temporarily blocked
    blacklisted = "blacklisted"             # permanently blocked
|
||||
|
||||
|
||||
class PaymentTerms(enum.Enum):
    """Agreed payment terms with a supplier (stored as lowercase strings)."""

    cod = "cod"                     # cash on delivery
    net_15 = "net_15"               # payable within 15 days
    net_30 = "net_30"               # payable within 30 days
    net_45 = "net_45"               # payable within 45 days
    net_60 = "net_60"               # payable within 60 days
    prepaid = "prepaid"             # paid in advance
    credit_terms = "credit_terms"   # custom credit arrangement
|
||||
|
||||
|
||||
class QualityRating(enum.Enum):
    """Five-point quality rating used in supplier reviews (stored as ints)."""

    excellent = 5   # best
    good = 4
    average = 3
    poor = 2
    very_poor = 1   # worst
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DEPRECATED ENUMS - Kept for backward compatibility only
|
||||
# These enums are defined here to prevent import errors, but the actual
|
||||
# tables and functionality have moved to the Procurement Service
|
||||
# ============================================================================
|
||||
|
||||
class PurchaseOrderStatus(enum.Enum):
    """DEPRECATED: Moved to Procurement Service"""
    # Kept only so existing imports keep resolving; do not add new usages.

    draft = "draft"
    pending_approval = "pending_approval"
    approved = "approved"
    sent_to_supplier = "sent_to_supplier"
    confirmed = "confirmed"
    partially_received = "partially_received"
    completed = "completed"
    cancelled = "cancelled"
    disputed = "disputed"
|
||||
|
||||
|
||||
class DeliveryStatus(enum.Enum):
    """DEPRECATED: Moved to Procurement Service"""
    # Kept only so existing imports keep resolving; do not add new usages.

    scheduled = "scheduled"
    in_transit = "in_transit"
    out_for_delivery = "out_for_delivery"
    delivered = "delivered"
    partially_delivered = "partially_delivered"
    failed_delivery = "failed_delivery"
    returned = "returned"
|
||||
|
||||
|
||||
class DeliveryRating(enum.Enum):
    """DEPRECATED: Moved to Procurement Service"""
    # Kept only so existing imports keep resolving; do not add new usages.

    excellent = 5
    good = 4
    average = 3
    poor = 2
    very_poor = 1
|
||||
|
||||
|
||||
class InvoiceStatus(enum.Enum):
    """DEPRECATED: Moved to Procurement Service"""
    # Kept only so existing imports keep resolving; do not add new usages.

    pending = "pending"
    approved = "approved"
    paid = "paid"
    overdue = "overdue"
    disputed = "disputed"
    cancelled = "cancelled"
|
||||
|
||||
|
||||
class Supplier(Base):
    """Master supplier information.

    Tenant-scoped supplier registry row: identity, classification, contact
    and address data, commercial terms, rolled-up performance/trust metrics,
    and onboarding approval state. The aggregate rating/trust fields are
    denormalized here — presumably maintained by a performance job (see
    `last_performance_update`); verify before writing them directly.
    """
    __tablename__ = "suppliers"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Basic supplier information
    name = Column(String(255), nullable=False, index=True)
    supplier_code = Column(String(50), nullable=True, index=True)  # Internal reference code
    tax_id = Column(String(50), nullable=True)  # VAT/Tax ID
    registration_number = Column(String(100), nullable=True)  # Business registration number

    # Supplier classification
    supplier_type = Column(SQLEnum(SupplierType), nullable=False, index=True)
    status = Column(SQLEnum(SupplierStatus), nullable=False, default=SupplierStatus.pending_approval, index=True)

    # Contact information
    contact_person = Column(String(200), nullable=True)
    email = Column(String(254), nullable=True)
    phone = Column(String(30), nullable=True)
    mobile = Column(String(30), nullable=True)
    website = Column(String(255), nullable=True)

    # Address information
    address_line1 = Column(String(255), nullable=True)
    address_line2 = Column(String(255), nullable=True)
    city = Column(String(100), nullable=True)
    state_province = Column(String(100), nullable=True)
    postal_code = Column(String(20), nullable=True)
    country = Column(String(100), nullable=True)

    # Business terms
    payment_terms = Column(SQLEnum(PaymentTerms), nullable=False, default=PaymentTerms.net_30)
    credit_limit = Column(Numeric(12, 2), nullable=True)
    currency = Column(String(3), nullable=False, default="EUR")  # ISO currency code

    # Lead times (in days)
    standard_lead_time = Column(Integer, nullable=False, default=3)
    minimum_order_amount = Column(Numeric(10, 2), nullable=True)
    delivery_area = Column(String(255), nullable=True)

    # Quality and performance metrics (denormalized aggregates)
    quality_rating = Column(Float, nullable=True, default=0.0)  # Average quality rating (1-5)
    delivery_rating = Column(Float, nullable=True, default=0.0)  # Average delivery rating (1-5)
    total_orders = Column(Integer, nullable=False, default=0)
    total_amount = Column(Numeric(12, 2), nullable=False, default=0.0)

    # Trust and auto-approval metrics
    trust_score = Column(Float, nullable=False, default=0.0)  # Calculated trust score (0.0-1.0)
    is_preferred_supplier = Column(Boolean, nullable=False, default=False)  # Preferred supplier status
    auto_approve_enabled = Column(Boolean, nullable=False, default=False)  # Enable auto-approval for this supplier
    total_pos_count = Column(Integer, nullable=False, default=0)  # Total purchase orders created
    approved_pos_count = Column(Integer, nullable=False, default=0)  # Total POs approved
    on_time_delivery_rate = Column(Float, nullable=False, default=0.0)  # Percentage of on-time deliveries
    fulfillment_rate = Column(Float, nullable=False, default=0.0)  # Percentage of orders fully fulfilled
    last_performance_update = Column(DateTime(timezone=True), nullable=True)  # Last time metrics were calculated

    # Onboarding and approval
    approved_by = Column(UUID(as_uuid=True), nullable=True)  # User who approved
    approved_at = Column(DateTime(timezone=True), nullable=True)
    rejection_reason = Column(Text, nullable=True)

    # Additional information
    notes = Column(Text, nullable=True)
    certifications = Column(JSONB, nullable=True)  # Quality certifications, licenses
    business_hours = Column(JSONB, nullable=True)  # Operating hours by day
    specializations = Column(JSONB, nullable=True)  # Product categories, special services

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships (children are deleted with the supplier)
    price_lists = relationship("SupplierPriceList", back_populates="supplier", cascade="all, delete-orphan")
    quality_reviews = relationship("SupplierQualityReview", back_populates="supplier", cascade="all, delete-orphan")

    # Indexes
    __table_args__ = (
        Index('ix_suppliers_tenant_name', 'tenant_id', 'name'),
        Index('ix_suppliers_tenant_status', 'tenant_id', 'status'),
        Index('ix_suppliers_tenant_type', 'tenant_id', 'supplier_type'),
        Index('ix_suppliers_quality_rating', 'quality_rating'),
    )
|
||||
|
||||
|
||||
class SupplierPriceList(Base):
    """Product pricing from suppliers.

    One row per (supplier, inventory product) price entry, with base pricing,
    optional volume tiers, validity window, and product metadata. The product
    itself lives in the Inventory Service — `inventory_product_id` is a
    cross-service reference with no FK constraint here.
    """
    __tablename__ = "supplier_price_lists"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

    # Product identification (references inventory service)
    inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to inventory products
    product_code = Column(String(100), nullable=True)  # Supplier's product code

    # Pricing information
    unit_price = Column(Numeric(10, 4), nullable=False)
    unit_of_measure = Column(String(20), nullable=False)  # kg, g, l, ml, units, etc.
    minimum_order_quantity = Column(Integer, nullable=True, default=1)
    price_per_unit = Column(Numeric(10, 4), nullable=False)  # Calculated field
    # NOTE(review): both unit_price and price_per_unit are nullable=False —
    # who computes price_per_unit is not visible here; confirm in the service layer.

    # Pricing tiers (volume discounts)
    tier_pricing = Column(JSONB, nullable=True)  # [{quantity: 100, price: 2.50}, ...]

    # Validity and terms
    effective_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    expiry_date = Column(DateTime(timezone=True), nullable=True)
    is_active = Column(Boolean, nullable=False, default=True)

    # Additional product details
    brand = Column(String(100), nullable=True)
    packaging_size = Column(String(50), nullable=True)
    origin_country = Column(String(100), nullable=True)
    shelf_life_days = Column(Integer, nullable=True)
    storage_requirements = Column(Text, nullable=True)

    # Quality specifications
    quality_specs = Column(JSONB, nullable=True)  # Quality parameters, certifications
    allergens = Column(JSONB, nullable=True)  # Allergen information

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    supplier = relationship("Supplier", back_populates="price_lists")

    # Indexes
    __table_args__ = (
        Index('ix_price_lists_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_price_lists_inventory_product', 'inventory_product_id'),
        Index('ix_price_lists_active', 'is_active'),
        Index('ix_price_lists_effective_date', 'effective_date'),
    )
|
||||
|
||||
|
||||
class SupplierQualityReview(Base):
    """Quality and performance reviews for suppliers.

    Periodic or incident-driven review of a supplier with 1-5 ratings per
    dimension plus a computed overall average, free-text feedback, and
    follow-up tracking. Note the mixed rating types: `quality_rating` is the
    QualityRating enum while delivery/communication are plain ints on the
    same 1-5 scale.
    """
    __tablename__ = "supplier_quality_reviews"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

    # Review details
    review_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    review_type = Column(String(50), nullable=False)  # monthly, annual, incident

    # Ratings (1-5 scale)
    quality_rating = Column(SQLEnum(QualityRating), nullable=False)
    delivery_rating = Column(Integer, nullable=False)  # 1-5 scale
    communication_rating = Column(Integer, nullable=False)  # 1-5
    overall_rating = Column(Float, nullable=False)  # Calculated average

    # Detailed feedback
    quality_comments = Column(Text, nullable=True)
    delivery_comments = Column(Text, nullable=True)
    communication_comments = Column(Text, nullable=True)
    improvement_suggestions = Column(Text, nullable=True)

    # Issues and corrective actions
    quality_issues = Column(JSONB, nullable=True)  # Documented issues
    corrective_actions = Column(Text, nullable=True)
    follow_up_required = Column(Boolean, nullable=False, default=False)
    follow_up_date = Column(DateTime(timezone=True), nullable=True)

    # Review status
    is_final = Column(Boolean, nullable=False, default=True)
    approved_by = Column(UUID(as_uuid=True), nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    reviewed_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    supplier = relationship("Supplier", back_populates="quality_reviews")

    # Indexes
    __table_args__ = (
        Index('ix_quality_reviews_tenant_supplier', 'tenant_id', 'supplier_id'),
        Index('ix_quality_reviews_date', 'review_date'),
        Index('ix_quality_reviews_overall_rating', 'overall_rating'),
    )
|
||||
|
||||
# ============================================================================
|
||||
# DEPRECATED MODELS - Stub definitions for backward compatibility
|
||||
# These models are defined here ONLY to prevent import errors
|
||||
# The actual tables exist in the Procurement Service database, NOT here
|
||||
# __table__ = None prevents SQLAlchemy from creating these tables
|
||||
# ============================================================================
|
||||
|
||||
class PurchaseOrder:
    """DEPRECATED STUB: Actual implementation in Procurement Service"""

    # Plain (non-Base) class; __table__ stays None so SQLAlchemy never maps it.
    __table__ = None
|
||||
|
||||
|
||||
class PurchaseOrderItem:
    """DEPRECATED STUB: Actual implementation in Procurement Service"""

    # Plain (non-Base) class; __table__ stays None so SQLAlchemy never maps it.
    __table__ = None
|
||||
|
||||
|
||||
class Delivery:
    """DEPRECATED STUB: Actual implementation in Procurement Service"""

    # Plain (non-Base) class; __table__ stays None so SQLAlchemy never maps it.
    __table__ = None
|
||||
|
||||
|
||||
class DeliveryItem:
    """DEPRECATED STUB: Actual implementation in Procurement Service"""

    # Plain (non-Base) class; __table__ stays None so SQLAlchemy never maps it.
    __table__ = None
|
||||
|
||||
|
||||
class SupplierInvoice:
    """DEPRECATED STUB: Actual implementation in Procurement Service"""

    # Plain (non-Base) class; __table__ stays None so SQLAlchemy never maps it.
    __table__ = None
|
||||
1
services/suppliers/app/repositories/__init__.py
Normal file
1
services/suppliers/app/repositories/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/suppliers/app/repositories/__init__.py
|
||||
100
services/suppliers/app/repositories/base.py
Normal file
100
services/suppliers/app/repositories/base.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# services/suppliers/app/repositories/base.py
|
||||
"""
|
||||
Base repository class for common database operations
|
||||
"""
|
||||
|
||||
from typing import TypeVar, Generic, List, Optional, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import desc, asc, select, func
|
||||
from uuid import UUID
|
||||
|
||||
T = TypeVar('T')


class BaseRepository(Generic[T]):
    """Base repository with common async CRUD operations.

    Subclasses bind a concrete SQLAlchemy model class and an ``AsyncSession``.
    All methods are coroutines and must be awaited.

    FIX: ``list_with_filters`` and ``exists`` previously used the sync
    ``Session.query`` API on an ``AsyncSession`` (which has no ``.query``
    attribute), so they raised ``AttributeError`` on every call. Both are now
    async 2.0-style ``select()`` queries.
    """

    def __init__(self, model: type, db: AsyncSession):
        self.model = model  # mapped class this repository operates on
        self.db = db        # async session; caller owns its lifecycle

    async def create(self, obj_data: Dict[str, Any]) -> T:
        """Create a new record from a field dict and return the refreshed row."""
        db_obj = self.model(**obj_data)
        self.db.add(db_obj)
        await self.db.commit()
        # Refresh so server-side defaults (ids, timestamps) are populated.
        await self.db.refresh(db_obj)
        return db_obj

    async def get_by_id(self, record_id: UUID) -> Optional[T]:
        """Get a record by primary key, or None if it does not exist."""
        stmt = select(self.model).filter(self.model.id == record_id)
        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_by_tenant_id(self, tenant_id: UUID, limit: int = 100, offset: int = 0) -> List[T]:
        """Get records belonging to a tenant, with simple limit/offset paging."""
        stmt = select(self.model).filter(
            self.model.tenant_id == tenant_id
        ).limit(limit).offset(offset)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def update(self, record_id: UUID, update_data: Dict[str, Any]) -> Optional[T]:
        """Update record by ID; unknown keys are silently ignored.

        Returns the refreshed record, or None when the id does not exist.
        """
        db_obj = await self.get_by_id(record_id)
        if db_obj:
            for key, value in update_data.items():
                # Only set attributes the model actually declares.
                if hasattr(db_obj, key):
                    setattr(db_obj, key, value)
            await self.db.commit()
            await self.db.refresh(db_obj)
        return db_obj

    async def delete(self, record_id: UUID) -> bool:
        """Delete record by ID. Returns True if a row was deleted."""
        db_obj = await self.get_by_id(record_id)
        if db_obj:
            await self.db.delete(db_obj)
            await self.db.commit()
            return True
        return False

    async def count_by_tenant(self, tenant_id: UUID) -> int:
        """Count records belonging to a tenant."""
        stmt = select(func.count()).select_from(self.model).filter(
            self.model.tenant_id == tenant_id
        )
        result = await self.db.execute(stmt)
        return result.scalar() or 0

    async def list_with_filters(
        self,
        tenant_id: UUID,
        filters: Optional[Dict[str, Any]] = None,
        sort_by: str = "created_at",
        sort_order: str = "desc",
        limit: int = 100,
        offset: int = 0
    ) -> List[T]:
        """List a tenant's records with equality filters and sorting.

        Args:
            filters: column-name -> value equality filters; keys not present
                on the model and None values are skipped.
            sort_by: column to order by; ignored if the model lacks it.
            sort_order: "desc" (default) or anything else for ascending.
        """
        stmt = select(self.model).filter(self.model.tenant_id == tenant_id)

        # Apply equality filters for known, non-None values only.
        if filters:
            for key, value in filters.items():
                if hasattr(self.model, key) and value is not None:
                    stmt = stmt.filter(getattr(self.model, key) == value)

        # Apply sorting only when the requested column exists on the model.
        if hasattr(self.model, sort_by):
            sort_column = getattr(self.model, sort_by)
            if sort_order.lower() == "desc":
                stmt = stmt.order_by(desc(sort_column))
            else:
                stmt = stmt.order_by(asc(sort_column))

        stmt = stmt.limit(limit).offset(offset)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def exists(self, record_id: UUID) -> bool:
        """Check whether a record with the given id exists."""
        return await self.get_by_id(record_id) is not None
|
||||
@@ -0,0 +1,289 @@
|
||||
# services/suppliers/app/repositories/supplier_performance_repository.py
|
||||
"""
|
||||
Supplier Performance Repository - Calculate and manage supplier trust scores
|
||||
Handles supplier performance metrics, trust scores, and auto-approval eligibility
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from decimal import Decimal
|
||||
import structlog
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, case
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.suppliers import (
|
||||
Supplier,
|
||||
PurchaseOrder,
|
||||
PurchaseOrderStatus
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SupplierPerformanceRepository:
    """Repository for calculating and managing supplier performance metrics.

    Reads PO history to derive delivery/fulfillment rates, combines them into
    a weighted trust score, and persists the results (plus derived preferred /
    auto-approve flags) back onto the Supplier row.

    NOTE(review): this module imports PurchaseOrder from app.models.suppliers,
    where it is declared as a deprecated stub (the real table lives in the
    Procurement Service) — confirm these queries run against a database that
    actually contains the purchase_orders table.
    """

    def __init__(self, db: AsyncSession):
        # Async session supplied by the caller; this repository commits and
        # rolls back on it directly.
        self.db = db

    async def calculate_trust_score(self, supplier_id: UUID) -> float:
        """
        Calculate comprehensive trust score for a supplier

        Score components (weighted):
        - Quality rating: 30%
        - Delivery rating: 30%
        - On-time delivery rate: 20%
        - Fulfillment rate: 15%
        - Order history: 5%

        Returns:
            float: Trust score between 0.0 and 1.0
        """
        try:
            # Get supplier with current metrics
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()

            if not supplier:
                logger.warning("Supplier not found for trust score calculation", supplier_id=str(supplier_id))
                return 0.0

            # Calculate on-time delivery rate from recent POs
            on_time_rate = await self._calculate_on_time_delivery_rate(supplier_id)

            # Calculate fulfillment rate from recent POs
            fulfillment_rate = await self._calculate_fulfillment_rate(supplier_id)

            # Calculate order history score (more orders = higher confidence)
            # Saturates at 1.0 once the supplier has 50+ POs.
            # NOTE(review): assumes total_pos_count is never None — a None here
            # raises TypeError and the broad except below turns that into 0.0.
            order_history_score = min(1.0, supplier.total_pos_count / 50.0)

            # Weighted components
            # Ratings are stored on a 0-5 scale; None is treated as 0.
            quality_score = (supplier.quality_rating or 0.0) / 5.0  # Normalize to 0-1
            delivery_score = (supplier.delivery_rating or 0.0) / 5.0  # Normalize to 0-1

            trust_score = (
                quality_score * 0.30 +
                delivery_score * 0.30 +
                on_time_rate * 0.20 +
                fulfillment_rate * 0.15 +
                order_history_score * 0.05
            )

            # Ensure score is between 0 and 1
            trust_score = max(0.0, min(1.0, trust_score))

            logger.info(
                "Trust score calculated",
                supplier_id=str(supplier_id),
                trust_score=trust_score,
                quality_score=quality_score,
                delivery_score=delivery_score,
                on_time_rate=on_time_rate,
                fulfillment_rate=fulfillment_rate,
                order_history_score=order_history_score
            )

            return trust_score

        except Exception as e:
            # Deliberate best-effort: a scoring failure degrades to 0.0 instead
            # of propagating into callers such as metric-refresh jobs.
            logger.error("Error calculating trust score", supplier_id=str(supplier_id), error=str(e))
            return 0.0

    async def _calculate_on_time_delivery_rate(self, supplier_id: UUID, days: int = 90) -> float:
        """Calculate percentage of orders delivered on time in the last N days.

        Only completed POs that have an actual_delivery_date are considered;
        "on time" means actual_delivery_date <= required_delivery_date.
        Returns 0.0 when there are no qualifying orders or on any error.
        """
        try:
            cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)

            # Get completed orders with delivery dates.
            # The CASE expression counts only rows matching the on-time
            # condition, so both aggregates come back in a single query.
            stmt = select(
                func.count(PurchaseOrder.id).label('total_orders'),
                func.count(
                    case(
                        (PurchaseOrder.actual_delivery_date <= PurchaseOrder.required_delivery_date, 1)
                    )
                ).label('on_time_orders')
            ).where(
                and_(
                    PurchaseOrder.supplier_id == supplier_id,
                    PurchaseOrder.status == PurchaseOrderStatus.completed,
                    PurchaseOrder.actual_delivery_date.isnot(None),
                    PurchaseOrder.created_at >= cutoff_date
                )
            )

            result = await self.db.execute(stmt)
            row = result.one()

            if row.total_orders == 0:
                return 0.0

            on_time_rate = float(row.on_time_orders) / float(row.total_orders)
            return on_time_rate

        except Exception as e:
            # Best-effort: treat query failures as "no data" rather than raising.
            logger.error("Error calculating on-time delivery rate", supplier_id=str(supplier_id), error=str(e))
            return 0.0

    async def _calculate_fulfillment_rate(self, supplier_id: UUID, days: int = 90) -> float:
        """Calculate percentage of orders fully fulfilled (no shortages) in the last N days.

        Denominator: POs that are completed or partially_received in the
        window; numerator: those that are fully completed. Returns 0.0 when
        there are no qualifying orders or on any error.
        """
        try:
            cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)

            # Get completed/confirmed orders
            stmt = select(
                func.count(PurchaseOrder.id).label('total_orders'),
                func.count(
                    case(
                        (PurchaseOrder.status == PurchaseOrderStatus.completed, 1)
                    )
                ).label('completed_orders')
            ).where(
                and_(
                    PurchaseOrder.supplier_id == supplier_id,
                    PurchaseOrder.status.in_([
                        PurchaseOrderStatus.completed,
                        PurchaseOrderStatus.partially_received
                    ]),
                    PurchaseOrder.created_at >= cutoff_date
                )
            )

            result = await self.db.execute(stmt)
            row = result.one()

            if row.total_orders == 0:
                return 0.0

            fulfillment_rate = float(row.completed_orders) / float(row.total_orders)
            return fulfillment_rate

        except Exception as e:
            # Best-effort: treat query failures as "no data" rather than raising.
            logger.error("Error calculating fulfillment rate", supplier_id=str(supplier_id), error=str(e))
            return 0.0

    async def update_supplier_performance_metrics(self, supplier_id: UUID) -> Dict[str, Any]:
        """
        Update all performance metrics for a supplier

        Recalculates trust score, on-time rate and fulfillment rate, persists
        them on the Supplier row, and re-derives the preferred-supplier and
        auto-approve flags from hard-coded thresholds.

        Returns:
            Dict with updated metrics (empty dict when the supplier is missing)

        Raises:
            Re-raises any persistence error after rolling back the session.
        """
        try:
            # Calculate all metrics
            trust_score = await self.calculate_trust_score(supplier_id)
            on_time_rate = await self._calculate_on_time_delivery_rate(supplier_id)
            fulfillment_rate = await self._calculate_fulfillment_rate(supplier_id)

            # Get current supplier
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()

            if not supplier:
                return {}

            # Update supplier metrics
            supplier.trust_score = trust_score
            supplier.on_time_delivery_rate = on_time_rate
            supplier.fulfillment_rate = fulfillment_rate
            supplier.last_performance_update = datetime.now(timezone.utc)

            # Auto-update preferred status based on performance
            # Thresholds: 10+ POs and trust >= 0.80 while active.
            supplier.is_preferred_supplier = (
                supplier.total_pos_count >= 10 and
                trust_score >= 0.80 and
                supplier.status.value == 'active'
            )

            # Auto-update auto-approve eligibility
            # Stricter gate: 20+ POs, trust >= 0.85, on-time >= 0.90, and the
            # supplier must already qualify as preferred.
            supplier.auto_approve_enabled = (
                supplier.total_pos_count >= 20 and
                trust_score >= 0.85 and
                on_time_rate >= 0.90 and
                supplier.is_preferred_supplier and
                supplier.status.value == 'active'
            )

            await self.db.commit()

            logger.info(
                "Supplier performance metrics updated",
                supplier_id=str(supplier_id),
                trust_score=trust_score,
                is_preferred=supplier.is_preferred_supplier,
                auto_approve_enabled=supplier.auto_approve_enabled
            )

            return {
                "supplier_id": str(supplier_id),
                "trust_score": trust_score,
                "on_time_delivery_rate": on_time_rate,
                "fulfillment_rate": fulfillment_rate,
                "is_preferred_supplier": supplier.is_preferred_supplier,
                "auto_approve_enabled": supplier.auto_approve_enabled,
                "last_updated": supplier.last_performance_update.isoformat()
            }

        except Exception as e:
            # Persistence failures roll back and propagate, unlike the
            # read-only calculators above which degrade to 0.0.
            await self.db.rollback()
            logger.error("Error updating supplier performance metrics", supplier_id=str(supplier_id), error=str(e))
            raise

    async def increment_po_counters(self, supplier_id: UUID, approved: bool = False):
        """Increment PO counters when a new PO is created or approved.

        Always bumps total_pos_count; additionally bumps approved_pos_count
        when ``approved`` is True. Errors are logged and swallowed after a
        rollback (best-effort counter maintenance).
        """
        try:
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()

            if supplier:
                supplier.total_pos_count += 1
                if approved:
                    supplier.approved_pos_count += 1

                await self.db.commit()
                logger.info(
                    "Supplier PO counters incremented",
                    supplier_id=str(supplier_id),
                    total=supplier.total_pos_count,
                    approved=supplier.approved_pos_count
                )

        except Exception as e:
            await self.db.rollback()
            logger.error("Error incrementing PO counters", supplier_id=str(supplier_id), error=str(e))

    async def get_supplier_with_performance(self, supplier_id: UUID) -> Optional[Dict[str, Any]]:
        """Get supplier data with all performance metrics.

        Returns a JSON-friendly dict (UUIDs/datetimes stringified), or None
        when the supplier does not exist or the lookup fails.
        """
        try:
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()

            if not supplier:
                return None

            return {
                "id": str(supplier.id),
                "name": supplier.name,
                "trust_score": supplier.trust_score,
                "is_preferred_supplier": supplier.is_preferred_supplier,
                "auto_approve_enabled": supplier.auto_approve_enabled,
                "total_pos_count": supplier.total_pos_count,
                "approved_pos_count": supplier.approved_pos_count,
                "on_time_delivery_rate": supplier.on_time_delivery_rate,
                "fulfillment_rate": supplier.fulfillment_rate,
                "quality_rating": supplier.quality_rating,
                "delivery_rating": supplier.delivery_rating,
                "status": supplier.status.value if supplier.status else None,
                "last_performance_update": supplier.last_performance_update.isoformat() if supplier.last_performance_update else None
            }

        except Exception as e:
            logger.error("Error getting supplier with performance", supplier_id=str(supplier_id), error=str(e))
            return None
|
||||
454
services/suppliers/app/repositories/supplier_repository.py
Normal file
454
services/suppliers/app/repositories/supplier_repository.py
Normal file
@@ -0,0 +1,454 @@
|
||||
# services/suppliers/app/repositories/supplier_repository.py
|
||||
"""
|
||||
Supplier repository for database operations
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import and_, or_, func, select
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.suppliers import Supplier, SupplierStatus, SupplierType
|
||||
from app.repositories.base import BaseRepository
|
||||
|
||||
|
||||
class SupplierRepository(BaseRepository[Supplier]):
    """Repository for supplier management operations.

    All public methods are async and run 2.0-style ``select()`` statements on
    the ``AsyncSession`` supplied at construction time.

    FIX: ``get_suppliers_by_type``, ``get_top_suppliers``,
    ``update_supplier_stats`` and ``get_suppliers_needing_review`` previously
    used the sync ``Session.query`` API (and unawaited coroutines) on an
    ``AsyncSession``, which fails at runtime (``AsyncSession`` has no
    ``.query``); they are now async select-based like the rest of the class.
    """

    def __init__(self, db: AsyncSession):
        super().__init__(Supplier, db)

    async def get_by_name(self, tenant_id: UUID, name: str) -> Optional[Supplier]:
        """Get supplier by exact name within a tenant, or None."""
        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.name == name
            )
        )
        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def get_by_supplier_code(self, tenant_id: UUID, supplier_code: str) -> Optional[Supplier]:
        """Get supplier by supplier code within a tenant, or None."""
        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.supplier_code == supplier_code
            )
        )
        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def search_suppliers(
        self,
        tenant_id: UUID,
        search_term: Optional[str] = None,
        supplier_type: Optional[SupplierType] = None,
        status: Optional[SupplierStatus] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[Supplier]:
        """Search a tenant's suppliers with optional filters.

        ``search_term`` is matched case-insensitively against name, contact
        person and email. Results are ordered by name and paginated.
        """
        stmt = select(self.model).filter(self.model.tenant_id == tenant_id)

        # Free-text filter across name, contact person, and email.
        if search_term:
            pattern = f"%{search_term}%"
            stmt = stmt.filter(or_(
                self.model.name.ilike(pattern),
                self.model.contact_person.ilike(pattern),
                self.model.email.ilike(pattern)
            ))

        if supplier_type:
            stmt = stmt.filter(self.model.supplier_type == supplier_type)

        if status:
            stmt = stmt.filter(self.model.status == status)

        stmt = stmt.order_by(self.model.name).limit(limit).offset(offset)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_active_suppliers(self, tenant_id: UUID) -> List[Supplier]:
        """Get all active suppliers for a tenant, ordered by name."""
        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.status == SupplierStatus.active
            )
        ).order_by(self.model.name)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_suppliers_by_ids(self, tenant_id: UUID, supplier_ids: List[UUID]) -> List[Supplier]:
        """Batch-fetch multiple suppliers by id in a single query."""
        if not supplier_ids:
            # Avoid issuing an empty IN () clause.
            return []

        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.id.in_(supplier_ids)
            )
        ).order_by(self.model.name)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_suppliers_by_type(
        self,
        tenant_id: UUID,
        supplier_type: SupplierType
    ) -> List[Supplier]:
        """Get active suppliers of a given type, highest quality rating first.

        FIX: converted from a broken sync ``Session.query`` method to async.
        """
        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.supplier_type == supplier_type,
                self.model.status == SupplierStatus.active
            )
        ).order_by(self.model.quality_rating.desc(), self.model.name)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_top_suppliers(
        self,
        tenant_id: UUID,
        limit: int = 10
    ) -> List[Supplier]:
        """Get top active suppliers by quality rating, then total order value.

        FIX: converted from a broken sync ``Session.query`` method to async.
        """
        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.status == SupplierStatus.active
            )
        ).order_by(
            self.model.quality_rating.desc(),
            self.model.total_amount.desc()
        ).limit(limit)
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def update_supplier_stats(
        self,
        supplier_id: UUID,
        total_orders_increment: int = 0,
        total_amount_increment: float = 0.0,
        new_quality_rating: Optional[float] = None,
        new_delivery_rating: Optional[float] = None
    ) -> Optional[Supplier]:
        """Update supplier performance statistics.

        Ratings passed in should already be calculated averages; they replace
        the stored values rather than being averaged here.

        FIX: was a sync method that called the async ``get_by_id`` without
        awaiting it and issued unawaited ``commit``/``refresh`` calls.
        """
        supplier = await self.get_by_id(supplier_id)
        if not supplier:
            return None

        # Update counters
        if total_orders_increment:
            supplier.total_orders += total_orders_increment

        if total_amount_increment:
            supplier.total_amount += total_amount_increment

        # Update ratings (these should be calculated averages)
        if new_quality_rating is not None:
            supplier.quality_rating = new_quality_rating

        if new_delivery_rating is not None:
            supplier.delivery_rating = new_delivery_rating

        supplier.updated_at = datetime.utcnow()

        await self.db.commit()
        await self.db.refresh(supplier)
        return supplier

    async def get_suppliers_needing_review(
        self,
        tenant_id: UUID,
        days_since_last_order: int = 30
    ) -> List[Supplier]:
        """Get active suppliers that may need a performance review.

        Flags suppliers with a quality or delivery rating below 3.0, or whose
        row has not been touched for ``days_since_last_order`` days (stale
        ``updated_at`` is used as a proxy for inactivity).

        FIX: converted from a broken sync ``Session.query`` method to async.
        """
        from datetime import timedelta
        cutoff_date = datetime.utcnow() - timedelta(days=days_since_last_order)

        stmt = select(self.model).filter(
            and_(
                self.model.tenant_id == tenant_id,
                self.model.status == SupplierStatus.active,
                or_(
                    self.model.quality_rating < 3.0,   # Poor rating
                    self.model.delivery_rating < 3.0,  # Poor delivery
                    self.model.updated_at < cutoff_date  # Long time since interaction
                )
            )
        ).order_by(self.model.quality_rating.asc())
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_supplier_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get supplier statistics for the dashboard.

        Loads the tenant's suppliers once and aggregates in Python to avoid
        multiple queries and database enum-casting issues.
        """
        total_suppliers = await self.count_by_tenant(tenant_id)

        all_stmt = select(self.model).filter(self.model.tenant_id == tenant_id)
        all_result = await self.db.execute(all_stmt)
        all_suppliers = all_result.scalars().all()

        active_suppliers = [s for s in all_suppliers if s.status == SupplierStatus.active]
        pending_suppliers = [s for s in all_suppliers if s.status == SupplierStatus.pending_approval]

        # Averages consider only active suppliers with a positive rating.
        quality_ratings = [s.quality_rating for s in active_suppliers if s.quality_rating and s.quality_rating > 0]
        avg_quality_rating = sum(quality_ratings) / len(quality_ratings) if quality_ratings else 0.0

        delivery_ratings = [s.delivery_rating for s in active_suppliers if s.delivery_rating and s.delivery_rating > 0]
        avg_delivery_rating = sum(delivery_ratings) / len(delivery_ratings) if delivery_ratings else 0.0

        # Total spend across ALL suppliers (not just active ones).
        total_spend = sum(float(s.total_amount or 0) for s in all_suppliers)

        return {
            "total_suppliers": total_suppliers,
            "active_suppliers": len(active_suppliers),
            "pending_suppliers": len(pending_suppliers),
            "avg_quality_rating": round(float(avg_quality_rating), 2),
            "avg_delivery_rating": round(float(avg_delivery_rating), 2),
            "total_spend": float(total_spend)
        }

    async def approve_supplier(
        self,
        supplier_id: UUID,
        approved_by: UUID,
        approval_date: Optional[datetime] = None
    ) -> Optional[Supplier]:
        """Approve a pending supplier.

        Returns None when the supplier is missing or not pending approval.
        """
        supplier = await self.get_by_id(supplier_id)
        if not supplier or supplier.status != SupplierStatus.pending_approval:
            return None

        supplier.status = SupplierStatus.active
        supplier.approved_by = approved_by
        supplier.approved_at = approval_date or datetime.utcnow()
        supplier.rejection_reason = None  # clear any stale rejection note
        supplier.updated_at = datetime.utcnow()

        await self.db.commit()
        await self.db.refresh(supplier)
        return supplier

    async def reject_supplier(
        self,
        supplier_id: UUID,
        rejection_reason: str,
        approved_by: UUID
    ) -> Optional[Supplier]:
        """Reject a pending supplier, marking it inactive with a reason.

        Returns None when the supplier is missing or not pending approval.
        """
        supplier = await self.get_by_id(supplier_id)
        if not supplier or supplier.status != SupplierStatus.pending_approval:
            return None

        supplier.status = SupplierStatus.inactive
        supplier.rejection_reason = rejection_reason
        # approved_by/approved_at record who processed the decision, even
        # when the decision is a rejection.
        supplier.approved_by = approved_by
        supplier.approved_at = datetime.utcnow()
        supplier.updated_at = datetime.utcnow()

        await self.db.commit()
        await self.db.refresh(supplier)
        return supplier

    async def _count_rows_for_supplier(self, model_cls: type, supplier_id: UUID) -> int:
        """Count rows of ``model_cls`` referencing ``supplier_id`` via COUNT(*).

        FIX: previously the caller fetched every row and took len() of the
        result just to get a count.
        """
        stmt = select(func.count()).select_from(model_cls).where(
            model_cls.supplier_id == supplier_id
        )
        result = await self.db.execute(stmt)
        return result.scalar() or 0

    async def hard_delete_supplier(self, supplier_id: UUID) -> Optional[Dict[str, Any]]:
        """
        Hard delete a supplier and all associated data.

        Returns counts of deleted records, or None when the supplier does not
        exist.

        Raises:
            ValueError: if the supplier still has active purchase orders.
        """
        from app.models.suppliers import (
            SupplierPriceList, SupplierQualityReview,
            SupplierAlert, SupplierScorecard, PurchaseOrderStatus, PurchaseOrder
        )
        from app.models.performance import SupplierPerformanceMetric
        from sqlalchemy import delete

        supplier = await self.get_by_id(supplier_id)
        if not supplier:
            return None

        # Block deletion while any purchase order is still in-flight.
        active_statuses = [
            PurchaseOrderStatus.draft,
            PurchaseOrderStatus.pending_approval,
            PurchaseOrderStatus.approved,
            PurchaseOrderStatus.sent_to_supplier,
            PurchaseOrderStatus.confirmed
        ]

        stmt = select(PurchaseOrder).where(
            PurchaseOrder.supplier_id == supplier_id,
            PurchaseOrder.status.in_(active_statuses)
        )
        result = await self.db.execute(stmt)
        active_pos = result.scalars().all()

        if active_pos:
            raise ValueError(
                f"Cannot delete supplier with {len(active_pos)} active purchase orders. "
                "Complete or cancel all purchase orders first."
            )

        # Count related records before deletion so the caller gets a report.
        price_lists_count = await self._count_rows_for_supplier(SupplierPriceList, supplier_id)
        quality_reviews_count = await self._count_rows_for_supplier(SupplierQualityReview, supplier_id)
        metrics_count = await self._count_rows_for_supplier(SupplierPerformanceMetric, supplier_id)
        alerts_count = await self._count_rows_for_supplier(SupplierAlert, supplier_id)
        scorecards_count = await self._count_rows_for_supplier(SupplierScorecard, supplier_id)

        # Delete related records in reverse dependency order.
        for related_model in (
            SupplierScorecard,
            SupplierAlert,
            SupplierPerformanceMetric,
            SupplierQualityReview,
            SupplierPriceList,
        ):
            stmt = delete(related_model).where(related_model.supplier_id == supplier_id)
            await self.db.execute(stmt)

        # Delete the supplier itself (BaseRepository.delete commits).
        await self.delete(supplier_id)

        await self.db.commit()

        return {
            "supplier_name": supplier.name,
            "deleted_price_lists": price_lists_count,
            "deleted_quality_reviews": quality_reviews_count,
            "deleted_performance_metrics": metrics_count,
            "deleted_alerts": alerts_count,
            "deleted_scorecards": scorecards_count,
            "deletion_timestamp": datetime.utcnow()
        }

    async def get_supplier_price_lists(
        self,
        supplier_id: UUID,
        tenant_id: UUID,
        is_active: bool = True
    ) -> List[Any]:
        """Get all price list items for a supplier.

        When ``is_active`` is True (default), only active items are returned;
        False returns every item regardless of its active flag.
        """
        from app.models.suppliers import SupplierPriceList

        stmt = select(SupplierPriceList).filter(
            and_(
                SupplierPriceList.supplier_id == supplier_id,
                SupplierPriceList.tenant_id == tenant_id
            )
        )

        if is_active:
            stmt = stmt.filter(SupplierPriceList.is_active == True)

        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def get_supplier_price_list(
        self,
        price_list_id: UUID,
        tenant_id: UUID
    ) -> Optional[Any]:
        """Get a specific price list item scoped to the tenant, or None."""
        from app.models.suppliers import SupplierPriceList

        stmt = select(SupplierPriceList).filter(
            and_(
                SupplierPriceList.id == price_list_id,
                SupplierPriceList.tenant_id == tenant_id
            )
        )
        result = await self.db.execute(stmt)
        return result.scalar_one_or_none()

    async def create_supplier_price_list(
        self,
        create_data: Dict[str, Any]
    ) -> Any:
        """Create a new price list item and return the refreshed row."""
        from app.models.suppliers import SupplierPriceList

        price_list = SupplierPriceList(**create_data)
        self.db.add(price_list)
        await self.db.commit()
        await self.db.refresh(price_list)
        return price_list

    async def update_supplier_price_list(
        self,
        price_list_id: UUID,
        update_data: Dict[str, Any]
    ) -> Any:
        """Update a price list item; unknown keys are ignored.

        Raises:
            ValueError: if the item does not exist.
        """
        from app.models.suppliers import SupplierPriceList

        stmt = select(SupplierPriceList).filter(SupplierPriceList.id == price_list_id)
        result = await self.db.execute(stmt)
        price_list = result.scalar_one_or_none()

        if not price_list:
            raise ValueError("Price list item not found")

        for key, value in update_data.items():
            if hasattr(price_list, key):
                setattr(price_list, key, value)

        await self.db.commit()
        await self.db.refresh(price_list)
        return price_list

    async def delete_supplier_price_list(
        self,
        price_list_id: UUID
    ) -> bool:
        """Delete a price list item. Returns True if a row was deleted."""
        from app.models.suppliers import SupplierPriceList
        from sqlalchemy import delete

        stmt = delete(SupplierPriceList).filter(SupplierPriceList.id == price_list_id)
        result = await self.db.execute(stmt)

        await self.db.commit()
        return result.rowcount > 0
|
||||
1
services/suppliers/app/schemas/__init__.py
Normal file
1
services/suppliers/app/schemas/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# services/suppliers/app/schemas/__init__.py
|
||||
385
services/suppliers/app/schemas/performance.py
Normal file
385
services/suppliers/app/schemas/performance.py
Normal file
@@ -0,0 +1,385 @@
|
||||
# ================================================================
|
||||
# services/suppliers/app/schemas/performance.py
|
||||
# ================================================================
|
||||
"""
|
||||
Performance Tracking and Alert Schemas for Suppliers Service
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from pydantic import BaseModel, Field, validator
|
||||
from decimal import Decimal
|
||||
|
||||
from app.models.performance import (
|
||||
AlertSeverity, AlertType, AlertStatus, PerformanceMetricType,
|
||||
PerformancePeriod
|
||||
)
|
||||
|
||||
|
||||
# ===== Base Schemas =====
|
||||
|
||||
class PerformanceMetricBase(BaseModel):
    """Base schema for performance metrics.

    Shared fields for create/read variants: what is measured (metric_type),
    over which window (period, period_start/period_end), the score itself,
    and raw counters backing the score.
    """
    metric_type: PerformanceMetricType
    period: PerformancePeriod           # granularity of the reporting window
    period_start: datetime
    period_end: datetime
    metric_value: float = Field(ge=0, le=100)  # bounded 0-100 — presumably a percentage score; confirm against calculator
    target_value: Optional[float] = None       # optional goal to compare metric_value against
    # Raw counters for the period; all non-negative, defaulting to zero.
    total_orders: int = Field(ge=0, default=0)
    total_deliveries: int = Field(ge=0, default=0)
    on_time_deliveries: int = Field(ge=0, default=0)
    late_deliveries: int = Field(ge=0, default=0)
    quality_issues: int = Field(ge=0, default=0)
    total_amount: Decimal = Field(ge=0, default=0)  # monetary volume for the period
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class PerformanceMetricCreate(PerformanceMetricBase):
    """Schema for creating performance metrics.

    Extends the base fields with the target supplier and optional free-form
    JSON payloads for extra measurements and external context.
    """
    supplier_id: UUID
    metrics_data: Optional[Dict[str, Any]] = None      # arbitrary extra measurements
    external_factors: Optional[Dict[str, Any]] = None  # contextual factors affecting the period
|
||||
|
||||
|
||||
class PerformanceMetricUpdate(BaseModel):
    """Schema for partially updating performance metrics.

    All fields are optional; only the fields provided are changed.
    """
    metric_value: Optional[float] = Field(None, ge=0, le=100)
    target_value: Optional[float] = None
    notes: Optional[str] = None
    metrics_data: Optional[Dict[str, Any]] = None
    external_factors: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class PerformanceMetric(PerformanceMetricBase):
    """Complete performance metric schema as returned by the API.

    Extends PerformanceMetricBase with identity, trend, and audit fields
    populated from the ORM model.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    previous_value: Optional[float] = None
    # Trend fields are computed by the tracking service; direction is a
    # free-form string (presumably improving/declining/stable — TODO confirm).
    trend_direction: Optional[str] = None
    trend_percentage: Optional[float] = None
    metrics_data: Optional[Dict[str, Any]] = None
    external_factors: Optional[Dict[str, Any]] = None
    calculated_at: datetime

    class Config:
        # Pydantic v2 renamed `orm_mode` to `from_attributes`. The rest of
        # this codebase already uses v2 conventions (Field(pattern=...),
        # `from_attributes` in the suppliers schemas), so use the v2 key to
        # avoid the v2 deprecation warning.
        from_attributes = True
|
||||
|
||||
|
||||
# ===== Alert Schemas =====
|
||||
|
||||
class AlertBase(BaseModel):
    """Base schema for alerts"""
    alert_type: AlertType
    severity: AlertSeverity
    title: str = Field(max_length=255)
    message: str
    description: Optional[str] = None
    # Observed value that fired the alert vs. the configured threshold.
    trigger_value: Optional[float] = None
    threshold_value: Optional[float] = None
    metric_type: Optional[PerformanceMetricType] = None
    recommended_actions: Optional[List[Dict[str, Any]]] = None
    # NOTE(review): presumably closes the alert automatically when the
    # triggering condition clears — confirm against AlertService.
    auto_resolve: bool = False
|
||||
|
||||
|
||||
class AlertCreate(AlertBase):
    """Schema for creating alerts"""
    supplier_id: UUID
    # Optional links to the entity that triggered the alert.
    purchase_order_id: Optional[UUID] = None
    delivery_id: Optional[UUID] = None
    performance_metric_id: Optional[UUID] = None
    priority_score: int = Field(ge=1, le=100, default=50)  # 1 = lowest, 100 = highest
    business_impact: Optional[str] = None
    tags: Optional[List[str]] = None
|
||||
|
||||
|
||||
class AlertUpdate(BaseModel):
    """Schema for partially updating alerts; only provided fields change."""
    status: Optional[AlertStatus] = None
    actions_taken: Optional[List[Dict[str, Any]]] = None
    resolution_notes: Optional[str] = None
    escalated: Optional[bool] = None
|
||||
|
||||
|
||||
class Alert(AlertBase):
    """Complete alert schema as returned by the API.

    Extends AlertBase with identity, lifecycle timestamps, resolution and
    escalation tracking populated from the ORM model.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    status: AlertStatus
    # Optional links to the entity that triggered the alert.
    purchase_order_id: Optional[UUID] = None
    delivery_id: Optional[UUID] = None
    performance_metric_id: Optional[UUID] = None
    # Lifecycle: triggered -> acknowledged -> resolved (timestamps + actors).
    triggered_at: datetime
    acknowledged_at: Optional[datetime] = None
    acknowledged_by: Optional[UUID] = None
    resolved_at: Optional[datetime] = None
    resolved_by: Optional[UUID] = None
    actions_taken: Optional[List[Dict[str, Any]]] = None
    resolution_notes: Optional[str] = None
    escalated: bool = False
    escalated_at: Optional[datetime] = None
    notification_sent: bool = False
    priority_score: int
    business_impact: Optional[str] = None
    tags: Optional[List[str]] = None
    created_at: datetime

    class Config:
        # Pydantic v2 renamed `orm_mode` to `from_attributes`; use the v2 key
        # for consistency with the suppliers schemas and to avoid the
        # deprecation warning.
        from_attributes = True
|
||||
|
||||
|
||||
# ===== Scorecard Schemas =====
|
||||
|
||||
class ScorecardBase(BaseModel):
    """Base schema for supplier scorecards.

    All *_score fields and rate fields are percentages on a 0-100 scale.
    """
    scorecard_name: str = Field(max_length=255)
    period: PerformancePeriod
    period_start: datetime
    period_end: datetime
    # Component scores (0-100); overall_score aggregates them —
    # weighting is defined by the generating service, TODO confirm.
    overall_score: float = Field(ge=0, le=100)
    quality_score: float = Field(ge=0, le=100)
    delivery_score: float = Field(ge=0, le=100)
    cost_score: float = Field(ge=0, le=100)
    service_score: float = Field(ge=0, le=100)
    # Operational rates (0-100) and cost variance (may be negative = savings).
    on_time_delivery_rate: float = Field(ge=0, le=100)
    quality_rejection_rate: float = Field(ge=0, le=100)
    order_accuracy_rate: float = Field(ge=0, le=100)
    response_time_hours: float = Field(ge=0)
    cost_variance_percentage: float
    # Volume/financial aggregates for the period.
    total_orders_processed: int = Field(ge=0, default=0)
    total_amount_processed: Decimal = Field(ge=0, default=0)
    average_order_value: Decimal = Field(ge=0, default=0)
    cost_savings_achieved: Decimal = Field(default=0)
|
||||
|
||||
|
||||
class ScorecardCreate(ScorecardBase):
    """Schema for creating scorecards"""
    supplier_id: UUID
    # Qualitative assessment attached at generation time.
    strengths: Optional[List[str]] = None
    improvement_areas: Optional[List[str]] = None
    recommended_actions: Optional[List[Dict[str, Any]]] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class ScorecardUpdate(BaseModel):
    """Schema for partially updating scorecards; only provided fields change."""
    overall_score: Optional[float] = Field(None, ge=0, le=100)
    quality_score: Optional[float] = Field(None, ge=0, le=100)
    delivery_score: Optional[float] = Field(None, ge=0, le=100)
    cost_score: Optional[float] = Field(None, ge=0, le=100)
    service_score: Optional[float] = Field(None, ge=0, le=100)
    strengths: Optional[List[str]] = None
    improvement_areas: Optional[List[str]] = None
    recommended_actions: Optional[List[Dict[str, Any]]] = None
    notes: Optional[str] = None
    # Marks the scorecard as finalized (no further edits expected).
    is_final: Optional[bool] = None
|
||||
|
||||
|
||||
class Scorecard(ScorecardBase):
    """Complete scorecard schema as returned by the API.

    Extends ScorecardBase with identity, ranking, trend, approval and audit
    fields populated from the ORM model.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    # Relative ranking among the suppliers evaluated in the same run.
    overall_rank: Optional[int] = None
    category_rank: Optional[int] = None
    total_suppliers_evaluated: Optional[int] = None
    score_trend: Optional[str] = None  # free-form trend label — TODO confirm values
    score_change_percentage: Optional[float] = None
    strengths: Optional[List[str]] = None
    improvement_areas: Optional[List[str]] = None
    recommended_actions: Optional[List[Dict[str, Any]]] = None
    # Finalization / approval workflow.
    is_final: bool = False
    approved_by: Optional[UUID] = None
    approved_at: Optional[datetime] = None
    notes: Optional[str] = None
    attachments: Optional[List[Dict[str, Any]]] = None
    generated_at: datetime
    generated_by: UUID

    class Config:
        # Pydantic v2 renamed `orm_mode` to `from_attributes`; use the v2 key
        # for consistency with the suppliers schemas and to avoid the
        # deprecation warning.
        from_attributes = True
|
||||
|
||||
|
||||
# ===== Dashboard Schemas =====
|
||||
|
||||
class PerformanceDashboardSummary(BaseModel):
    """Performance dashboard summary schema.

    Tenant-wide aggregate view served by the dashboard endpoint; all
    averages and rates are on a 0-100 scale.
    """
    total_suppliers: int
    active_suppliers: int
    # Split of suppliers relative to the configured performance threshold.
    suppliers_above_threshold: int
    suppliers_below_threshold: int
    average_overall_score: float
    average_delivery_rate: float
    average_quality_rate: float
    # Alert rollup counts.
    total_active_alerts: int
    critical_alerts: int
    high_priority_alerts: int
    recent_scorecards_generated: int
    cost_savings_this_month: Decimal

    # Performance trends
    performance_trend: str  # improving, declining, stable
    delivery_trend: str
    quality_trend: str

    # Business model insights
    detected_business_model: str  # individual_bakery, central_bakery, hybrid
    model_confidence: float
    business_model_metrics: Dict[str, Any]
|
||||
|
||||
|
||||
class SupplierPerformanceInsights(BaseModel):
    """Supplier performance insights schema.

    Per-supplier drill-down combining current scores, recent activity,
    alert status, categorization, and recommendations.
    """
    supplier_id: UUID
    supplier_name: str
    current_overall_score: float
    previous_score: Optional[float] = None
    score_change_percentage: Optional[float] = None
    performance_rank: Optional[int] = None

    # Key performance indicators
    delivery_performance: float
    quality_performance: float
    cost_performance: float
    service_performance: float

    # Recent metrics
    orders_last_30_days: int
    average_delivery_time: float  # presumably in days — TODO confirm unit
    quality_issues_count: int
    cost_variance: float

    # Alert summary
    active_alerts: int
    resolved_alerts_last_30_days: int
    alert_trend: str

    # Performance categorization
    performance_category: str  # excellent, good, acceptable, needs_improvement, poor
    risk_level: str  # low, medium, high, critical

    # Recommendations
    top_strengths: List[str]
    improvement_priorities: List[str]
    recommended_actions: List[Dict[str, Any]]
|
||||
|
||||
|
||||
class PerformanceAnalytics(BaseModel):
    """Advanced performance analytics schema.

    Cross-supplier analytics for a reporting window: distribution, trends,
    comparative rankings, risk flags, and financial impact.
    """
    period_start: datetime
    period_end: datetime
    total_suppliers_analyzed: int

    # Performance distribution
    performance_distribution: Dict[str, int]  # excellent, good, etc.
    score_ranges: Dict[str, List[float]]  # min, max, avg per range

    # Trend analysis
    overall_trend: Dict[str, float]  # month-over-month changes
    delivery_trends: Dict[str, float]
    quality_trends: Dict[str, float]
    cost_trends: Dict[str, float]

    # Comparative analysis
    top_performers: List[SupplierPerformanceInsights]
    underperformers: List[SupplierPerformanceInsights]
    most_improved: List[SupplierPerformanceInsights]
    biggest_declines: List[SupplierPerformanceInsights]

    # Risk analysis — free-form dicts; shape defined by the analytics
    # service, TODO confirm keys.
    high_risk_suppliers: List[Dict[str, Any]]
    contract_renewals_due: List[Dict[str, Any]]
    certification_expiries: List[Dict[str, Any]]

    # Financial impact
    total_procurement_value: Decimal
    cost_savings_achieved: Decimal
    cost_avoidance: Decimal
    financial_risk_exposure: Decimal
|
||||
|
||||
|
||||
class AlertSummary(BaseModel):
    """Alert summary schema.

    Aggregated counts and timing stats for one (type, severity) bucket.
    """
    alert_type: AlertType
    severity: AlertSeverity
    count: int
    avg_resolution_time_hours: Optional[float] = None
    oldest_alert_age_hours: Optional[float] = None
    trend_percentage: Optional[float] = None
|
||||
|
||||
|
||||
class DashboardFilter(BaseModel):
    """Dashboard filter schema.

    All filters are optional; an omitted filter means "no restriction".
    """
    supplier_ids: Optional[List[UUID]] = None
    supplier_categories: Optional[List[str]] = None
    performance_categories: Optional[List[str]] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    include_inactive: bool = False
|
||||
|
||||
|
||||
class AlertFilter(BaseModel):
    """Alert filter schema.

    All filters are optional; list filters match any of the given values.
    """
    alert_types: Optional[List[AlertType]] = None
    severities: Optional[List[AlertSeverity]] = None
    statuses: Optional[List[AlertStatus]] = None
    supplier_ids: Optional[List[UUID]] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    metric_types: Optional[List[PerformanceMetricType]] = None
|
||||
|
||||
|
||||
# ===== Business Model Detection =====
|
||||
|
||||
class BusinessModelInsights(BaseModel):
    """Business model detection and insights schema.

    Result of heuristic classification of the tenant's procurement
    behavior plus derived optimization recommendations.
    """
    detected_model: str  # individual_bakery, central_bakery, hybrid
    confidence_score: float
    model_characteristics: Dict[str, Any]

    # Model-specific metrics — free-form dicts; shape defined by the
    # detection service, TODO confirm keys.
    supplier_diversity_score: float
    procurement_volume_patterns: Dict[str, Any]
    delivery_frequency_patterns: Dict[str, Any]
    order_size_patterns: Dict[str, Any]

    # Recommendations
    optimization_opportunities: List[Dict[str, Any]]
    recommended_supplier_mix: Dict[str, Any]
    cost_optimization_potential: Decimal
    risk_mitigation_suggestions: List[str]

    # Benchmarking
    industry_comparison: Dict[str, float]
    peer_comparison: Optional[Dict[str, float]] = None
|
||||
|
||||
|
||||
# ===== Export and Reporting =====
|
||||
|
||||
class PerformanceReportRequest(BaseModel):
    """Performance report generation request.

    Describes what to include in a generated report and in which output
    format; `format` is validated against the supported set.
    """
    report_type: str  # scorecard, analytics, alerts, comprehensive
    format: str = Field(pattern="^(pdf|excel|csv|json)$")
    period: PerformancePeriod
    date_from: datetime
    date_to: datetime
    # Restrict to specific suppliers; None means all suppliers.
    supplier_ids: Optional[List[UUID]] = None
    include_charts: bool = True
    include_recommendations: bool = True
    include_benchmarks: bool = True
    custom_metrics: Optional[List[str]] = None
|
||||
|
||||
|
||||
class ExportDataResponse(BaseModel):
    """Export data response schema.

    Tracks an asynchronous export job; `file_url` is populated once the
    status reaches "ready" and becomes invalid after `expires_at`.
    """
    export_id: UUID
    format: str
    file_url: Optional[str] = None
    file_size_bytes: Optional[int] = None
    generated_at: datetime
    expires_at: datetime
    status: str  # generating, ready, expired, failed
    error_message: Optional[str] = None
|
||||
# ==== File boundary: services/suppliers/app/schemas/suppliers.py (732 lines, new file) ====
|
||||
# services/suppliers/app/schemas/suppliers.py
|
||||
"""
|
||||
Pydantic schemas for supplier-related API requests and responses
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, Field, EmailStr
|
||||
from typing import List, Optional, Dict, Any, Union
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from app.models.suppliers import (
|
||||
SupplierType, SupplierStatus, PaymentTerms,
|
||||
QualityRating
|
||||
)
|
||||
|
||||
# NOTE: PO, Delivery, and Invoice schemas remain for backward compatibility
|
||||
# The primary implementation has moved to Procurement Service (services/procurement/)
|
||||
# These schemas support legacy endpoints in suppliers service (app/api/purchase_orders.py)
|
||||
#
|
||||
# Migration Status:
|
||||
# - ✅ Procurement Service fully operational with enhanced features
|
||||
# - ⚠️ Supplier service endpoints still active for backward compatibility
|
||||
# - 📋 Deprecation Timeline: Q2 2026 (after 6-month dual-operation period)
|
||||
#
|
||||
# Action Required:
|
||||
# 1. All new integrations should use Procurement Service endpoints
|
||||
# 2. Update client applications to use ProcurementServiceClient
|
||||
# 3. Monitor usage of supplier service PO endpoints via logs
|
||||
# 4. Plan migration of remaining clients by Q1 2026
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# SUPPLIER SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class SupplierCreate(BaseModel):
    """Schema for creating suppliers.

    `name` and `supplier_type` are required; everything else is optional
    or defaulted.
    """
    name: str = Field(..., min_length=1, max_length=255)
    supplier_code: Optional[str] = Field(None, max_length=50)
    tax_id: Optional[str] = Field(None, max_length=50)
    registration_number: Optional[str] = Field(None, max_length=100)
    supplier_type: SupplierType
    contact_person: Optional[str] = Field(None, max_length=200)
    email: Optional[EmailStr] = None
    phone: Optional[str] = Field(None, max_length=30)
    mobile: Optional[str] = Field(None, max_length=30)
    website: Optional[str] = Field(None, max_length=255)

    # Address
    address_line1: Optional[str] = Field(None, max_length=255)
    address_line2: Optional[str] = Field(None, max_length=255)
    city: Optional[str] = Field(None, max_length=100)
    state_province: Optional[str] = Field(None, max_length=100)
    postal_code: Optional[str] = Field(None, max_length=20)
    country: Optional[str] = Field(None, max_length=100)

    # Business terms
    payment_terms: PaymentTerms = PaymentTerms.net_30
    credit_limit: Optional[Decimal] = Field(None, ge=0)
    currency: str = Field(default="EUR", max_length=3)  # ISO 4217 code
    # Lead time presumably in days (0-365) — TODO confirm unit.
    standard_lead_time: int = Field(default=3, ge=0, le=365)
    minimum_order_amount: Optional[Decimal] = Field(None, ge=0)
    delivery_area: Optional[str] = Field(None, max_length=255)

    # Additional information
    notes: Optional[str] = None
    # Accepts either a structured dict or a flat list of certification names.
    certifications: Optional[Union[Dict[str, Any], List[str]]] = None
    business_hours: Optional[Dict[str, Any]] = None
    specializations: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierUpdate(BaseModel):
    """Schema for partially updating suppliers.

    All fields are optional; only provided fields are changed. Unlike
    SupplierCreate this also allows changing `status`.
    """
    name: Optional[str] = Field(None, min_length=1, max_length=255)
    supplier_code: Optional[str] = Field(None, max_length=50)
    tax_id: Optional[str] = Field(None, max_length=50)
    registration_number: Optional[str] = Field(None, max_length=100)
    supplier_type: Optional[SupplierType] = None
    status: Optional[SupplierStatus] = None
    contact_person: Optional[str] = Field(None, max_length=200)
    email: Optional[EmailStr] = None
    phone: Optional[str] = Field(None, max_length=30)
    mobile: Optional[str] = Field(None, max_length=30)
    website: Optional[str] = Field(None, max_length=255)

    # Address
    address_line1: Optional[str] = Field(None, max_length=255)
    address_line2: Optional[str] = Field(None, max_length=255)
    city: Optional[str] = Field(None, max_length=100)
    state_province: Optional[str] = Field(None, max_length=100)
    postal_code: Optional[str] = Field(None, max_length=20)
    country: Optional[str] = Field(None, max_length=100)

    # Business terms
    payment_terms: Optional[PaymentTerms] = None
    credit_limit: Optional[Decimal] = Field(None, ge=0)
    currency: Optional[str] = Field(None, max_length=3)  # ISO 4217 code
    standard_lead_time: Optional[int] = Field(None, ge=0, le=365)
    minimum_order_amount: Optional[Decimal] = Field(None, ge=0)
    delivery_area: Optional[str] = Field(None, max_length=255)

    # Additional information
    notes: Optional[str] = None
    certifications: Optional[Union[Dict[str, Any], List[str]]] = None
    business_hours: Optional[Dict[str, Any]] = None
    specializations: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierApproval(BaseModel):
    """Schema for supplier approval/rejection.

    `action` is restricted to the literal strings "approve" or "reject".
    """
    action: str = Field(..., pattern="^(approve|reject)$")
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class SupplierResponse(BaseModel):
    """Schema for supplier responses (full detail view).

    Read model populated from the ORM supplier, including aggregated
    performance metrics, approval state, and audit fields.
    """
    id: UUID
    tenant_id: UUID
    name: str
    supplier_code: Optional[str] = None
    tax_id: Optional[str] = None
    registration_number: Optional[str] = None
    supplier_type: SupplierType
    status: SupplierStatus
    contact_person: Optional[str] = None
    email: Optional[str] = None
    phone: Optional[str] = None
    mobile: Optional[str] = None
    website: Optional[str] = None

    # Address
    address_line1: Optional[str] = None
    address_line2: Optional[str] = None
    city: Optional[str] = None
    state_province: Optional[str] = None
    postal_code: Optional[str] = None
    country: Optional[str] = None

    # Business terms
    payment_terms: PaymentTerms
    credit_limit: Optional[Decimal] = None
    currency: str
    standard_lead_time: int
    minimum_order_amount: Optional[Decimal] = None
    delivery_area: Optional[str] = None

    # Performance metrics (aggregated over the supplier's order history)
    quality_rating: Optional[float] = None
    delivery_rating: Optional[float] = None
    total_orders: int
    total_amount: Decimal

    # Approval info
    approved_by: Optional[UUID] = None
    approved_at: Optional[datetime] = None
    rejection_reason: Optional[str] = None

    # Additional information
    notes: Optional[str] = None
    certifications: Optional[Union[Dict[str, Any], List[str]]] = None
    business_hours: Optional[Dict[str, Any]] = None
    specializations: Optional[Dict[str, Any]] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: UUID
    updated_by: UUID

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
class SupplierSummary(BaseModel):
    """Schema for supplier summary (list view).

    Trimmed-down projection of SupplierResponse for listings and for
    embedding inside purchase-order/delivery responses.
    """
    id: UUID
    name: str
    supplier_code: Optional[str] = None
    supplier_type: SupplierType
    status: SupplierStatus
    contact_person: Optional[str] = None
    email: Optional[str] = None
    phone: Optional[str] = None
    city: Optional[str] = None
    country: Optional[str] = None

    # Business terms - Added for list view
    payment_terms: PaymentTerms
    standard_lead_time: int
    minimum_order_amount: Optional[Decimal] = None

    # Performance metrics
    quality_rating: Optional[float] = None
    delivery_rating: Optional[float] = None
    total_orders: int
    total_amount: Decimal
    created_at: datetime

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
class SupplierDeletionSummary(BaseModel):
    """Schema for supplier deletion summary.

    Returned after a cascading delete; counts how many dependent records
    were removed alongside the supplier.
    """
    supplier_name: str
    deleted_price_lists: int = 0
    deleted_quality_reviews: int = 0
    deleted_performance_metrics: int = 0
    deleted_alerts: int = 0
    deleted_scorecards: int = 0
    deletion_timestamp: datetime

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# PURCHASE ORDER SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class PurchaseOrderItemCreate(BaseModel):
    """Schema for creating purchase order items.

    Quantity must be positive; unit price must be strictly greater than zero.
    """
    inventory_product_id: UUID
    product_code: Optional[str] = Field(None, max_length=100)
    ordered_quantity: int = Field(..., gt=0)
    unit_of_measure: str = Field(..., max_length=20)
    unit_price: Decimal = Field(..., gt=0)
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderItemUpdate(BaseModel):
    """Schema for partially updating purchase order items."""
    ordered_quantity: Optional[int] = Field(None, gt=0)
    unit_price: Optional[Decimal] = Field(None, gt=0)
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderItemResponse(BaseModel):
    """Schema for purchase order item responses.

    Read model including receipt progress (received vs. remaining quantity).
    """
    id: UUID
    tenant_id: UUID
    purchase_order_id: UUID
    price_list_item_id: Optional[UUID] = None
    inventory_product_id: UUID
    product_code: Optional[str] = None
    ordered_quantity: int
    unit_of_measure: str
    unit_price: Decimal
    line_total: Decimal  # presumably ordered_quantity * unit_price — TODO confirm
    # Receipt tracking updated as deliveries arrive.
    received_quantity: int
    remaining_quantity: int
    quality_requirements: Optional[str] = None
    item_notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
class PurchaseOrderCreate(BaseModel):
    """Schema for creating purchase orders.

    At least one line item is required; financial adjustment fields default
    to zero.
    """
    supplier_id: UUID
    reference_number: Optional[str] = Field(None, max_length=100)
    priority: str = Field(default="normal", max_length=20)
    required_delivery_date: Optional[datetime] = None

    # Delivery information
    delivery_address: Optional[str] = None
    delivery_instructions: Optional[str] = None
    delivery_contact: Optional[str] = Field(None, max_length=200)
    delivery_phone: Optional[str] = Field(None, max_length=30)

    # Financial information
    tax_amount: Decimal = Field(default=0, ge=0)
    shipping_cost: Decimal = Field(default=0, ge=0)
    discount_amount: Decimal = Field(default=0, ge=0)

    # Additional information
    notes: Optional[str] = None
    internal_notes: Optional[str] = None
    terms_and_conditions: Optional[str] = None

    # Items — Pydantic v2 renamed the list constraint `min_items` to
    # `min_length`; v2 is in use throughout this file (Field(pattern=...),
    # Config.from_attributes), so use the v2 name to avoid the deprecation.
    items: List[PurchaseOrderItemCreate] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class PurchaseOrderUpdate(BaseModel):
    """Schema for partially updating purchase orders.

    All fields are optional; line items are updated via the item schemas,
    not here.
    """
    reference_number: Optional[str] = Field(None, max_length=100)
    priority: Optional[str] = Field(None, max_length=20)
    required_delivery_date: Optional[datetime] = None
    estimated_delivery_date: Optional[datetime] = None

    # Delivery information
    delivery_address: Optional[str] = None
    delivery_instructions: Optional[str] = None
    delivery_contact: Optional[str] = Field(None, max_length=200)
    delivery_phone: Optional[str] = Field(None, max_length=30)

    # Financial information
    tax_amount: Optional[Decimal] = Field(None, ge=0)
    shipping_cost: Optional[Decimal] = Field(None, ge=0)
    discount_amount: Optional[Decimal] = Field(None, ge=0)

    # Additional information
    notes: Optional[str] = None
    internal_notes: Optional[str] = None
    terms_and_conditions: Optional[str] = None

    # Supplier communication
    supplier_reference: Optional[str] = Field(None, max_length=100)
|
||||
|
||||
|
||||
class PurchaseOrderStatusUpdate(BaseModel):
    """Schema for updating purchase order status.

    Status is a plain string here because the PurchaseOrderStatus enum
    moved to the Procurement Service; values are not validated locally.
    """
    status: str  # PurchaseOrderStatus - moved to Procurement Service
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderApproval(BaseModel):
    """Schema for purchase order approval/rejection.

    `action` is restricted to the literal strings "approve" or "reject".
    """
    action: str = Field(..., pattern="^(approve|reject)$")
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class PurchaseOrderResponse(BaseModel):
    """Schema for purchase order responses (full detail view).

    Read model from the ORM purchase order. `supplier` and `items` are
    populated separately by the service layer, not by ORM attribute access.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    po_number: str
    reference_number: Optional[str] = None
    status: str  # PurchaseOrderStatus
    priority: str
    order_date: datetime
    required_delivery_date: Optional[datetime] = None
    estimated_delivery_date: Optional[datetime] = None

    # Financial information
    subtotal: Decimal
    tax_amount: Decimal
    shipping_cost: Decimal
    discount_amount: Decimal
    total_amount: Decimal
    currency: str
    # NOTE(review): presumably total_amount = subtotal + tax + shipping
    # - discount; computed server-side — confirm against the service.

    # Delivery information
    delivery_address: Optional[str] = None
    delivery_instructions: Optional[str] = None
    delivery_contact: Optional[str] = None
    delivery_phone: Optional[str] = None

    # Approval workflow
    requires_approval: bool
    approved_by: Optional[UUID] = None
    approved_at: Optional[datetime] = None
    rejection_reason: Optional[str] = None

    # Communication tracking
    sent_to_supplier_at: Optional[datetime] = None
    supplier_confirmation_date: Optional[datetime] = None
    supplier_reference: Optional[str] = None

    # Additional information
    notes: Optional[str] = None
    internal_notes: Optional[str] = None
    terms_and_conditions: Optional[str] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: UUID
    updated_by: UUID

    # Related data (populated separately)
    supplier: Optional[SupplierSummary] = None
    items: Optional[List[PurchaseOrderItemResponse]] = None

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
class PurchaseOrderSummary(BaseModel):
    """Schema for purchase order summary (list view).

    Trimmed projection of PurchaseOrderResponse; `supplier_name` is joined
    in by the service layer.
    """
    id: UUID
    po_number: str
    supplier_id: UUID
    supplier_name: Optional[str] = None
    status: str  # PurchaseOrderStatus
    priority: str
    order_date: datetime
    required_delivery_date: Optional[datetime] = None
    total_amount: Decimal
    currency: str
    created_at: datetime

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DELIVERY SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class DeliveryItemCreate(BaseModel):
    """Schema for creating delivery items.

    NOTE(review): no cross-field validation here — accepted_quantity +
    rejected_quantity is not checked against delivered_quantity; confirm
    whether the service layer enforces that invariant.
    """
    purchase_order_item_id: UUID
    inventory_product_id: UUID
    ordered_quantity: int = Field(..., gt=0)
    delivered_quantity: int = Field(..., ge=0)
    accepted_quantity: int = Field(..., ge=0)
    rejected_quantity: int = Field(default=0, ge=0)

    # Quality information
    batch_lot_number: Optional[str] = Field(None, max_length=100)
    expiry_date: Optional[datetime] = None
    quality_grade: Optional[str] = Field(None, max_length=20)

    # Issues and notes
    quality_issues: Optional[str] = None
    rejection_reason: Optional[str] = None
    item_notes: Optional[str] = None
|
||||
|
||||
|
||||
class DeliveryItemResponse(BaseModel):
    """Schema for delivery item responses (read model from the ORM)."""
    id: UUID
    tenant_id: UUID
    delivery_id: UUID
    purchase_order_item_id: UUID
    inventory_product_id: UUID
    # Quantity breakdown: delivered splits into accepted + rejected.
    ordered_quantity: int
    delivered_quantity: int
    accepted_quantity: int
    rejected_quantity: int
    batch_lot_number: Optional[str] = None
    expiry_date: Optional[datetime] = None
    quality_grade: Optional[str] = None
    quality_issues: Optional[str] = None
    rejection_reason: Optional[str] = None
    item_notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
class DeliveryCreate(BaseModel):
    """Schema for creating deliveries.

    A delivery records an inbound shipment against an existing purchase
    order; at least one line item is required.
    """
    purchase_order_id: UUID
    supplier_id: UUID
    supplier_delivery_note: Optional[str] = Field(None, max_length=100)
    scheduled_date: Optional[datetime] = None
    estimated_arrival: Optional[datetime] = None

    # Delivery details
    delivery_address: Optional[str] = None
    delivery_contact: Optional[str] = Field(None, max_length=200)
    delivery_phone: Optional[str] = Field(None, max_length=30)
    carrier_name: Optional[str] = Field(None, max_length=200)
    tracking_number: Optional[str] = Field(None, max_length=100)

    # Additional information
    notes: Optional[str] = None

    # Items — Pydantic v2 renamed the list constraint `min_items` to
    # `min_length`; v2 is in use throughout this file (Field(pattern=...),
    # Config.from_attributes), so use the v2 name to avoid the deprecation.
    items: List[DeliveryItemCreate] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class DeliveryUpdate(BaseModel):
    """Schema for partially updating deliveries.

    All fields are optional; inspection fields are typically set during
    receipt confirmation.
    """
    supplier_delivery_note: Optional[str] = Field(None, max_length=100)
    scheduled_date: Optional[datetime] = None
    estimated_arrival: Optional[datetime] = None
    actual_arrival: Optional[datetime] = None

    # Delivery details
    delivery_address: Optional[str] = None
    delivery_contact: Optional[str] = Field(None, max_length=200)
    delivery_phone: Optional[str] = Field(None, max_length=30)
    carrier_name: Optional[str] = Field(None, max_length=200)
    tracking_number: Optional[str] = Field(None, max_length=100)

    # Quality inspection
    inspection_passed: Optional[bool] = None
    inspection_notes: Optional[str] = None
    quality_issues: Optional[Dict[str, Any]] = None

    # Additional information
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class DeliveryStatusUpdate(BaseModel):
    """Schema for updating delivery status.

    `status` is a plain string (DeliveryStatus enum lives elsewhere).
    When `update_timestamps` is True the service presumably stamps the
    matching timestamp field for the new status — TODO confirm.
    """
    status: str  # DeliveryStatus
    notes: Optional[str] = None
    update_timestamps: bool = Field(default=True)
|
||||
|
||||
|
||||
class DeliveryReceiptConfirmation(BaseModel):
    """Schema for confirming delivery receipt.

    Inspection defaults to passed; provide `quality_issues` when it fails.
    """
    inspection_passed: bool = True
    inspection_notes: Optional[str] = None
    quality_issues: Optional[Dict[str, Any]] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class DeliveryResponse(BaseModel):
    """Full delivery representation returned by the API.

    Built directly from ORM objects (``Config.from_attributes``) and can
    optionally embed related supplier / purchase-order / item summaries.
    """
    id: UUID
    tenant_id: UUID
    purchase_order_id: UUID
    supplier_id: UUID
    delivery_number: str
    supplier_delivery_note: Optional[str] = None
    status: str  # DeliveryStatus

    # Timing
    scheduled_date: Optional[datetime] = None
    estimated_arrival: Optional[datetime] = None
    actual_arrival: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    # Delivery details
    delivery_address: Optional[str] = None
    delivery_contact: Optional[str] = None
    delivery_phone: Optional[str] = None
    carrier_name: Optional[str] = None
    tracking_number: Optional[str] = None

    # Quality inspection
    inspection_passed: Optional[bool] = None
    inspection_notes: Optional[str] = None
    quality_issues: Optional[Dict[str, Any]] = None

    # Receipt information
    received_by: Optional[UUID] = None
    received_at: Optional[datetime] = None

    # Additional information
    notes: Optional[str] = None
    photos: Optional[Dict[str, Any]] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: UUID

    # Related data — None unless the service layer populates the expansion
    supplier: Optional[SupplierSummary] = None
    purchase_order: Optional[PurchaseOrderSummary] = None
    items: Optional[List[DeliveryItemResponse]] = None

    class Config:
        # Allow construction straight from SQLAlchemy model instances.
        from_attributes = True
|
||||
|
||||
|
||||
class DeliverySummary(BaseModel):
    """Compact delivery representation for list views.

    Includes denormalized supplier/PO display fields so list endpoints
    don't need per-row expansions.
    """
    id: UUID
    delivery_number: str
    supplier_id: UUID
    supplier_name: Optional[str] = None  # denormalized for display
    purchase_order_id: UUID
    po_number: Optional[str] = None  # denormalized for display
    status: str  # DeliveryStatus
    scheduled_date: Optional[datetime] = None
    actual_arrival: Optional[datetime] = None
    inspection_passed: Optional[bool] = None
    created_at: datetime

    class Config:
        # Allow construction straight from SQLAlchemy model instances.
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# SEARCH AND FILTER SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class SupplierSearchParams(BaseModel):
    """Search parameters for suppliers.

    All filters are optional; pagination is bounded to 1000 rows per page.
    """
    search_term: Optional[str] = Field(None, max_length=100)
    supplier_type: Optional[SupplierType] = None
    status: Optional[SupplierStatus] = None
    limit: int = Field(default=50, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)
|
||||
|
||||
|
||||
class PurchaseOrderSearchParams(BaseModel):
    """Search parameters for purchase orders.

    All filters are optional and combined; pagination is bounded to 1000
    rows per page.
    """
    supplier_id: Optional[UUID] = None
    status: Optional[str] = None  # PurchaseOrderStatus
    priority: Optional[str] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    search_term: Optional[str] = Field(None, max_length=100)
    limit: int = Field(default=50, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)
|
||||
|
||||
|
||||
class DeliverySearchParams(BaseModel):
    """Search parameters for deliveries.

    All filters are optional and combined; pagination is bounded to 1000
    rows per page.
    """
    supplier_id: Optional[UUID] = None
    status: Optional[str] = None  # DeliveryStatus
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    search_term: Optional[str] = Field(None, max_length=100)
    limit: int = Field(default=50, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# SUPPLIER PRICE LIST SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class SupplierPriceListCreate(BaseModel):
    """Schema for creating supplier price list items.

    Links a supplier's catalog entry to an inventory product with pricing,
    packaging, and quality metadata.
    """
    inventory_product_id: UUID
    product_code: Optional[str] = Field(None, max_length=100)
    unit_price: Decimal = Field(..., gt=0)
    unit_of_measure: str = Field(..., max_length=20)
    minimum_order_quantity: Optional[int] = Field(None, ge=1)
    price_per_unit: Decimal = Field(..., gt=0)
    # NOTE(review): the example shows a list of {quantity, price} entries,
    # but the annotation is Dict — confirm the intended container shape.
    tier_pricing: Optional[Dict[str, Any]] = None  # [{quantity: 100, price: 2.50}, ...]
    # NOTE(review): default is a naive local datetime; the dashboard code
    # uses timezone-aware UTC (datetime.now(timezone.utc)) — confirm and align.
    effective_date: Optional[datetime] = Field(default_factory=lambda: datetime.now())
    expiry_date: Optional[datetime] = None
    is_active: bool = True
    brand: Optional[str] = Field(None, max_length=100)
    packaging_size: Optional[str] = Field(None, max_length=50)
    origin_country: Optional[str] = Field(None, max_length=100)
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierPriceListUpdate(BaseModel):
    """Schema for updating supplier price list items.

    All fields optional for partial updates; identity fields
    (inventory_product_id, product_code) are intentionally not updatable here.
    """
    unit_price: Optional[Decimal] = Field(None, gt=0)
    unit_of_measure: Optional[str] = Field(None, max_length=20)
    minimum_order_quantity: Optional[int] = Field(None, ge=1)
    tier_pricing: Optional[Dict[str, Any]] = None
    effective_date: Optional[datetime] = None
    expiry_date: Optional[datetime] = None
    is_active: Optional[bool] = None
    brand: Optional[str] = Field(None, max_length=100)
    packaging_size: Optional[str] = Field(None, max_length=50)
    origin_country: Optional[str] = Field(None, max_length=100)
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierPriceListResponse(BaseModel):
    """Full supplier price list item as returned by the API.

    Mirrors the price-list ORM model (``Config.from_attributes``), including
    tenant scoping and audit fields.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    inventory_product_id: UUID
    product_code: Optional[str] = None
    unit_price: Decimal
    unit_of_measure: str
    minimum_order_quantity: Optional[int] = None
    price_per_unit: Decimal
    tier_pricing: Optional[Dict[str, Any]] = None
    effective_date: datetime
    expiry_date: Optional[datetime] = None
    is_active: bool
    brand: Optional[str] = None
    packaging_size: Optional[str] = None
    origin_country: Optional[str] = None
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None

    # Audit fields
    created_at: datetime
    updated_at: datetime
    created_by: UUID
    updated_by: UUID

    class Config:
        # Allow construction straight from SQLAlchemy model instances.
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# STATISTICS AND REPORTING SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class SupplierStatistics(BaseModel):
    """Aggregate supplier statistics for reporting endpoints."""
    total_suppliers: int
    active_suppliers: int
    pending_suppliers: int
    avg_quality_rating: float
    avg_delivery_rating: float
    total_spend: float
|
||||
|
||||
|
||||
class PurchaseOrderStatistics(BaseModel):
    """Aggregate purchase-order statistics for reporting endpoints."""
    total_orders: int
    status_counts: Dict[str, int]  # status value -> count of orders in it
    this_month_orders: int
    this_month_spend: float
    avg_order_value: float
    overdue_count: int
    pending_approval: int
|
||||
|
||||
|
||||
class DeliveryPerformanceStats(BaseModel):
    """Delivery performance statistics (timeliness and quality rates)."""
    total_deliveries: int
    on_time_deliveries: int
    late_deliveries: int
    failed_deliveries: int
    on_time_percentage: float
    avg_delay_hours: float
    quality_pass_rate: float
|
||||
|
||||
|
||||
class DeliverySummaryStats(BaseModel):
    """Headline delivery counters for dashboard tiles."""
    todays_deliveries: int
    this_week_deliveries: int
    overdue_deliveries: int
    in_transit_deliveries: int
|
||||
18
services/suppliers/app/services/__init__.py
Normal file
18
services/suppliers/app/services/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# services/suppliers/app/services/__init__.py
|
||||
"""
|
||||
Services package for the Supplier service
|
||||
"""
|
||||
|
||||
from .supplier_service import SupplierService
|
||||
# REMOVED: PurchaseOrderService, DeliveryService - moved to Procurement Service
|
||||
# from .purchase_order_service import PurchaseOrderService
|
||||
# from .delivery_service import DeliveryService
|
||||
from .performance_service import PerformanceTrackingService, AlertService
|
||||
from .dashboard_service import DashboardService
|
||||
|
||||
__all__ = [
|
||||
'SupplierService',
|
||||
'PerformanceTrackingService',
|
||||
'AlertService',
|
||||
'DashboardService'
|
||||
]
|
||||
721
services/suppliers/app/services/dashboard_service.py
Normal file
721
services/suppliers/app/services/dashboard_service.py
Normal file
@@ -0,0 +1,721 @@
|
||||
# ================================================================
|
||||
# services/suppliers/app/services/dashboard_service.py
|
||||
# ================================================================
|
||||
"""
|
||||
Supplier Dashboard and Analytics Service
|
||||
Comprehensive supplier performance dashboards and business intelligence
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, or_, desc, asc, text
|
||||
from decimal import Decimal
|
||||
import structlog
|
||||
|
||||
from app.models.suppliers import (
|
||||
Supplier, PurchaseOrder, Delivery, SupplierQualityReview,
|
||||
SupplierStatus, SupplierType, PurchaseOrderStatus, DeliveryStatus
|
||||
)
|
||||
from app.models.performance import (
|
||||
SupplierPerformanceMetric, SupplierScorecard, SupplierAlert,
|
||||
PerformanceMetricType, PerformancePeriod, AlertSeverity, AlertStatus
|
||||
)
|
||||
from app.schemas.performance import (
|
||||
PerformanceDashboardSummary, SupplierPerformanceInsights,
|
||||
PerformanceAnalytics, BusinessModelInsights, AlertSummary
|
||||
)
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class DashboardService:
|
||||
"""Service for supplier performance dashboards and analytics"""
|
||||
|
||||
    def __init__(self):
        # Bind a service tag so every log line emitted by this service is
        # attributable in aggregated structlog output.
        self.logger = logger.bind(service="dashboard_service")
|
||||
|
||||
    async def get_performance_dashboard_summary(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None
    ) -> PerformanceDashboardSummary:
        """Get comprehensive performance dashboard summary.

        Fans out to the private helper queries (supplier counts, performance,
        alerts, financials, business-model detection, trends) and assembles a
        single ``PerformanceDashboardSummary``.

        Args:
            db: Async database session.
            tenant_id: Tenant whose suppliers are summarized.
            date_from: Start of the reporting window; defaults to 30 days
                before ``date_to``.
            date_to: End of the reporting window; defaults to now (UTC).

        Raises:
            Exception: Any helper/database error is logged and re-raised.
        """
        try:
            # Default date range - last 30 days
            if not date_to:
                date_to = datetime.now(timezone.utc)
            if not date_from:
                date_from = date_to - timedelta(days=30)

            self.logger.info("Generating dashboard summary",
                           tenant_id=str(tenant_id),
                           date_from=date_from.isoformat(),
                           date_to=date_to.isoformat())

            # Get supplier statistics (tenant-wide, not window-scoped)
            supplier_stats = await self._get_supplier_statistics(db, tenant_id)

            # Get performance statistics for the window
            performance_stats = await self._get_performance_statistics(db, tenant_id, date_from, date_to)

            # Get alert statistics for the window
            alert_stats = await self._get_alert_statistics(db, tenant_id, date_from, date_to)

            # Get financial statistics (estimated cost savings) for the window
            financial_stats = await self._get_financial_statistics(db, tenant_id, date_from, date_to)

            # Get business model insights (detection from supplier mix)
            business_model = await self._detect_business_model(db, tenant_id)

            # Calculate trends vs. the preceding window of equal length
            trends = await self._calculate_performance_trends(db, tenant_id, date_from, date_to)

            # Assemble the response purely from the helper results above.
            return PerformanceDashboardSummary(
                total_suppliers=supplier_stats['total_suppliers'],
                active_suppliers=supplier_stats['active_suppliers'],
                suppliers_above_threshold=performance_stats['above_threshold'],
                suppliers_below_threshold=performance_stats['below_threshold'],
                average_overall_score=performance_stats['avg_overall_score'],
                average_delivery_rate=performance_stats['avg_delivery_rate'],
                average_quality_rate=performance_stats['avg_quality_rate'],
                total_active_alerts=alert_stats['total_active'],
                critical_alerts=alert_stats['critical_alerts'],
                high_priority_alerts=alert_stats['high_priority'],
                recent_scorecards_generated=performance_stats['recent_scorecards'],
                cost_savings_this_month=financial_stats['cost_savings'],
                performance_trend=trends['performance_trend'],
                delivery_trend=trends['delivery_trend'],
                quality_trend=trends['quality_trend'],
                detected_business_model=business_model['model'],
                model_confidence=business_model['confidence'],
                business_model_metrics=business_model['metrics']
            )

        except Exception as e:
            self.logger.error("Error generating dashboard summary", error=str(e))
            raise
|
||||
|
||||
    async def get_supplier_performance_insights(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        supplier_id: UUID,
        days_back: int = 30
    ) -> SupplierPerformanceInsights:
        """Get detailed performance insights for a specific supplier.

        Compares the current window against the preceding one, classifies
        performance and risk, and attaches generated recommendations.

        Args:
            db: Async database session.
            tenant_id: Tenant owning the supplier.
            supplier_id: Supplier to analyze.
            days_back: Size of the analysis window in days (default 30).

        Raises:
            Exception: Any helper/database error is logged and re-raised.
        """
        try:
            date_to = datetime.now(timezone.utc)
            date_from = date_to - timedelta(days=days_back)

            # Get supplier info (display name; currently unused in the
            # response object itself)
            supplier = await self._get_supplier_info(db, supplier_id, tenant_id)

            # Get current performance metrics
            current_metrics = await self._get_current_performance_metrics(db, supplier_id, tenant_id)

            # Get previous period metrics for comparison
            previous_metrics = await self._get_previous_performance_metrics(db, supplier_id, tenant_id, days_back)

            # Get recent activity statistics (orders, delivery times, issues)
            activity_stats = await self._get_supplier_activity_stats(db, supplier_id, tenant_id, date_from, date_to)

            # Get alert summary for the window
            alert_summary = await self._get_supplier_alert_summary(db, supplier_id, tenant_id, date_from, date_to)

            # Calculate performance categorization and risk from the metrics
            performance_category = self._categorize_performance(current_metrics.get('overall_score', 0))
            risk_level = self._assess_risk_level(current_metrics, alert_summary)

            # Generate strengths / improvement priorities / actions
            recommendations = await self._generate_supplier_recommendations(
                db, supplier_id, tenant_id, current_metrics, activity_stats, alert_summary
            )

            return SupplierPerformanceInsights(
                supplier_id=supplier_id,
                current_overall_score=current_metrics.get('overall_score', 0),
                previous_score=previous_metrics.get('overall_score'),
                # None when there is no usable previous score to compare to
                score_change_percentage=self._calculate_change_percentage(
                    current_metrics.get('overall_score', 0),
                    previous_metrics.get('overall_score')
                ),
                performance_rank=current_metrics.get('rank'),
                delivery_performance=current_metrics.get('delivery_performance', 0),
                quality_performance=current_metrics.get('quality_performance', 0),
                cost_performance=current_metrics.get('cost_performance', 0),
                service_performance=current_metrics.get('service_performance', 0),
                orders_last_30_days=activity_stats['orders_count'],
                average_delivery_time=activity_stats['avg_delivery_time'],
                quality_issues_count=activity_stats['quality_issues'],
                cost_variance=activity_stats['cost_variance'],
                active_alerts=alert_summary['active_count'],
                resolved_alerts_last_30_days=alert_summary['resolved_count'],
                alert_trend=alert_summary['trend'],
                performance_category=performance_category,
                risk_level=risk_level,
                top_strengths=recommendations['strengths'],
                improvement_priorities=recommendations['improvements'],
                recommended_actions=recommendations['actions']
            )

        except Exception as e:
            self.logger.error("Error generating supplier insights",
                            supplier_id=str(supplier_id),
                            error=str(e))
            raise
|
||||
|
||||
    async def get_performance_analytics(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        period_days: int = 90
    ) -> PerformanceAnalytics:
        """Get advanced performance analytics.

        Aggregates distribution, trend, comparative, risk, and financial
        analyses over the trailing ``period_days`` window.

        Args:
            db: Async database session.
            tenant_id: Tenant to analyze.
            period_days: Analysis window length in days (default 90).

        Raises:
            Exception: Any helper/database error is logged and re-raised.
        """
        try:
            date_to = datetime.now(timezone.utc)
            date_from = date_to - timedelta(days=period_days)

            # Get performance distribution (buckets and score ranges)
            performance_distribution = await self._get_performance_distribution(db, tenant_id, date_from, date_to)

            # Get trend analysis per dimension
            trends = await self._get_detailed_trends(db, tenant_id, date_from, date_to)

            # Get comparative analysis (best/worst/most changed suppliers)
            comparative_analysis = await self._get_comparative_analysis(db, tenant_id, date_from, date_to)

            # Get risk analysis (high-risk suppliers, expirations)
            risk_analysis = await self._get_risk_analysis(db, tenant_id, date_from, date_to)

            # Get financial impact (procurement value, savings, exposure)
            financial_impact = await self._get_financial_impact(db, tenant_id, date_from, date_to)

            return PerformanceAnalytics(
                period_start=date_from,
                period_end=date_to,
                total_suppliers_analyzed=performance_distribution['total_suppliers'],
                performance_distribution=performance_distribution['distribution'],
                score_ranges=performance_distribution['score_ranges'],
                overall_trend=trends['overall'],
                delivery_trends=trends['delivery'],
                quality_trends=trends['quality'],
                cost_trends=trends['cost'],
                top_performers=comparative_analysis['top_performers'],
                underperformers=comparative_analysis['underperformers'],
                most_improved=comparative_analysis['most_improved'],
                biggest_declines=comparative_analysis['biggest_declines'],
                high_risk_suppliers=risk_analysis['high_risk'],
                contract_renewals_due=risk_analysis['contract_renewals'],
                certification_expiries=risk_analysis['certification_expiries'],
                total_procurement_value=financial_impact['total_value'],
                cost_savings_achieved=financial_impact['cost_savings'],
                cost_avoidance=financial_impact['cost_avoidance'],
                financial_risk_exposure=financial_impact['risk_exposure']
            )

        except Exception as e:
            self.logger.error("Error generating performance analytics", error=str(e))
            raise
|
||||
|
||||
    async def get_business_model_insights(
        self,
        db: AsyncSession,
        tenant_id: UUID
    ) -> BusinessModelInsights:
        """Get business model detection and insights.

        Detects the tenant's likely business model from supplier patterns and
        returns optimization recommendations plus benchmarking comparisons.

        Raises:
            Exception: Any helper/database error is logged and re-raised.
        """
        try:
            # Analyze supplier patterns (diversity, volumes, delivery cadence)
            supplier_patterns = await self._analyze_supplier_patterns(db, tenant_id)

            # Detect business model with per-characteristic detail
            business_model = await self._detect_business_model_detailed(db, tenant_id)

            # Generate optimization recommendations for the detected model
            optimization = await self._generate_optimization_recommendations(db, tenant_id, business_model)

            # Get benchmarking data relative to the detected model
            benchmarking = await self._get_benchmarking_data(db, tenant_id, business_model['model'])

            return BusinessModelInsights(
                detected_model=business_model['model'],
                confidence_score=business_model['confidence'],
                model_characteristics=business_model['characteristics'],
                supplier_diversity_score=supplier_patterns['diversity_score'],
                procurement_volume_patterns=supplier_patterns['volume_patterns'],
                delivery_frequency_patterns=supplier_patterns['delivery_patterns'],
                order_size_patterns=supplier_patterns['order_size_patterns'],
                optimization_opportunities=optimization['opportunities'],
                recommended_supplier_mix=optimization['supplier_mix'],
                cost_optimization_potential=optimization['cost_potential'],
                risk_mitigation_suggestions=optimization['risk_mitigation'],
                industry_comparison=benchmarking['industry'],
                # 'peer' may be absent from benchmarking, hence .get()
                peer_comparison=benchmarking.get('peer')
            )

        except Exception as e:
            self.logger.error("Error generating business model insights", error=str(e))
            raise
|
||||
|
||||
async def get_alert_summary(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
date_from: Optional[datetime] = None,
|
||||
date_to: Optional[datetime] = None
|
||||
) -> List[AlertSummary]:
|
||||
"""Get alert summary by type and severity"""
|
||||
try:
|
||||
if not date_to:
|
||||
date_to = datetime.now(timezone.utc)
|
||||
if not date_from:
|
||||
date_from = date_to - timedelta(days=30)
|
||||
|
||||
query = select(
|
||||
SupplierAlert.alert_type,
|
||||
SupplierAlert.severity,
|
||||
func.count(SupplierAlert.id).label('count'),
|
||||
func.avg(
|
||||
func.extract('epoch', SupplierAlert.resolved_at - SupplierAlert.triggered_at) / 3600
|
||||
).label('avg_resolution_hours'),
|
||||
func.max(
|
||||
func.extract('epoch', func.current_timestamp() - SupplierAlert.triggered_at) / 3600
|
||||
).label('oldest_age_hours')
|
||||
).where(
|
||||
and_(
|
||||
SupplierAlert.tenant_id == tenant_id,
|
||||
SupplierAlert.triggered_at >= date_from,
|
||||
SupplierAlert.triggered_at <= date_to
|
||||
)
|
||||
).group_by(SupplierAlert.alert_type, SupplierAlert.severity)
|
||||
|
||||
result = await db.execute(query)
|
||||
rows = result.all()
|
||||
|
||||
alert_summaries = []
|
||||
for row in rows:
|
||||
alert_summaries.append(AlertSummary(
|
||||
alert_type=row.alert_type,
|
||||
severity=row.severity,
|
||||
count=row.count,
|
||||
avg_resolution_time_hours=row.avg_resolution_hours,
|
||||
oldest_alert_age_hours=row.oldest_age_hours
|
||||
))
|
||||
|
||||
return alert_summaries
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error("Error getting alert summary", error=str(e))
|
||||
raise
|
||||
|
||||
# === Private Helper Methods ===
|
||||
|
||||
async def _get_supplier_statistics(self, db: AsyncSession, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""Get basic supplier statistics"""
|
||||
query = select(
|
||||
func.count(Supplier.id).label('total_suppliers'),
|
||||
func.count(Supplier.id.filter(Supplier.status == SupplierStatus.ACTIVE)).label('active_suppliers')
|
||||
).where(Supplier.tenant_id == tenant_id)
|
||||
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
|
||||
return {
|
||||
'total_suppliers': row.total_suppliers or 0,
|
||||
'active_suppliers': row.active_suppliers or 0
|
||||
}
|
||||
|
||||
async def _get_performance_statistics(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
date_from: datetime,
|
||||
date_to: datetime
|
||||
) -> Dict[str, Any]:
|
||||
"""Get performance statistics"""
|
||||
# Get recent performance metrics
|
||||
query = select(
|
||||
func.avg(SupplierPerformanceMetric.metric_value).label('avg_score'),
|
||||
func.count(
|
||||
SupplierPerformanceMetric.id.filter(
|
||||
SupplierPerformanceMetric.metric_value >= settings.GOOD_DELIVERY_RATE
|
||||
)
|
||||
).label('above_threshold'),
|
||||
func.count(
|
||||
SupplierPerformanceMetric.id.filter(
|
||||
SupplierPerformanceMetric.metric_value < settings.GOOD_DELIVERY_RATE
|
||||
)
|
||||
).label('below_threshold')
|
||||
).where(
|
||||
and_(
|
||||
SupplierPerformanceMetric.tenant_id == tenant_id,
|
||||
SupplierPerformanceMetric.calculated_at >= date_from,
|
||||
SupplierPerformanceMetric.calculated_at <= date_to,
|
||||
SupplierPerformanceMetric.metric_type == PerformanceMetricType.DELIVERY_PERFORMANCE
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
|
||||
# Get quality statistics
|
||||
quality_query = select(
|
||||
func.avg(SupplierPerformanceMetric.metric_value).label('avg_quality')
|
||||
).where(
|
||||
and_(
|
||||
SupplierPerformanceMetric.tenant_id == tenant_id,
|
||||
SupplierPerformanceMetric.calculated_at >= date_from,
|
||||
SupplierPerformanceMetric.calculated_at <= date_to,
|
||||
SupplierPerformanceMetric.metric_type == PerformanceMetricType.QUALITY_SCORE
|
||||
)
|
||||
)
|
||||
|
||||
quality_result = await db.execute(quality_query)
|
||||
quality_row = quality_result.first()
|
||||
|
||||
# Get scorecard count
|
||||
scorecard_query = select(func.count(SupplierScorecard.id)).where(
|
||||
and_(
|
||||
SupplierScorecard.tenant_id == tenant_id,
|
||||
SupplierScorecard.generated_at >= date_from,
|
||||
SupplierScorecard.generated_at <= date_to
|
||||
)
|
||||
)
|
||||
|
||||
scorecard_result = await db.execute(scorecard_query)
|
||||
scorecard_count = scorecard_result.scalar() or 0
|
||||
|
||||
return {
|
||||
'avg_overall_score': row.avg_score or 0,
|
||||
'above_threshold': row.above_threshold or 0,
|
||||
'below_threshold': row.below_threshold or 0,
|
||||
'avg_delivery_rate': row.avg_score or 0,
|
||||
'avg_quality_rate': quality_row.avg_quality or 0,
|
||||
'recent_scorecards': scorecard_count
|
||||
}
|
||||
|
||||
async def _get_alert_statistics(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
date_from: datetime,
|
||||
date_to: datetime
|
||||
) -> Dict[str, int]:
|
||||
"""Get alert statistics"""
|
||||
query = select(
|
||||
func.count(SupplierAlert.id.filter(SupplierAlert.status == AlertStatus.ACTIVE)).label('total_active'),
|
||||
func.count(SupplierAlert.id.filter(SupplierAlert.severity == AlertSeverity.CRITICAL)).label('critical'),
|
||||
func.count(SupplierAlert.id.filter(SupplierAlert.priority_score >= 70)).label('high_priority')
|
||||
).where(
|
||||
and_(
|
||||
SupplierAlert.tenant_id == tenant_id,
|
||||
SupplierAlert.triggered_at >= date_from,
|
||||
SupplierAlert.triggered_at <= date_to
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
|
||||
return {
|
||||
'total_active': row.total_active or 0,
|
||||
'critical_alerts': row.critical or 0,
|
||||
'high_priority': row.high_priority or 0
|
||||
}
|
||||
|
||||
async def _get_financial_statistics(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
date_from: datetime,
|
||||
date_to: datetime
|
||||
) -> Dict[str, Decimal]:
|
||||
"""Get financial statistics"""
|
||||
# Calculate potential cost savings based on supplier performance
|
||||
# Cost savings estimated from quality issues avoided, on-time deliveries, etc.
|
||||
|
||||
# Get purchase orders in period
|
||||
query = select(
|
||||
func.sum(PurchaseOrder.total_amount).label('total_spent')
|
||||
).where(
|
||||
and_(
|
||||
PurchaseOrder.tenant_id == tenant_id,
|
||||
PurchaseOrder.created_at >= date_from,
|
||||
PurchaseOrder.created_at <= date_to,
|
||||
PurchaseOrder.status.in_([
|
||||
PurchaseOrderStatus.RECEIVED,
|
||||
PurchaseOrderStatus.PARTIALLY_RECEIVED,
|
||||
PurchaseOrderStatus.COMPLETED
|
||||
])
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
total_spent = row.total_spent or Decimal('0')
|
||||
|
||||
# Estimate cost savings as 2-5% of total spent based on:
|
||||
# - Better supplier selection
|
||||
# - Reduced waste from quality issues
|
||||
# - Better pricing through supplier comparison
|
||||
estimated_savings_percentage = Decimal('0.03') # 3% conservative estimate
|
||||
cost_savings = total_spent * estimated_savings_percentage
|
||||
|
||||
return {
|
||||
'cost_savings': cost_savings
|
||||
}
|
||||
|
||||
async def _detect_business_model(self, db: AsyncSession, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""Detect business model based on supplier patterns"""
|
||||
# Get supplier count by category
|
||||
query = select(
|
||||
func.count(Supplier.id).label('total_suppliers'),
|
||||
func.count(Supplier.id.filter(Supplier.supplier_type == SupplierType.INGREDIENTS)).label('ingredient_suppliers')
|
||||
).where(
|
||||
and_(
|
||||
Supplier.tenant_id == tenant_id,
|
||||
Supplier.status == SupplierStatus.ACTIVE
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(query)
|
||||
row = result.first()
|
||||
|
||||
total_suppliers = row.total_suppliers or 0
|
||||
ingredient_suppliers = row.ingredient_suppliers or 0
|
||||
|
||||
# Simple business model detection logic
|
||||
if total_suppliers >= settings.CENTRAL_BAKERY_THRESHOLD_SUPPLIERS:
|
||||
model = "central_bakery"
|
||||
confidence = 0.85
|
||||
elif total_suppliers >= settings.INDIVIDUAL_BAKERY_THRESHOLD_SUPPLIERS:
|
||||
model = "individual_bakery"
|
||||
confidence = 0.75
|
||||
else:
|
||||
model = "small_bakery"
|
||||
confidence = 0.60
|
||||
|
||||
return {
|
||||
'model': model,
|
||||
'confidence': confidence,
|
||||
'metrics': {
|
||||
'total_suppliers': total_suppliers,
|
||||
'ingredient_suppliers': ingredient_suppliers,
|
||||
'supplier_diversity': ingredient_suppliers / max(total_suppliers, 1)
|
||||
}
|
||||
}
|
||||
|
||||
    async def _calculate_performance_trends(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        date_from: datetime,
        date_to: datetime
    ) -> Dict[str, str]:
        """Calculate performance trends based on historical data.

        Compares average supplier ratings over the given window against the
        immediately preceding window of equal length.

        Returns:
            Dict with 'performance_trend', 'delivery_trend', 'quality_trend',
            each one of 'improving' / 'declining' / 'stable'.
        """

        # Previous window: same length, ending where the current one starts.
        period_length = (date_to - date_from).days
        previous_period_start = date_from - timedelta(days=period_length)
        previous_period_end = date_from

        # Current-window averages of supplier ratings, weighted by the join:
        # each PO row contributes its supplier's rating once.
        current_query = select(
            func.avg(Supplier.delivery_rating).label('avg_delivery'),
            func.avg(Supplier.quality_rating).label('avg_quality'),
            func.count(PurchaseOrder.id).label('order_count')
        ).select_from(PurchaseOrder).join(
            Supplier, PurchaseOrder.supplier_id == Supplier.id
        ).where(
            and_(
                PurchaseOrder.tenant_id == tenant_id,
                PurchaseOrder.created_at >= date_from,
                PurchaseOrder.created_at <= date_to
            )
        )

        current_result = await db.execute(current_query)
        current = current_result.first()

        # Same aggregation over the previous window (end exclusive to avoid
        # double-counting the boundary instant).
        previous_query = select(
            func.avg(Supplier.delivery_rating).label('avg_delivery'),
            func.avg(Supplier.quality_rating).label('avg_quality'),
            func.count(PurchaseOrder.id).label('order_count')
        ).select_from(PurchaseOrder).join(
            Supplier, PurchaseOrder.supplier_id == Supplier.id
        ).where(
            and_(
                PurchaseOrder.tenant_id == tenant_id,
                PurchaseOrder.created_at >= previous_period_start,
                PurchaseOrder.created_at < previous_period_end
            )
        )

        previous_result = await db.execute(previous_query)
        previous = previous_result.first()

        def calculate_trend(current_value, previous_value, threshold=0.05):
            """Classify relative change: > +5% improving, < -5% declining.

            Missing or zero values on either side yield 'stable' (no basis
            for comparison).
            """
            if not current_value or not previous_value:
                return 'stable'
            change = (current_value - previous_value) / previous_value
            if change > threshold:
                return 'improving'
            elif change < -threshold:
                return 'declining'
            return 'stable'

        delivery_trend = calculate_trend(
            current.avg_delivery if current else None,
            previous.avg_delivery if previous else None
        )

        quality_trend = calculate_trend(
            current.avg_quality if current else None,
            previous.avg_quality if previous else None
        )

        # Overall verdict: pessimistic — any declining dimension marks the
        # whole trend declining; both must improve to report improvement.
        if delivery_trend == 'improving' and quality_trend == 'improving':
            performance_trend = 'improving'
        elif delivery_trend == 'declining' or quality_trend == 'declining':
            performance_trend = 'declining'
        else:
            performance_trend = 'stable'

        return {
            'performance_trend': performance_trend,
            'delivery_trend': delivery_trend,
            'quality_trend': quality_trend
        }
|
||||
|
||||
def _categorize_performance(self, score: float) -> str:
    """Map a 0-100 performance score onto a named category.

    Thresholds come from service settings; any score below the POOR
    threshold is classified as "poor".
    """
    # Ordered best-to-worst; the first threshold the score meets wins.
    bands = (
        (settings.EXCELLENT_DELIVERY_RATE, "excellent"),
        (settings.GOOD_DELIVERY_RATE, "good"),
        (settings.ACCEPTABLE_DELIVERY_RATE, "acceptable"),
        (settings.POOR_DELIVERY_RATE, "needs_improvement"),
    )
    for threshold, label in bands:
        if score >= threshold:
            return label
    return "poor"
|
||||
|
||||
def _assess_risk_level(self, metrics: Dict[str, Any], alerts: Dict[str, Any]) -> str:
|
||||
"""Assess risk level based on metrics and alerts"""
|
||||
if alerts.get('active_count', 0) > 3 or metrics.get('overall_score', 0) < 50:
|
||||
return "critical"
|
||||
elif alerts.get('active_count', 0) > 1 or metrics.get('overall_score', 0) < 70:
|
||||
return "high"
|
||||
elif alerts.get('active_count', 0) > 0 or metrics.get('overall_score', 0) < 85:
|
||||
return "medium"
|
||||
else:
|
||||
return "low"
|
||||
|
||||
def _calculate_change_percentage(self, current: float, previous: Optional[float]) -> Optional[float]:
|
||||
"""Calculate percentage change between current and previous values"""
|
||||
if previous is None or previous == 0:
|
||||
return None
|
||||
return ((current - previous) / previous) * 100
|
||||
|
||||
# === Placeholder methods for complex analytics ===
|
||||
# These methods return placeholder data and should be implemented with actual business logic
|
||||
|
||||
async def _get_supplier_info(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID) -> Dict[str, Any]:
    """Look up a supplier's display name, scoped to the tenant.

    Returns {'name': <supplier name>} or {'name': 'Unknown Supplier'} when
    no matching row exists.
    """
    stmt = select(Supplier).where(and_(Supplier.id == supplier_id, Supplier.tenant_id == tenant_id))
    result = await db.execute(stmt)
    # scalar_one_or_none: None (rather than an exception) when not found.
    supplier = result.scalar_one_or_none()
    return {'name': supplier.name if supplier else 'Unknown Supplier'}
|
||||
|
||||
async def _get_current_performance_metrics(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID) -> Dict[str, Any]:
|
||||
return {'overall_score': 75.0, 'delivery_performance': 80.0, 'quality_performance': 85.0, 'cost_performance': 70.0, 'service_performance': 75.0}
|
||||
|
||||
async def _get_previous_performance_metrics(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID, days_back: int) -> Dict[str, Any]:
|
||||
return {'overall_score': 70.0}
|
||||
|
||||
async def _get_supplier_activity_stats(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {'orders_count': 15, 'avg_delivery_time': 3.2, 'quality_issues': 2, 'cost_variance': 5.5}
|
||||
|
||||
async def _get_supplier_alert_summary(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {'active_count': 1, 'resolved_count': 3, 'trend': 'improving'}
|
||||
|
||||
async def _generate_supplier_recommendations(self, db: AsyncSession, supplier_id: UUID, tenant_id: UUID, metrics: Dict[str, Any], activity: Dict[str, Any], alerts: Dict[str, Any]) -> Dict[str, Any]:
|
||||
return {
|
||||
'strengths': ['Consistent quality', 'Reliable delivery'],
|
||||
'improvements': ['Cost optimization', 'Communication'],
|
||||
'actions': [{'action': 'Negotiate better pricing', 'priority': 'high'}]
|
||||
}
|
||||
|
||||
async def _get_performance_distribution(self, db: AsyncSession, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {
|
||||
'total_suppliers': 25,
|
||||
'distribution': {'excellent': 5, 'good': 12, 'acceptable': 6, 'poor': 2},
|
||||
'score_ranges': {'excellent': [95, 100, 97.5], 'good': [80, 94, 87.0]}
|
||||
}
|
||||
|
||||
async def _get_detailed_trends(self, db: AsyncSession, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {
|
||||
'overall': {'month_over_month': 2.5},
|
||||
'delivery': {'month_over_month': 1.8},
|
||||
'quality': {'month_over_month': 3.2},
|
||||
'cost': {'month_over_month': -1.5}
|
||||
}
|
||||
|
||||
async def _get_comparative_analysis(self, db: AsyncSession, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {
|
||||
'top_performers': [],
|
||||
'underperformers': [],
|
||||
'most_improved': [],
|
||||
'biggest_declines': []
|
||||
}
|
||||
|
||||
async def _get_risk_analysis(self, db: AsyncSession, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {
|
||||
'high_risk': [],
|
||||
'contract_renewals': [],
|
||||
'certification_expiries': []
|
||||
}
|
||||
|
||||
async def _get_financial_impact(self, db: AsyncSession, tenant_id: UUID, date_from: datetime, date_to: datetime) -> Dict[str, Any]:
|
||||
return {
|
||||
'total_value': Decimal('150000'),
|
||||
'cost_savings': Decimal('5000'),
|
||||
'cost_avoidance': Decimal('2000'),
|
||||
'risk_exposure': Decimal('10000')
|
||||
}
|
||||
|
||||
async def _analyze_supplier_patterns(self, db: AsyncSession, tenant_id: UUID) -> Dict[str, Any]:
|
||||
return {
|
||||
'diversity_score': 75.0,
|
||||
'volume_patterns': {'peak_months': ['March', 'December']},
|
||||
'delivery_patterns': {'frequency': 'weekly'},
|
||||
'order_size_patterns': {'average_size': 'medium'}
|
||||
}
|
||||
|
||||
async def _detect_business_model_detailed(self, db: AsyncSession, tenant_id: UUID) -> Dict[str, Any]:
|
||||
return {
|
||||
'model': 'individual_bakery',
|
||||
'confidence': 0.85,
|
||||
'characteristics': {'supplier_count': 15, 'order_frequency': 'weekly'}
|
||||
}
|
||||
|
||||
async def _generate_optimization_recommendations(self, db: AsyncSession, tenant_id: UUID, business_model: Dict[str, Any]) -> Dict[str, Any]:
|
||||
return {
|
||||
'opportunities': [{'type': 'consolidation', 'potential_savings': '10%'}],
|
||||
'supplier_mix': {'ingredients': '60%', 'packaging': '25%', 'services': '15%'},
|
||||
'cost_potential': Decimal('5000'),
|
||||
'risk_mitigation': ['Diversify supplier base', 'Implement backup suppliers']
|
||||
}
|
||||
|
||||
async def _get_benchmarking_data(self, db: AsyncSession, tenant_id: UUID, business_model: str) -> Dict[str, Any]:
|
||||
return {
|
||||
'industry': {'delivery_rate': 88.5, 'quality_score': 91.2},
|
||||
'peer': {'delivery_rate': 86.8, 'quality_score': 89.5}
|
||||
}
|
||||
863
services/suppliers/app/services/performance_service.py
Normal file
863
services/suppliers/app/services/performance_service.py
Normal file
@@ -0,0 +1,863 @@
|
||||
# ================================================================
|
||||
# services/suppliers/app/services/performance_service.py
|
||||
# ================================================================
|
||||
"""
|
||||
Supplier Performance Tracking Service
|
||||
Comprehensive supplier performance calculation, tracking, and analytics
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List, Optional, Dict, Any, Tuple
|
||||
from uuid import UUID
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, or_, desc, asc
|
||||
from sqlalchemy.orm import selectinload
|
||||
import structlog
|
||||
from decimal import Decimal
|
||||
|
||||
from app.models.suppliers import (
|
||||
Supplier, PurchaseOrder, Delivery, SupplierQualityReview,
|
||||
PurchaseOrderStatus, DeliveryStatus, QualityRating, DeliveryRating
|
||||
)
|
||||
from app.models.performance import (
|
||||
SupplierPerformanceMetric, SupplierScorecard, SupplierAlert,
|
||||
PerformanceMetricType, PerformancePeriod, AlertType, AlertSeverity,
|
||||
AlertStatus
|
||||
)
|
||||
from app.schemas.performance import (
|
||||
PerformanceMetricCreate, ScorecardCreate, AlertCreate,
|
||||
PerformanceDashboardSummary, SupplierPerformanceInsights,
|
||||
PerformanceAnalytics, BusinessModelInsights
|
||||
)
|
||||
from app.core.config import settings
|
||||
from shared.database.transactions import transactional
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class PerformanceTrackingService:
|
||||
"""Service for tracking and calculating supplier performance metrics"""
|
||||
|
||||
def __init__(self):
    # Bind a service tag so every log line from this service is attributable.
    self.logger = logger.bind(service="performance_tracking")
|
||||
|
||||
@transactional
async def calculate_supplier_performance(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID,
    period: PerformancePeriod,
    period_start: datetime,
    period_end: datetime
) -> SupplierPerformanceMetric:
    """Calculate comprehensive performance metrics for a supplier.

    Aggregates order, delivery and quality data for the window, derives four
    category scores (delivery/quality/cost/service, each 0-100), persists one
    SupplierPerformanceMetric row per category, and refreshes the supplier's
    summary ratings.

    Returns the first persisted metric or None when none were created.
    NOTE(review): the first metric is the delivery-performance one given the
    current creation order; the weighted overall score itself is never
    persisted, despite the "overall performance metric" comment — confirm.
    """
    try:
        self.logger.info("Calculating supplier performance",
                       supplier_id=str(supplier_id),
                       period=period.value,
                       period_start=period_start.isoformat(),
                       period_end=period_end.isoformat())

        # Get base data for calculations
        orders_data = await self._get_orders_data(db, supplier_id, tenant_id, period_start, period_end)
        deliveries_data = await self._get_deliveries_data(db, supplier_id, tenant_id, period_start, period_end)
        quality_data = await self._get_quality_data(db, supplier_id, tenant_id, period_start, period_end)

        # Calculate delivery performance
        delivery_performance = await self._calculate_delivery_performance(
            orders_data, deliveries_data
        )

        # Calculate quality performance
        quality_performance = await self._calculate_quality_performance(
            deliveries_data, quality_data
        )

        # Calculate cost performance
        cost_performance = await self._calculate_cost_performance(
            orders_data, deliveries_data
        )

        # Calculate service performance
        service_performance = await self._calculate_service_performance(
            orders_data, quality_data
        )

        # Calculate overall performance (weighted average)
        # Weights: delivery 30%, quality 30%, cost 20%, service 20%.
        overall_performance = (
            delivery_performance * 0.30 +
            quality_performance * 0.30 +
            cost_performance * 0.20 +
            service_performance * 0.20
        )

        # Create performance metrics for each category
        performance_metrics = []

        metrics_to_create = [
            (PerformanceMetricType.DELIVERY_PERFORMANCE, delivery_performance),
            (PerformanceMetricType.QUALITY_SCORE, quality_performance),
            (PerformanceMetricType.PRICE_COMPETITIVENESS, cost_performance),
            (PerformanceMetricType.COMMUNICATION_RATING, service_performance)
        ]

        for metric_type, value in metrics_to_create:
            # Get previous period value for trend calculation
            previous_value = await self._get_previous_period_value(
                db, supplier_id, tenant_id, metric_type, period, period_start
            )

            # Calculate trend (direction label + percent change vs previous)
            trend_direction, trend_percentage = self._calculate_trend(value, previous_value)

            # Prepare detailed metrics data (JSON breakdown per metric type)
            metrics_data = await self._prepare_detailed_metrics(
                metric_type, orders_data, deliveries_data, quality_data
            )

            # Create performance metric (validated via the Pydantic schema
            # before constructing the ORM row below)
            metric_create = PerformanceMetricCreate(
                supplier_id=supplier_id,
                metric_type=metric_type,
                period=period,
                period_start=period_start,
                period_end=period_end,
                metric_value=value,
                target_value=self._get_target_value(metric_type),
                total_orders=orders_data.get('total_orders', 0),
                total_deliveries=deliveries_data.get('total_deliveries', 0),
                on_time_deliveries=deliveries_data.get('on_time_deliveries', 0),
                late_deliveries=deliveries_data.get('late_deliveries', 0),
                quality_issues=quality_data.get('quality_issues', 0),
                total_amount=orders_data.get('total_amount', Decimal('0')),
                metrics_data=metrics_data
            )

            performance_metric = SupplierPerformanceMetric(
                tenant_id=tenant_id,
                supplier_id=supplier_id,
                metric_type=metric_create.metric_type,
                period=metric_create.period,
                period_start=metric_create.period_start,
                period_end=metric_create.period_end,
                metric_value=metric_create.metric_value,
                target_value=metric_create.target_value,
                previous_value=previous_value,
                total_orders=metric_create.total_orders,
                total_deliveries=metric_create.total_deliveries,
                on_time_deliveries=metric_create.on_time_deliveries,
                late_deliveries=metric_create.late_deliveries,
                quality_issues=metric_create.quality_issues,
                total_amount=metric_create.total_amount,
                metrics_data=metric_create.metrics_data,
                trend_direction=trend_direction,
                trend_percentage=trend_percentage,
                calculated_at=datetime.now(timezone.utc)
            )

            db.add(performance_metric)
            performance_metrics.append(performance_metric)

        # Flush so the rows get primary keys before the commit managed by
        # the @transactional decorator.
        await db.flush()

        # Update supplier's overall performance ratings
        await self._update_supplier_ratings(db, supplier_id, overall_performance, quality_performance)

        self.logger.info("Supplier performance calculated successfully",
                       supplier_id=str(supplier_id),
                       overall_performance=overall_performance)

        # Return the overall performance metric
        return performance_metrics[0] if performance_metrics else None

    except Exception as e:
        self.logger.error("Error calculating supplier performance",
                        supplier_id=str(supplier_id),
                        error=str(e))
        raise
|
||||
|
||||
async def _get_orders_data(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID,
    period_start: datetime,
    period_end: datetime
) -> Dict[str, Any]:
    """Get orders data for performance calculation.

    Aggregates the supplier's purchase orders whose order_date falls inside
    [period_start, period_end] into counts and monetary totals.
    """
    # BUG FIX: the original called .filter() on the *column*
    # (PurchaseOrder.id.filter(...)), which is not a column method and fails
    # at query-build time. The aggregate FILTER clause belongs on the
    # func.count() expression (SQLAlchemy FunctionElement.filter()).
    query = select(
        func.count(PurchaseOrder.id).label('total_orders'),
        func.sum(PurchaseOrder.total_amount).label('total_amount'),
        func.avg(PurchaseOrder.total_amount).label('avg_order_value'),
        func.count(PurchaseOrder.id).filter(
            PurchaseOrder.status == PurchaseOrderStatus.COMPLETED
        ).label('completed_orders')
    ).where(
        and_(
            PurchaseOrder.supplier_id == supplier_id,
            PurchaseOrder.tenant_id == tenant_id,
            PurchaseOrder.order_date >= period_start,
            PurchaseOrder.order_date <= period_end
        )
    )

    result = await db.execute(query)
    row = result.first()

    # A window with no orders yields NULL aggregates; coalesce to zeros.
    return {
        'total_orders': row.total_orders or 0,
        'total_amount': row.total_amount or Decimal('0'),
        'avg_order_value': row.avg_order_value or Decimal('0'),
        'completed_orders': row.completed_orders or 0
    }
|
||||
|
||||
async def _get_deliveries_data(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID,
    period_start: datetime,
    period_end: datetime
) -> Dict[str, Any]:
    """Get deliveries data for performance calculation.

    Counts on-time vs late completed deliveries scheduled inside the window
    and computes the average delay in hours (negative when early).
    """
    # BUG FIX: both conditional counts used Delivery.id.filter(...), but
    # .filter() is not a column method — it belongs on the func.count()
    # aggregate (SQL FILTER clause).
    query = select(
        func.count(Delivery.id).label('total_deliveries'),
        func.count(Delivery.id).filter(
            and_(
                Delivery.actual_arrival <= Delivery.scheduled_date,
                Delivery.status == DeliveryStatus.DELIVERED
            )
        ).label('on_time_deliveries'),
        func.count(Delivery.id).filter(
            and_(
                Delivery.actual_arrival > Delivery.scheduled_date,
                Delivery.status == DeliveryStatus.DELIVERED
            )
        ).label('late_deliveries'),
        # epoch difference in seconds, converted to hours.
        func.avg(
            func.extract('epoch', Delivery.actual_arrival - Delivery.scheduled_date) / 3600
        ).label('avg_delay_hours')
    ).where(
        and_(
            Delivery.supplier_id == supplier_id,
            Delivery.tenant_id == tenant_id,
            Delivery.scheduled_date >= period_start,
            Delivery.scheduled_date <= period_end,
            Delivery.status.in_([DeliveryStatus.DELIVERED, DeliveryStatus.PARTIALLY_DELIVERED])
        )
    )

    result = await db.execute(query)
    row = result.first()

    # Coalesce NULL aggregates (empty window) to zeros.
    return {
        'total_deliveries': row.total_deliveries or 0,
        'on_time_deliveries': row.on_time_deliveries or 0,
        'late_deliveries': row.late_deliveries or 0,
        'avg_delay_hours': row.avg_delay_hours or 0
    }
|
||||
|
||||
async def _get_quality_data(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID,
    period_start: datetime,
    period_end: datetime
) -> Dict[str, Any]:
    """Get quality data for performance calculation.

    Aggregates the supplier's quality reviews inside the window into average
    ratings and an issue count.
    """
    # BUG FIX: func.cast(x, func.Float) rendered an invalid SQL function call
    # and passed an uncalled func generator as the type; use SQLAlchemy's
    # cast() expression with the Float type instead. Also moved the
    # conditional count's FILTER clause onto func.count() — columns have no
    # .filter() method.
    from sqlalchemy import cast, Float

    query = select(
        func.count(SupplierQualityReview.id).label('total_reviews'),
        func.avg(
            cast(SupplierQualityReview.quality_rating, Float)
        ).label('avg_quality_rating'),
        func.avg(
            cast(SupplierQualityReview.delivery_rating, Float)
        ).label('avg_delivery_rating'),
        func.avg(SupplierQualityReview.communication_rating).label('avg_communication_rating'),
        func.count(SupplierQualityReview.id).filter(
            SupplierQualityReview.quality_issues.isnot(None)
        ).label('quality_issues')
    ).where(
        and_(
            SupplierQualityReview.supplier_id == supplier_id,
            SupplierQualityReview.tenant_id == tenant_id,
            SupplierQualityReview.review_date >= period_start,
            SupplierQualityReview.review_date <= period_end
        )
    )

    result = await db.execute(query)
    row = result.first()

    # Coalesce NULL aggregates (no reviews in the window) to zeros.
    return {
        'total_reviews': row.total_reviews or 0,
        'avg_quality_rating': row.avg_quality_rating or 0,
        'avg_delivery_rating': row.avg_delivery_rating or 0,
        'avg_communication_rating': row.avg_communication_rating or 0,
        'quality_issues': row.quality_issues or 0
    }
|
||||
|
||||
async def _calculate_delivery_performance(
|
||||
self,
|
||||
orders_data: Dict[str, Any],
|
||||
deliveries_data: Dict[str, Any]
|
||||
) -> float:
|
||||
"""Calculate delivery performance score (0-100)"""
|
||||
total_deliveries = deliveries_data.get('total_deliveries', 0)
|
||||
if total_deliveries == 0:
|
||||
return 0.0
|
||||
|
||||
on_time_deliveries = deliveries_data.get('on_time_deliveries', 0)
|
||||
on_time_rate = (on_time_deliveries / total_deliveries) * 100
|
||||
|
||||
# Apply penalty for average delay
|
||||
avg_delay_hours = deliveries_data.get('avg_delay_hours', 0)
|
||||
delay_penalty = min(avg_delay_hours * 2, 20) # Max 20 point penalty
|
||||
|
||||
performance_score = max(on_time_rate - delay_penalty, 0)
|
||||
return min(performance_score, 100.0)
|
||||
|
||||
async def _calculate_quality_performance(
|
||||
self,
|
||||
deliveries_data: Dict[str, Any],
|
||||
quality_data: Dict[str, Any]
|
||||
) -> float:
|
||||
"""Calculate quality performance score (0-100)"""
|
||||
total_reviews = quality_data.get('total_reviews', 0)
|
||||
if total_reviews == 0:
|
||||
return 50.0 # Default score when no reviews
|
||||
|
||||
# Base quality score from ratings
|
||||
avg_quality_rating = quality_data.get('avg_quality_rating', 0)
|
||||
base_score = (avg_quality_rating / 5.0) * 100
|
||||
|
||||
# Apply penalty for quality issues
|
||||
quality_issues = quality_data.get('quality_issues', 0)
|
||||
issue_penalty = min(quality_issues * 5, 30) # Max 30 point penalty
|
||||
|
||||
performance_score = max(base_score - issue_penalty, 0)
|
||||
return min(performance_score, 100.0)
|
||||
|
||||
async def _calculate_cost_performance(
|
||||
self,
|
||||
orders_data: Dict[str, Any],
|
||||
deliveries_data: Dict[str, Any]
|
||||
) -> float:
|
||||
"""Calculate cost performance score (0-100)"""
|
||||
# For now, return a baseline score
|
||||
# In future, implement price comparison with market rates
|
||||
return 75.0
|
||||
|
||||
async def _calculate_service_performance(
|
||||
self,
|
||||
orders_data: Dict[str, Any],
|
||||
quality_data: Dict[str, Any]
|
||||
) -> float:
|
||||
"""Calculate service performance score (0-100)"""
|
||||
total_reviews = quality_data.get('total_reviews', 0)
|
||||
if total_reviews == 0:
|
||||
return 50.0 # Default score when no reviews
|
||||
|
||||
avg_communication_rating = quality_data.get('avg_communication_rating', 0)
|
||||
return (avg_communication_rating / 5.0) * 100
|
||||
|
||||
def _calculate_trend(self, current_value: float, previous_value: Optional[float]) -> Tuple[Optional[str], Optional[float]]:
|
||||
"""Calculate performance trend"""
|
||||
if previous_value is None or previous_value == 0:
|
||||
return None, None
|
||||
|
||||
change_percentage = ((current_value - previous_value) / previous_value) * 100
|
||||
|
||||
if abs(change_percentage) < 2: # Less than 2% change considered stable
|
||||
trend_direction = "stable"
|
||||
elif change_percentage > 0:
|
||||
trend_direction = "improving"
|
||||
else:
|
||||
trend_direction = "declining"
|
||||
|
||||
return trend_direction, change_percentage
|
||||
|
||||
async def _get_previous_period_value(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID,
    metric_type: PerformanceMetricType,
    period: PerformancePeriod,
    current_period_start: datetime
) -> Optional[float]:
    """Get the previous period's value for trend calculation.

    The lookback window is a fixed-width offset per period granularity
    (30/90/365 days approximate month/quarter/year); the newest metric whose
    period_start falls inside [previous_start, current_period_start) wins.
    """
    # Fixed-width lookback per granularity; YEARLY is the fallback.
    offsets = {
        PerformancePeriod.DAILY: timedelta(days=1),
        PerformancePeriod.WEEKLY: timedelta(weeks=1),
        PerformancePeriod.MONTHLY: timedelta(days=30),
        PerformancePeriod.QUARTERLY: timedelta(days=90),
    }
    previous_end = current_period_start
    previous_start = current_period_start - offsets.get(period, timedelta(days=365))

    query = select(SupplierPerformanceMetric.metric_value).where(
        and_(
            SupplierPerformanceMetric.supplier_id == supplier_id,
            SupplierPerformanceMetric.tenant_id == tenant_id,
            SupplierPerformanceMetric.metric_type == metric_type,
            SupplierPerformanceMetric.period == period,
            SupplierPerformanceMetric.period_start >= previous_start,
            SupplierPerformanceMetric.period_start < previous_end
        )
    ).order_by(desc(SupplierPerformanceMetric.period_start)).limit(1)

    result = await db.execute(query)
    row = result.first()
    return row[0] if row else None
|
||||
|
||||
def _get_target_value(self, metric_type: PerformanceMetricType) -> float:
    """Return the target score (0-100) a supplier should reach for *metric_type*."""
    # Delivery/quality targets come from settings; the rest are fixed policy
    # numbers. Unknown metric types fall back to a generic 80.0 target.
    target_by_type = {
        PerformanceMetricType.DELIVERY_PERFORMANCE: settings.GOOD_DELIVERY_RATE,
        PerformanceMetricType.QUALITY_SCORE: settings.GOOD_QUALITY_RATE,
        PerformanceMetricType.ORDER_ACCURACY: 95.0,
        PerformanceMetricType.COMPLIANCE_SCORE: 95.0,
        PerformanceMetricType.RESPONSE_TIME: 90.0,
        PerformanceMetricType.FINANCIAL_STABILITY: 85.0,
        PerformanceMetricType.PRICE_COMPETITIVENESS: 80.0,
        PerformanceMetricType.COMMUNICATION_RATING: 80.0,
    }
    return target_by_type.get(metric_type, 80.0)
|
||||
|
||||
async def _prepare_detailed_metrics(
    self,
    metric_type: PerformanceMetricType,
    orders_data: Dict[str, Any],
    deliveries_data: Dict[str, Any],
    quality_data: Dict[str, Any]
) -> Dict[str, Any]:
    """Prepare the per-metric-type JSON breakdown stored with a metric row."""
    if metric_type == PerformanceMetricType.DELIVERY_PERFORMANCE:
        # Guard the denominator so an empty window cannot divide by zero.
        delivered = max(deliveries_data.get('total_deliveries', 1), 1)
        return {
            "on_time_rate": (deliveries_data.get('on_time_deliveries', 0) / delivered) * 100,
            "avg_delay_hours": deliveries_data.get('avg_delay_hours', 0),
            "late_delivery_count": deliveries_data.get('late_deliveries', 0)
        }
    if metric_type == PerformanceMetricType.QUALITY_SCORE:
        return {
            "avg_quality_rating": quality_data.get('avg_quality_rating', 0),
            "quality_issues_count": quality_data.get('quality_issues', 0),
            "total_reviews": quality_data.get('total_reviews', 0)
        }
    # Other metric types carry no extra breakdown yet.
    return {}
|
||||
|
||||
async def _update_supplier_ratings(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    overall_performance: float,
    quality_performance: float
) -> None:
    """Update supplier's overall ratings from the latest 0-100 scores.

    NOTE(review): dividing by 20 maps 0-100 onto a 0-5 range, not the 1-5
    scale the inline comments claim (a score of 0 yields rating 0). Also,
    delivery_rating is fed the *overall* score, not a delivery-specific one.
    Confirm both are intended. Silently a no-op when the supplier row is
    missing; lookup is by id only (no tenant filter here).
    """
    stmt = select(Supplier).where(Supplier.id == supplier_id)
    result = await db.execute(stmt)
    supplier = result.scalar_one_or_none()

    if supplier:
        supplier.quality_rating = quality_performance / 20  # Convert to 1-5 scale
        supplier.delivery_rating = overall_performance / 20  # Convert to 1-5 scale
        db.add(supplier)
|
||||
|
||||
|
||||
class AlertService:
|
||||
"""Service for managing supplier alerts"""
|
||||
|
||||
def __init__(self):
    # Bind a service tag so every log line from this service is attributable.
    self.logger = logger.bind(service="alert_service")
|
||||
|
||||
@transactional
async def evaluate_performance_alerts(
    self,
    db: AsyncSession,
    tenant_id: UUID,
    supplier_id: Optional[UUID] = None
) -> List[SupplierAlert]:
    """Evaluate and create performance-based alerts.

    When supplier_id is given only that supplier is checked; otherwise all
    of the tenant's suppliers with status "active" are evaluated. Each
    supplier's most recent (last-7-days) metrics are run through the
    delivery, quality, and cost alert rules. Created alerts are added to the
    session; the @transactional decorator handles commit/rollback.

    Returns the list of alert rows created in this call.
    """
    try:
        alerts_created = []

        # Get suppliers to evaluate
        if supplier_id:
            # Explicit target: no status filter so inactive suppliers can
            # still be evaluated on demand.
            supplier_filter = and_(Supplier.id == supplier_id, Supplier.tenant_id == tenant_id)
        else:
            supplier_filter = and_(Supplier.tenant_id == tenant_id, Supplier.status == "active")

        stmt = select(Supplier).where(supplier_filter)
        result = await db.execute(stmt)
        suppliers = result.scalars().all()

        for supplier in suppliers:
            # Get recent performance metrics
            recent_metrics = await self._get_recent_performance_metrics(db, supplier.id, tenant_id)

            # Evaluate delivery performance alerts
            delivery_alerts = await self._evaluate_delivery_alerts(db, supplier, recent_metrics)
            alerts_created.extend(delivery_alerts)

            # Evaluate quality alerts
            quality_alerts = await self._evaluate_quality_alerts(db, supplier, recent_metrics)
            alerts_created.extend(quality_alerts)

            # Evaluate cost variance alerts
            cost_alerts = await self._evaluate_cost_alerts(db, supplier, recent_metrics)
            alerts_created.extend(cost_alerts)

        return alerts_created

    except Exception as e:
        self.logger.error("Error evaluating performance alerts", error=str(e))
        raise
|
||||
|
||||
async def _get_recent_performance_metrics(
    self,
    db: AsyncSession,
    supplier_id: UUID,
    tenant_id: UUID
) -> Dict[PerformanceMetricType, SupplierPerformanceMetric]:
    """Return the newest metric of each type calculated in the last 7 days."""
    since = datetime.now(timezone.utc) - timedelta(days=7)
    query = select(SupplierPerformanceMetric).where(
        and_(
            SupplierPerformanceMetric.supplier_id == supplier_id,
            SupplierPerformanceMetric.tenant_id == tenant_id,
            SupplierPerformanceMetric.calculated_at >= since
        )
    ).order_by(desc(SupplierPerformanceMetric.calculated_at))

    result = await db.execute(query)

    # Rows arrive newest-first, so setdefault keeps only the most recent
    # metric per metric_type.
    latest_by_type = {}
    for metric in result.scalars().all():
        latest_by_type.setdefault(metric.metric_type, metric)

    return latest_by_type
|
||||
|
||||
async def _evaluate_delivery_alerts(
    self,
    db: AsyncSession,
    supplier: Supplier,
    metrics: Dict[PerformanceMetricType, SupplierPerformanceMetric]
) -> List[SupplierAlert]:
    """Evaluate delivery performance alerts.

    Creates one alert when the supplier's latest delivery-performance metric
    is below the POOR_DELIVERY_RATE threshold; below 70 the alert escalates
    to CRITICAL. Returns the (possibly empty) list of created alerts.
    """
    alerts = []

    delivery_metric = metrics.get(PerformanceMetricType.DELIVERY_PERFORMANCE)
    if not delivery_metric:
        # No recent delivery metric to judge against.
        return alerts

    # Poor delivery performance alert
    if delivery_metric.metric_value < settings.POOR_DELIVERY_RATE:
        severity = AlertSeverity.CRITICAL if delivery_metric.metric_value < 70 else AlertSeverity.HIGH

        alert = SupplierAlert(
            tenant_id=supplier.tenant_id,
            supplier_id=supplier.id,
            # NOTE(review): a *delivery* alert is tagged AlertType.POOR_QUALITY
            # here — likely a copy/paste from the quality rule; confirm whether
            # AlertType has a delivery-specific member to use instead.
            alert_type=AlertType.POOR_QUALITY,
            severity=severity,
            title=f"Poor Delivery Performance - {supplier.name}",
            message=f"Delivery performance has dropped to {delivery_metric.metric_value:.1f}%",
            description=f"Supplier {supplier.name} delivery performance is below acceptable threshold",
            trigger_value=delivery_metric.metric_value,
            threshold_value=settings.POOR_DELIVERY_RATE,
            metric_type=PerformanceMetricType.DELIVERY_PERFORMANCE,
            performance_metric_id=delivery_metric.id,
            priority_score=90 if severity == AlertSeverity.CRITICAL else 70,
            business_impact="high" if severity == AlertSeverity.CRITICAL else "medium",
            recommended_actions=[
                {"action": "Review delivery processes with supplier"},
                {"action": "Request delivery improvement plan"},
                {"action": "Consider alternative suppliers"}
            ]
        )

        db.add(alert)
        alerts.append(alert)

    return alerts
|
||||
|
||||
async def _evaluate_quality_alerts(
    self,
    db: AsyncSession,
    supplier: Supplier,
    metrics: Dict[PerformanceMetricType, SupplierPerformanceMetric]
) -> List[SupplierAlert]:
    """Evaluate quality performance alerts.

    Creates one alert when the supplier's latest quality-score metric is
    below the POOR_QUALITY_RATE threshold; below 70 the alert escalates to
    CRITICAL. Returns the (possibly empty) list of created alerts.
    """
    alerts = []

    quality_metric = metrics.get(PerformanceMetricType.QUALITY_SCORE)
    if not quality_metric:
        # No recent quality metric to judge against.
        return alerts

    # Poor quality performance alert
    if quality_metric.metric_value < settings.POOR_QUALITY_RATE:
        severity = AlertSeverity.CRITICAL if quality_metric.metric_value < 70 else AlertSeverity.HIGH

        alert = SupplierAlert(
            tenant_id=supplier.tenant_id,
            supplier_id=supplier.id,
            alert_type=AlertType.POOR_QUALITY,
            severity=severity,
            title=f"Poor Quality Performance - {supplier.name}",
            message=f"Quality performance has dropped to {quality_metric.metric_value:.1f}%",
            description=f"Supplier {supplier.name} quality performance is below acceptable threshold",
            trigger_value=quality_metric.metric_value,
            threshold_value=settings.POOR_QUALITY_RATE,
            metric_type=PerformanceMetricType.QUALITY_SCORE,
            performance_metric_id=quality_metric.id,
            # Quality issues rank slightly above delivery issues.
            priority_score=95 if severity == AlertSeverity.CRITICAL else 75,
            business_impact="high" if severity == AlertSeverity.CRITICAL else "medium",
            recommended_actions=[
                {"action": "Conduct quality audit with supplier"},
                {"action": "Request quality improvement plan"},
                {"action": "Increase incoming inspection frequency"}
            ]
        )

        db.add(alert)
        alerts.append(alert)

    return alerts
|
||||
|
||||
async def _evaluate_cost_alerts(
    self,
    db: AsyncSession,
    supplier: Supplier,
    metrics: Dict[PerformanceMetricType, SupplierPerformanceMetric]
) -> List[SupplierAlert]:
    """Evaluate cost variance alerts based on historical pricing.

    Compares each ingredient's average unit price over the most recent
    LOOKBACK_DAYS window against the preceding window of the same length
    and raises alerts for significant increases (warning / critical) or
    decreases (savings opportunity).

    Args:
        db: Async session; created alerts are added to it (not committed here).
        supplier: Supplier whose purchase-order history is analyzed.
        metrics: Current performance metrics; unused by this evaluator but
            kept so all evaluators share the same signature.

    Returns:
        List of SupplierAlert objects added to the session. This evaluator is
        best-effort: any failure is logged and whatever was built so far is
        returned, so one broken evaluator does not abort the alert run.
    """
    alerts = []

    try:
        from shared.clients.procurement_client import ProcurementServiceClient
        from shared.config.base import get_settings
        # FIX: `timezone` added - datetime.now(timezone.utc) below needs it
        # and the module header only imports datetime/timedelta.
        from datetime import timedelta, timezone
        from collections import defaultdict
        from decimal import Decimal

        # Configuration thresholds
        WARNING_THRESHOLD = Decimal('0.10')   # 10% variance
        CRITICAL_THRESHOLD = Decimal('0.20')  # 20% variance
        SAVINGS_THRESHOLD = Decimal('0.10')   # 10% decrease
        MIN_SAMPLE_SIZE = 3
        LOOKBACK_DAYS = 30

        config = get_settings()
        procurement_client = ProcurementServiceClient(config, "suppliers")

        # Get purchase orders for this supplier from last 60 days
        # (30 days baseline window + 30 days current window)
        date_to = datetime.now(timezone.utc).date()
        date_from = date_to - timedelta(days=LOOKBACK_DAYS * 2)

        purchase_orders = await procurement_client.get_purchase_orders_by_supplier(
            tenant_id=str(supplier.tenant_id),
            supplier_id=str(supplier.id),
            date_from=date_from,
            date_to=date_to,
            status=None  # Get all statuses
        )

        if not purchase_orders or len(purchase_orders) < MIN_SAMPLE_SIZE:
            self.logger.debug("Insufficient purchase order history for cost variance analysis",
                              supplier_id=str(supplier.id),
                              po_count=len(purchase_orders) if purchase_orders else 0)
            return alerts

        # Group items by ingredient/product and collect price observations
        ingredient_prices = defaultdict(list)
        cutoff_date = date_to - timedelta(days=LOOKBACK_DAYS)

        for po in purchase_orders:
            po_date = datetime.fromisoformat(po.get('created_at').replace('Z', '+00:00')).date() if po.get('created_at') else None
            if not po_date:
                continue

            # Process items in the PO
            for item in po.get('items', []):
                ingredient_id = item.get('ingredient_id')
                ingredient_name = item.get('ingredient_name') or item.get('product_name', 'Unknown')
                unit_price = Decimal(str(item.get('unit_price', 0)))

                if not ingredient_id or unit_price <= 0:
                    continue

                # Categorize as historical (for baseline) or recent (for comparison)
                is_recent = po_date >= cutoff_date
                ingredient_prices[ingredient_id].append({
                    'price': unit_price,
                    'date': po_date,
                    'name': ingredient_name,
                    'is_recent': is_recent
                })

        def _build_alert(severity, title, description, recommended_actions,
                         ingredient_id, ingredient_name,
                         avg_historical, avg_recent, variance,
                         historical_n, recent_n):
            # Shared constructor for the three variance outcomes; only
            # severity, title, description and actions differ between them.
            # NOTE(review): lowercase enum members (AlertType.cost_variance,
            # AlertSeverity.critical, AlertStatus.active) are used here while
            # the quality evaluator uses uppercase members
            # (AlertType.POOR_QUALITY) - confirm the enum definitions cover both.
            return SupplierAlert(
                tenant_id=supplier.tenant_id,
                supplier_id=supplier.id,
                alert_type=AlertType.cost_variance,
                severity=severity,
                status=AlertStatus.active,
                title=title,
                description=description,
                affected_products=ingredient_name,
                detection_date=datetime.now(timezone.utc),
                metadata={
                    "ingredient_id": str(ingredient_id),
                    "ingredient_name": ingredient_name,
                    "avg_historical_price": float(avg_historical),
                    "avg_recent_price": float(avg_recent),
                    "variance_percent": float(variance * 100),
                    "historical_sample_size": historical_n,
                    "recent_sample_size": recent_n,
                    "lookback_days": LOOKBACK_DAYS
                },
                recommended_actions=recommended_actions
            )

        # Analyze each ingredient for cost variance
        for ingredient_id, price_history in ingredient_prices.items():
            if len(price_history) < MIN_SAMPLE_SIZE:
                continue

            # Split into historical baseline and recent prices
            historical_prices = [p['price'] for p in price_history if not p['is_recent']]
            recent_prices = [p['price'] for p in price_history if p['is_recent']]

            if not historical_prices or not recent_prices:
                continue

            # Calculate averages
            avg_historical = sum(historical_prices) / len(historical_prices)
            avg_recent = sum(recent_prices) / len(recent_prices)

            if avg_historical == 0:
                continue

            # Relative variance: positive = price increase, negative = decrease
            variance = (avg_recent - avg_historical) / avg_historical
            ingredient_name = price_history[0]['name']

            # Generate alerts based on variance
            if variance >= CRITICAL_THRESHOLD:
                # Critical price increase alert
                alert = _build_alert(
                    severity=AlertSeverity.critical,
                    title=f"Critical Price Increase: {ingredient_name}",
                    description=(
                        f"Significant price increase detected for {ingredient_name}. "
                        f"Average price increased from ${avg_historical:.2f} to ${avg_recent:.2f} "
                        f"({variance * 100:.1f}% increase) over the last {LOOKBACK_DAYS} days."
                    ),
                    recommended_actions=[
                        {"action": "Contact supplier to negotiate pricing"},
                        {"action": "Request explanation for price increase"},
                        {"action": "Evaluate alternative suppliers for this ingredient"},
                        {"action": "Review contract terms and pricing agreements"}
                    ],
                    ingredient_id=ingredient_id,
                    ingredient_name=ingredient_name,
                    avg_historical=avg_historical,
                    avg_recent=avg_recent,
                    variance=variance,
                    historical_n=len(historical_prices),
                    recent_n=len(recent_prices)
                )
                db.add(alert)
                alerts.append(alert)

            elif variance >= WARNING_THRESHOLD:
                # Warning price increase alert
                alert = _build_alert(
                    severity=AlertSeverity.warning,
                    title=f"Price Increase Detected: {ingredient_name}",
                    description=(
                        f"Moderate price increase detected for {ingredient_name}. "
                        f"Average price increased from ${avg_historical:.2f} to ${avg_recent:.2f} "
                        f"({variance * 100:.1f}% increase) over the last {LOOKBACK_DAYS} days."
                    ),
                    recommended_actions=[
                        {"action": "Monitor pricing trend over next few orders"},
                        {"action": "Contact supplier to discuss pricing"},
                        {"action": "Review market prices for this ingredient"}
                    ],
                    ingredient_id=ingredient_id,
                    ingredient_name=ingredient_name,
                    avg_historical=avg_historical,
                    avg_recent=avg_recent,
                    variance=variance,
                    historical_n=len(historical_prices),
                    recent_n=len(recent_prices)
                )
                db.add(alert)
                alerts.append(alert)

            elif variance <= -SAVINGS_THRESHOLD:
                # Cost savings opportunity alert
                alert = _build_alert(
                    severity=AlertSeverity.info,
                    title=f"Cost Savings Opportunity: {ingredient_name}",
                    description=(
                        f"Favorable price decrease detected for {ingredient_name}. "
                        f"Average price decreased from ${avg_historical:.2f} to ${avg_recent:.2f} "
                        f"({abs(variance) * 100:.1f}% decrease) over the last {LOOKBACK_DAYS} days. "
                        f"Consider increasing order volumes to capitalize on lower pricing."
                    ),
                    recommended_actions=[
                        {"action": "Consider increasing order quantities"},
                        {"action": "Negotiate long-term pricing lock at current rates"},
                        {"action": "Update forecast to account for favorable pricing"}
                    ],
                    ingredient_id=ingredient_id,
                    ingredient_name=ingredient_name,
                    avg_historical=avg_historical,
                    avg_recent=avg_recent,
                    variance=variance,
                    historical_n=len(historical_prices),
                    recent_n=len(recent_prices)
                )
                db.add(alert)
                alerts.append(alert)

        if alerts:
            self.logger.info("Cost variance alerts generated",
                             supplier_id=str(supplier.id),
                             alert_count=len(alerts))

    except Exception as e:
        # Best-effort evaluator: log and fall through to return what we have.
        self.logger.error("Error evaluating cost variance alerts",
                          supplier_id=str(supplier.id),
                          error=str(e),
                          exc_info=True)

    return alerts
|
||||
568
services/suppliers/app/services/supplier_service.py
Normal file
568
services/suppliers/app/services/supplier_service.py
Normal file
@@ -0,0 +1,568 @@
|
||||
# services/suppliers/app/services/supplier_service.py
|
||||
"""
|
||||
Supplier service for business logic operations
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.repositories.supplier_repository import SupplierRepository
|
||||
from app.models.suppliers import Supplier, SupplierStatus, SupplierType
|
||||
from app.schemas.suppliers import (
|
||||
SupplierCreate, SupplierUpdate, SupplierResponse,
|
||||
SupplierSearchParams, SupplierStatistics,
|
||||
SupplierPriceListCreate, SupplierPriceListUpdate, SupplierPriceListResponse
|
||||
)
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SupplierService:
    """Service for supplier management operations.

    Thin business-logic layer over SupplierRepository: enforces duplicate
    checks and the approval workflow, performs validation, and delegates all
    persistence to the repository.
    """

    def __init__(self, db: AsyncSession):
        self.db = db
        self.repository = SupplierRepository(db)

    async def create_supplier(
        self,
        tenant_id: UUID,
        supplier_data: SupplierCreate,
        created_by: UUID,
        created_by_role: str = "member"
    ) -> Supplier:
        """Create a new supplier with role-based auto-approval.

        Raises:
            ValueError: If the name or supplier code already exists for the tenant.
        """
        logger.info("Creating supplier", tenant_id=str(tenant_id), name=supplier_data.name, role=created_by_role)

        # Check for duplicate name
        existing = await self.repository.get_by_name(tenant_id, supplier_data.name)
        if existing:
            raise ValueError(f"Supplier with name '{supplier_data.name}' already exists")

        # Check for duplicate supplier code if provided
        if supplier_data.supplier_code:
            existing_code = await self.repository.get_by_supplier_code(
                tenant_id, supplier_data.supplier_code
            )
            if existing_code:
                raise ValueError(
                    f"Supplier with code '{supplier_data.supplier_code}' already exists"
                )

        # Generate supplier code if not provided
        supplier_code = supplier_data.supplier_code
        if not supplier_code:
            supplier_code = self._generate_supplier_code(supplier_data.name)

        # Fetch tenant supplier settings to determine approval workflow.
        # Best-effort: fall back to defaults if the tenant service is unavailable.
        try:
            from shared.clients.tenant_client import create_tenant_client
            tenant_client = create_tenant_client(settings)
            supplier_settings = await tenant_client.get_supplier_settings(str(tenant_id)) or {}
        except Exception as e:
            logger.warning("Failed to fetch tenant settings, using defaults", error=str(e))
            supplier_settings = {}

        # Determine initial status based on settings and role
        require_approval = supplier_settings.get('require_supplier_approval', True)
        auto_approve_admin = supplier_settings.get('auto_approve_for_admin_owner', True)

        # Auto-approval logic
        if not require_approval:
            # Workflow disabled globally - always auto-approve
            initial_status = SupplierStatus.active
            auto_approved = True
            logger.info("Supplier approval workflow disabled - auto-approving")
        elif auto_approve_admin and created_by_role.lower() in ['admin', 'owner']:
            # Auto-approve for admin/owner roles
            initial_status = SupplierStatus.active
            auto_approved = True
            logger.info("Auto-approving supplier created by admin/owner", role=created_by_role)
        else:
            # Require approval for other roles
            initial_status = SupplierStatus.pending_approval
            auto_approved = False
            logger.info("Supplier requires approval", role=created_by_role)

        # Assemble creation payload; counters/ratings start at zero
        create_data = supplier_data.model_dump(exclude_unset=True)
        create_data.update({
            'tenant_id': tenant_id,
            'supplier_code': supplier_code,
            'status': initial_status,
            'created_by': created_by,
            'updated_by': created_by,
            'quality_rating': 0.0,
            'delivery_rating': 0.0,
            'total_orders': 0,
            'total_amount': 0.0
        })

        # Set approval fields if auto-approved
        if auto_approved:
            create_data['approved_by'] = created_by
            create_data['approved_at'] = datetime.utcnow()

        supplier = await self.repository.create(create_data)

        logger.info(
            "Supplier created successfully",
            tenant_id=str(tenant_id),
            supplier_id=str(supplier.id),
            name=supplier.name,
            status=initial_status.value,
            auto_approved=auto_approved
        )

        return supplier

    async def get_supplier(self, supplier_id: UUID) -> Optional[Supplier]:
        """Get supplier by ID"""
        return await self.repository.get_by_id(supplier_id)

    async def get_suppliers_batch(self, tenant_id: UUID, supplier_ids: List[UUID]) -> List[Supplier]:
        """
        Get multiple suppliers by IDs in a single database query.

        This method is optimized for batch fetching to eliminate N+1 query patterns.
        Used when enriching multiple purchase orders or other entities with supplier data.

        Args:
            tenant_id: Tenant ID for security filtering
            supplier_ids: List of supplier UUIDs to fetch

        Returns:
            List of Supplier objects (may be fewer than requested if some IDs don't exist)
        """
        logger.info("Batch fetching suppliers", tenant_id=str(tenant_id), count=len(supplier_ids))
        return await self.repository.get_suppliers_by_ids(tenant_id, supplier_ids)

    async def update_supplier(
        self,
        supplier_id: UUID,
        supplier_data: SupplierUpdate,
        updated_by: UUID
    ) -> Optional[Supplier]:
        """Update supplier information.

        Returns None when the supplier does not exist.

        Raises:
            ValueError: If the new name or code collides with another supplier.
        """
        logger.info("Updating supplier", supplier_id=str(supplier_id))

        # FIX: repository calls are coroutines (awaited elsewhere in this
        # class) - the original code was missing `await` throughout this
        # method, so `supplier` was a coroutine object, not a Supplier.
        supplier = await self.repository.get_by_id(supplier_id)
        if not supplier:
            return None

        # Check for duplicate name if changing
        if supplier_data.name and supplier_data.name != supplier.name:
            existing = await self.repository.get_by_name(supplier.tenant_id, supplier_data.name)
            if existing:
                raise ValueError(f"Supplier with name '{supplier_data.name}' already exists")

        # Check for duplicate supplier code if changing
        if (supplier_data.supplier_code and
                supplier_data.supplier_code != supplier.supplier_code):
            existing_code = await self.repository.get_by_supplier_code(
                supplier.tenant_id, supplier_data.supplier_code
            )
            if existing_code:
                raise ValueError(
                    f"Supplier with code '{supplier_data.supplier_code}' already exists"
                )

        # Prepare update data
        update_data = supplier_data.model_dump(exclude_unset=True)
        update_data['updated_by'] = updated_by
        update_data['updated_at'] = datetime.utcnow()

        supplier = await self.repository.update(supplier_id, update_data)

        logger.info("Supplier updated successfully", supplier_id=str(supplier_id))
        return supplier

    async def delete_supplier(self, supplier_id: UUID) -> bool:
        """Delete supplier (soft delete by changing status).

        Returns False when the supplier does not exist.

        Raises:
            ValueError: If the supplier still has active purchase orders.
        """
        logger.info("Deleting supplier", supplier_id=str(supplier_id))

        supplier = await self.repository.get_by_id(supplier_id)  # FIX: was missing await
        if not supplier:
            return False

        # Check if supplier has active purchase orders via procurement service
        try:
            from shared.clients.procurement_client import ProcurementServiceClient
            from app.core.config import settings

            procurement_client = ProcurementServiceClient(settings)

            # Check for active purchase orders (pending, approved, in-progress)
            active_statuses = ['draft', 'pending_approval', 'approved', 'in_progress']
            active_pos_found = False

            for status in active_statuses:
                pos = await procurement_client.get_purchase_orders_by_supplier(
                    tenant_id=str(supplier.tenant_id),
                    supplier_id=str(supplier_id),
                    status=status,
                    limit=1  # We only need to know if any exist
                )
                if pos and len(pos) > 0:
                    active_pos_found = True
                    break

            if active_pos_found:
                logger.warning(
                    "Cannot delete supplier with active purchase orders",
                    supplier_id=str(supplier_id),
                    supplier_name=supplier.name
                )
                raise ValueError(
                    f"Cannot delete supplier '{supplier.name}' as it has active purchase orders. "
                    "Please complete or cancel all purchase orders first."
                )

        except ValueError:
            # FIX: the business-rule ValueError raised above was previously
            # swallowed by the generic `except Exception` below, so the
            # active-PO guard never actually blocked deletion. Re-raise it.
            raise
        except ImportError:
            logger.warning("Procurement client not available, skipping active PO check")
        except Exception as e:
            logger.error(
                "Error checking active purchase orders",
                supplier_id=str(supplier_id),
                error=str(e)
            )
            # Don't fail deletion if we can't check POs, just log warning
            logger.warning("Proceeding with deletion despite PO check failure")

        # Soft delete by changing status
        await self.repository.update(supplier_id, {  # FIX: was missing await
            'status': SupplierStatus.inactive,
            'updated_at': datetime.utcnow()
        })

        logger.info("Supplier deleted successfully", supplier_id=str(supplier_id))
        return True

    async def hard_delete_supplier(self, supplier_id: UUID, tenant_id: UUID) -> Dict[str, Any]:
        """
        Hard delete supplier and all associated data (permanent deletion)
        Returns deletion summary for audit purposes

        Raises:
            ValueError: If the supplier does not exist.
        """
        logger.info("Hard deleting supplier", supplier_id=str(supplier_id), tenant_id=str(tenant_id))

        # Delegate to repository layer - all DB access is done there
        deletion_summary = await self.repository.hard_delete_supplier(supplier_id)

        if not deletion_summary:
            raise ValueError("Supplier not found")

        logger.info(
            "Supplier hard deleted successfully",
            supplier_id=str(supplier_id),
            **deletion_summary
        )

        return deletion_summary

    async def search_suppliers(
        self,
        tenant_id: UUID,
        search_params: SupplierSearchParams
    ) -> List[Supplier]:
        """Search suppliers with filters"""
        return await self.repository.search_suppliers(
            tenant_id=tenant_id,
            search_term=search_params.search_term,
            supplier_type=search_params.supplier_type,
            status=search_params.status,
            limit=search_params.limit,
            offset=search_params.offset
        )

    async def get_active_suppliers(self, tenant_id: UUID) -> List[Supplier]:
        """Get all active suppliers"""
        return await self.repository.get_active_suppliers(tenant_id)

    async def get_suppliers_by_type(
        self,
        tenant_id: UUID,
        supplier_type: SupplierType
    ) -> List[Supplier]:
        """Get suppliers by type"""
        # FIX: was missing await (returned a coroutine, not a list)
        return await self.repository.get_suppliers_by_type(tenant_id, supplier_type)

    async def get_top_suppliers(self, tenant_id: UUID, limit: int = 10) -> List[Supplier]:
        """Get top performing suppliers"""
        # FIX: was missing await (returned a coroutine, not a list)
        return await self.repository.get_top_suppliers(tenant_id, limit)

    async def approve_supplier(
        self,
        supplier_id: UUID,
        approved_by: UUID,
        notes: Optional[str] = None
    ) -> Optional[Supplier]:
        """Approve a pending supplier.

        Returns None when the supplier is missing or not pending approval.
        """
        logger.info("Approving supplier", supplier_id=str(supplier_id))

        supplier = await self.repository.approve_supplier(supplier_id, approved_by)
        if not supplier:
            logger.warning("Failed to approve supplier - not found or not pending")
            return None

        # Append optional reviewer notes to the supplier record
        if notes:
            await self.repository.update(supplier_id, {
                'notes': (supplier.notes or "") + f"\nApproval notes: {notes}",
                'updated_at': datetime.utcnow()
            })

        logger.info("Supplier approved successfully", supplier_id=str(supplier_id))
        return supplier

    async def reject_supplier(
        self,
        supplier_id: UUID,
        rejection_reason: str,
        rejected_by: UUID
    ) -> Optional[Supplier]:
        """Reject a pending supplier.

        Returns None when the supplier is missing or not pending approval.
        """
        logger.info("Rejecting supplier", supplier_id=str(supplier_id))

        supplier = await self.repository.reject_supplier(
            supplier_id, rejection_reason, rejected_by
        )
        if not supplier:
            logger.warning("Failed to reject supplier - not found or not pending")
            return None

        logger.info("Supplier rejected successfully", supplier_id=str(supplier_id))
        return supplier

    async def update_supplier_performance(
        self,
        supplier_id: UUID,
        quality_rating: Optional[float] = None,
        delivery_rating: Optional[float] = None,
        order_increment: int = 0,
        amount_increment: float = 0.0
    ) -> Optional[Supplier]:
        """Update supplier performance metrics"""
        logger.info("Updating supplier performance", supplier_id=str(supplier_id))

        # FIX: was missing await (returned a coroutine, not a Supplier)
        return await self.repository.update_supplier_stats(
            supplier_id=supplier_id,
            total_orders_increment=order_increment,
            total_amount_increment=amount_increment,
            new_quality_rating=quality_rating,
            new_delivery_rating=delivery_rating
        )

    async def get_supplier_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get supplier statistics for dashboard"""
        return await self.repository.get_supplier_statistics(tenant_id)

    async def get_suppliers_needing_review(
        self,
        tenant_id: UUID,
        days_since_last_order: int = 30
    ) -> List[Supplier]:
        """Get suppliers that may need performance review"""
        # FIX: was missing await (returned a coroutine, not a list)
        return await self.repository.get_suppliers_needing_review(
            tenant_id, days_since_last_order
        )

    def _generate_supplier_code(self, supplier_name: str) -> str:
        """Generate supplier code from name.

        Takes the first 3 characters of up to 3 words (uppercased), truncates
        to 8 characters, and appends a 2-digit random suffix for uniqueness.
        """
        words = supplier_name.strip().split()[:3]  # Max 3 words
        code_parts = []

        for word in words:
            if len(word) >= 3:
                code_parts.append(word[:3].upper())
            else:
                code_parts.append(word.upper())

        base_code = "".join(code_parts)[:8]  # Max 8 characters

        # Add random suffix to ensure uniqueness
        import random
        import string
        suffix = ''.join(random.choices(string.digits, k=2))

        return f"{base_code}{suffix}"

    async def validate_supplier_data(
        self,
        tenant_id: UUID,
        supplier_data: Dict[str, Any],
        supplier_id: Optional[UUID] = None
    ) -> Dict[str, str]:
        """Validate supplier data and return a field-name -> error-message map.

        An empty dict means the data is valid.
        NOTE(review): `tenant_id` and `supplier_id` are currently unused here;
        they are presumably reserved for uniqueness checks - confirm.
        """
        errors = {}

        # Check required fields
        if not supplier_data.get('name'):
            errors['name'] = "Supplier name is required"

        # Check email format if provided
        email = supplier_data.get('email')
        if email:
            import re
            email_pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
            if not re.match(email_pattern, email):
                errors['email'] = "Invalid email format"

        # Check phone format if provided
        phone = supplier_data.get('phone')
        if phone:
            # Basic phone validation (digits, spaces, dashes, parentheses)
            import re
            phone_pattern = r'^[\d\s\-\(\)\+]+$'
            if not re.match(phone_pattern, phone):
                errors['phone'] = "Invalid phone format"

        # Check lead time range
        lead_time = supplier_data.get('standard_lead_time')
        if lead_time is not None:
            if lead_time < 0 or lead_time > 365:
                errors['standard_lead_time'] = "Lead time must be between 0 and 365 days"

        # Check credit limit
        credit_limit = supplier_data.get('credit_limit')
        if credit_limit is not None and credit_limit < 0:
            errors['credit_limit'] = "Credit limit cannot be negative"

        # Check minimum order amount
        min_order = supplier_data.get('minimum_order_amount')
        if min_order is not None and min_order < 0:
            errors['minimum_order_amount'] = "Minimum order amount cannot be negative"

        return errors

    async def get_supplier_price_lists(
        self,
        supplier_id: UUID,
        tenant_id: UUID,
        is_active: bool = True
    ) -> List[Any]:
        """Get all price list items for a supplier"""
        logger.info(
            "Getting supplier price lists",
            supplier_id=str(supplier_id),
            tenant_id=str(tenant_id),
            is_active=is_active
        )

        return await self.repository.get_supplier_price_lists(
            supplier_id=supplier_id,
            tenant_id=tenant_id,
            is_active=is_active
        )

    async def get_supplier_price_list(
        self,
        price_list_id: UUID,
        tenant_id: UUID
    ) -> Optional[Any]:
        """Get specific price list item"""
        logger.info(
            "Getting supplier price list item",
            price_list_id=str(price_list_id),
            tenant_id=str(tenant_id)
        )

        return await self.repository.get_supplier_price_list(
            price_list_id=price_list_id,
            tenant_id=tenant_id
        )

    async def create_supplier_price_list(
        self,
        supplier_id: UUID,
        price_list_data: SupplierPriceListCreate,
        tenant_id: UUID,
        created_by: UUID
    ) -> Any:
        """Create a new price list item for a supplier"""
        logger.info(
            "Creating supplier price list item",
            supplier_id=str(supplier_id),
            tenant_id=str(tenant_id)
        )

        # Prepare creation data
        create_data = price_list_data.model_dump(exclude_unset=True)
        create_data.update({
            'tenant_id': tenant_id,
            'supplier_id': supplier_id,
            'created_by': created_by,
            'updated_by': created_by,
        })

        # Default price_per_unit to unit_price when not supplied
        if 'price_per_unit' not in create_data or create_data['price_per_unit'] is None:
            create_data['price_per_unit'] = create_data['unit_price']

        price_list = await self.repository.create_supplier_price_list(create_data)

        logger.info(
            "Supplier price list item created successfully",
            price_list_id=str(price_list.id),
            supplier_id=str(supplier_id)
        )

        return price_list

    async def update_supplier_price_list(
        self,
        price_list_id: UUID,
        price_list_data: SupplierPriceListUpdate,
        tenant_id: UUID,
        updated_by: UUID
    ) -> Any:
        """Update a price list item"""
        logger.info(
            "Updating supplier price list item",
            price_list_id=str(price_list_id),
            tenant_id=str(tenant_id)
        )

        # Prepare update data
        update_data = price_list_data.model_dump(exclude_unset=True)
        update_data['updated_by'] = updated_by
        # NOTE(review): naive local time here vs datetime.utcnow() elsewhere
        # in this service - confirm which the column expects.
        update_data['updated_at'] = datetime.now()

        price_list = await self.repository.update_supplier_price_list(
            price_list_id=price_list_id,
            update_data=update_data
        )

        logger.info(
            "Supplier price list item updated successfully",
            price_list_id=str(price_list_id)
        )

        return price_list

    async def delete_supplier_price_list(
        self,
        price_list_id: UUID,
        tenant_id: UUID
    ) -> bool:
        """Delete a price list item"""
        logger.info(
            "Deleting supplier price list item",
            price_list_id=str(price_list_id),
            tenant_id=str(tenant_id)
        )

        success = await self.repository.delete_supplier_price_list(
            price_list_id=price_list_id
        )

        logger.info(
            "Supplier price list item deletion completed",
            price_list_id=str(price_list_id),
            success=success
        )

        return success
|
||||
191
services/suppliers/app/services/tenant_deletion_service.py
Normal file
191
services/suppliers/app/services/tenant_deletion_service.py
Normal file
@@ -0,0 +1,191 @@
|
||||
"""
|
||||
Suppliers Service - Tenant Data Deletion
|
||||
Handles deletion of all supplier-related data for a tenant
|
||||
"""
|
||||
from typing import Dict
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, delete, func
|
||||
import structlog
|
||||
|
||||
from shared.services.tenant_deletion import BaseTenantDataDeletionService, TenantDataDeletionResult
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SuppliersTenantDeletionService(BaseTenantDataDeletionService):
|
||||
"""Service for deleting all supplier-related data for a tenant"""
|
||||
|
||||
def __init__(self, db_session: AsyncSession):
    """Initialize the suppliers-domain deletion service.

    Args:
        db_session: Async SQLAlchemy session used for all count and
            delete queries issued by this service.
    """
    # Base class records the owning service's name for result reporting
    super().__init__("suppliers-service")
    self.db = db_session
||||
|
||||
async def get_tenant_data_preview(self, tenant_id: str) -> Dict[str, int]:
    """Return per-table row counts that a tenant deletion would remove.

    Counts suppliers, supplier products, purchase orders, purchase order
    items, and supplier performance records belonging to *tenant_id*.
    Returns an empty dict if counting fails entirely; callers should treat
    that as "preview unavailable" rather than "nothing to delete".
    """

    try:
        preview = {}

        # Import models here to avoid circular imports
        from app.models.suppliers import (
            Supplier,
            SupplierProduct,
            PurchaseOrder,
            PurchaseOrderItem,
            SupplierPerformance
        )

        # Count suppliers; scalar() can return None on an empty result,
        # hence the `or 0` fallback on each count below.
        supplier_count = await self.db.scalar(
            select(func.count(Supplier.id)).where(Supplier.tenant_id == tenant_id)
        )
        preview["suppliers"] = supplier_count or 0

        # Count supplier products
        product_count = await self.db.scalar(
            select(func.count(SupplierProduct.id)).where(SupplierProduct.tenant_id == tenant_id)
        )
        preview["supplier_products"] = product_count or 0

        # Count purchase orders
        po_count = await self.db.scalar(
            select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == tenant_id)
        )
        preview["purchase_orders"] = po_count or 0

        # Count purchase order items (CASCADE will delete these).
        # Items carry no tenant_id themselves, so join through their parent PO.
        poi_count = await self.db.scalar(
            select(func.count(PurchaseOrderItem.id))
            .join(PurchaseOrder)
            .where(PurchaseOrder.tenant_id == tenant_id)
        )
        preview["purchase_order_items"] = poi_count or 0

        # Count supplier performance records
        try:
            perf_count = await self.db.scalar(
                select(func.count(SupplierPerformance.id)).where(SupplierPerformance.tenant_id == tenant_id)
            )
            preview["supplier_performance"] = perf_count or 0
        except Exception:
            # Table might not exist in all versions
            preview["supplier_performance"] = 0

        return preview

    except Exception as e:
        # Deliberate best-effort: the preview is informational, so log and
        # return an empty mapping instead of propagating the failure.
        logger.error("Error getting deletion preview",
                     tenant_id=tenant_id,
                     error=str(e))
        return {}
|
||||
|
||||
async def delete_tenant_data(self, tenant_id: str) -> TenantDataDeletionResult:
    """Delete all supplier-service data belonging to *tenant_id*.

    Rows are removed child-first so foreign-key constraints are never
    violated: purchase order items -> purchase orders -> supplier
    performance -> supplier products -> suppliers.  Each step is
    best-effort: a failure is logged and recorded on the result, and the
    remaining steps still run.  All successful deletions are committed
    together at the end; a fatal error triggers a rollback instead.

    Args:
        tenant_id: Tenant whose rows should be purged.

    Returns:
        TenantDataDeletionResult carrying per-table deletion counts and
        any errors encountered.  This method does not raise.
    """
    result = TenantDataDeletionResult(tenant_id, self.service_name)

    try:
        # Import models here to avoid circular imports
        from app.models.suppliers import (
            Supplier,
            SupplierProduct,
            PurchaseOrder,
            PurchaseOrderItem,
            SupplierPerformance
        )

        # NOTE(review): the preview value was unused in the original code
        # as well; the call is kept only for its read-only count queries
        # and logging side effects.  Confirm whether the preview was meant
        # to be attached to the result.
        await self.get_tenant_data_preview(tenant_id)

        # 1. Purchase order items first (foreign key to purchase orders).
        await self._delete_tenant_rows(
            delete(PurchaseOrderItem).where(
                PurchaseOrderItem.purchase_order_id.in_(
                    select(PurchaseOrder.id).where(PurchaseOrder.tenant_id == tenant_id)
                )
            ),
            item_key="purchase_order_items",
            result=result,
            tenant_id=tenant_id,
            error_log="Error deleting purchase order items",
            error_label="Purchase order item deletion",
        )

        # 2. Purchase orders.
        await self._delete_tenant_rows(
            delete(PurchaseOrder).where(PurchaseOrder.tenant_id == tenant_id),
            item_key="purchase_orders",
            result=result,
            tenant_id=tenant_id,
            success_log="Deleted purchase orders for tenant",
            error_log="Error deleting purchase orders",
            error_label="Purchase order deletion",
        )

        # 3. Supplier performance records.  The table might not exist in
        #    all versions, hence the softer warning-level log on failure.
        await self._delete_tenant_rows(
            delete(SupplierPerformance).where(SupplierPerformance.tenant_id == tenant_id),
            item_key="supplier_performance",
            result=result,
            tenant_id=tenant_id,
            error_log="Error deleting supplier performance (table might not exist)",
            error_label="Supplier performance deletion",
            warn_only=True,
        )

        # 4. Supplier products.
        await self._delete_tenant_rows(
            delete(SupplierProduct).where(SupplierProduct.tenant_id == tenant_id),
            item_key="supplier_products",
            result=result,
            tenant_id=tenant_id,
            success_log="Deleted supplier products for tenant",
            error_log="Error deleting supplier products",
            error_label="Supplier product deletion",
        )

        # 5. Suppliers last (parent table).
        await self._delete_tenant_rows(
            delete(Supplier).where(Supplier.tenant_id == tenant_id),
            item_key="suppliers",
            result=result,
            tenant_id=tenant_id,
            success_log="Deleted suppliers for tenant",
            error_log="Error deleting suppliers",
            error_label="Supplier deletion",
        )

        # Commit all deletions
        await self.db.commit()

        logger.info("Tenant data deletion completed",
                    tenant_id=tenant_id,
                    deleted_counts=result.deleted_counts)

    except Exception as e:
        logger.error("Fatal error during tenant data deletion",
                     tenant_id=tenant_id,
                     error=str(e))
        await self.db.rollback()
        result.add_error(f"Fatal error: {str(e)}")

    return result

async def _delete_tenant_rows(self, stmt, *, item_key, result, tenant_id,
                              error_log, error_label,
                              success_log=None, warn_only=False):
    """Execute one tenant-scoped DELETE, recording the row count (and any
    failure) on *result* instead of raising.

    Args:
        stmt: SQLAlchemy DELETE statement to execute.
        item_key: Key under which the row count is recorded on *result*.
        result: TenantDataDeletionResult accumulating counts and errors.
        tenant_id: Tenant id, bound into the structured log events.
        error_log: Structured-log event name emitted when the delete fails.
        error_label: Prefix for the error message stored on *result*.
        success_log: Optional log event name emitted with the row count
            when the delete succeeds; omitted steps log nothing on success.
        warn_only: When True, a failure is logged at warning level rather
            than error level (used for optionally-present tables).
    """
    try:
        outcome = await self.db.execute(stmt)
        result.add_deleted_items(item_key, outcome.rowcount)
        if success_log:
            logger.info(success_log, tenant_id=tenant_id, count=outcome.rowcount)
    except Exception as e:
        # Best-effort semantics: record the failure and let the caller
        # continue with the remaining deletion steps.
        log = logger.warning if warn_only else logger.error
        log(error_log, tenant_id=tenant_id, error=str(e))
        result.add_error(f"{error_label}: {str(e)}")
|
||||
Reference in New Issue
Block a user