Improve the frontend

This commit is contained in:
Urtzi Alfaro
2025-10-21 19:50:07 +02:00
parent 05da20357d
commit 8d30172483
105 changed files with 14699 additions and 4630 deletions

View File

@@ -0,0 +1,7 @@
"""
Alert Processor API Endpoints
"""
from .analytics import router as analytics_router
__all__ = ['analytics_router']

View File

@@ -0,0 +1,238 @@
"""
Alert Analytics API Endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, Path, Body, Query
from typing import List, Dict, Any, Optional
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
from shared.auth.decorators import get_current_user_dep
logger = structlog.get_logger()
router = APIRouter()
# Schemas
class InteractionCreate(BaseModel):
    """Request schema for recording a single alert interaction.

    ``interaction_type`` is a free-form string here; the repository layer
    recognizes 'acknowledged', 'resolved', 'snoozed' and 'dismissed' and
    updates the alert's status accordingly.
    """
    alert_id: str = Field(..., description="Alert ID")
    interaction_type: str = Field(..., description="Type of interaction: acknowledged, resolved, snoozed, dismissed")
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")
class InteractionBatchCreate(BaseModel):
    """Request schema for recording several interactions in one call.

    Each dict is expected to carry at least 'alert_id' and
    'interaction_type' (optionally 'metadata'); the endpoint injects
    'user_id' from the authenticated caller before persisting.
    """
    interactions: List[Dict[str, Any]] = Field(..., description="List of interactions to create")
class AnalyticsResponse(BaseModel):
    """Response schema for the full analytics payload.

    Field names are camelCase to match the frontend consumer.
    """
    trends: List[Dict[str, Any]]          # per-day counts with severity breakdown
    averageResponseTime: int              # minutes from alert creation to acknowledgment
    topCategories: List[Dict[str, Any]]   # top alert types with count + percentage
    totalAlerts: int
    resolvedAlerts: int
    activeAlerts: int
    resolutionRate: int                   # percentage 0-100 of resolved/total
    predictedDailyAverage: int
    busiestDay: str                       # day-of-week name, or 'N/A' when no data
async def get_analytics_repository(current_user: dict = Depends(get_current_user_dep)):
    """FastAPI dependency that yields an ``AlertAnalyticsRepository``.

    Fix: the previous version *returned* the inner generator function, so
    ``Depends(get_analytics_repository)`` would have injected a function
    object instead of a repository. Rewritten as an async-generator
    dependency: FastAPI enters the session context, injects the repository,
    and closes the session after the request completes.

    Requires an authenticated caller (``current_user``), mirroring the
    endpoint-level auth in this module.
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    config = AlertProcessorConfig()
    # NOTE(review): this builds a new database manager per request, as the
    # endpoints below also do — consider a module-level singleton so pooled
    # connections are reused and disposed properly.
    db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
    async with db_manager.get_session() as session:
        yield AlertAnalyticsRepository(session)
@router.post(
    "/api/v1/tenants/{tenant_id}/alerts/{alert_id}/interactions",
    response_model=Dict[str, Any],
    summary="Track alert interaction"
)
async def create_interaction(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    alert_id: UUID = Path(..., description="Alert ID"),
    interaction: InteractionCreate = Body(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Track a user interaction with an alert.

    - **acknowledged**: User has seen and acknowledged the alert
    - **resolved**: User has resolved the alert
    - **snoozed**: User has snoozed the alert
    - **dismissed**: User has dismissed the alert

    Returns the stored interaction, including ``response_time_seconds``
    (elapsed time from alert creation, computed by the repository).
    Responds 404 when the alert does not exist, 500 on any other failure.
    """
    # Imported lazily inside the handler — presumably to avoid import cycles
    # at module load; confirm before hoisting to module level.
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager
    try:
        config = AlertProcessorConfig()
        # NOTE(review): a fresh database manager per request — consider a
        # shared module-level instance so connections are pooled.
        db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
        async with db_manager.get_session() as session:
            repo = AlertAnalyticsRepository(session)
            alert_interaction = await repo.create_interaction(
                tenant_id=tenant_id,
                alert_id=alert_id,
                user_id=UUID(current_user['user_id']),
                interaction_type=interaction.interaction_type,
                metadata=interaction.metadata
            )
            return {
                'id': str(alert_interaction.id),
                'alert_id': str(alert_interaction.alert_id),
                'interaction_type': alert_interaction.interaction_type,
                'interacted_at': alert_interaction.interacted_at.isoformat(),
                'response_time_seconds': alert_interaction.response_time_seconds
            }
    except ValueError as e:
        # The repository raises ValueError for a missing alert -> 404.
        logger.error("Invalid alert interaction", error=str(e), alert_id=str(alert_id))
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        logger.error("Failed to create alert interaction", error=str(e), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail=f"Failed to create interaction: {str(e)}")
@router.post(
    "/api/v1/tenants/{tenant_id}/alerts/interactions/batch",
    response_model=Dict[str, Any],
    summary="Track multiple alert interactions"
)
async def create_interactions_batch(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    batch: InteractionBatchCreate = Body(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Track multiple alert interactions in a single request.

    Useful for offline sync or bulk operations. Persistence is best-effort:
    the repository skips items that fail and returns only the successfully
    created interactions, so ``created_count`` may be smaller than the
    number of items submitted.
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager
    try:
        config = AlertProcessorConfig()
        db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
        async with db_manager.get_session() as session:
            repo = AlertAnalyticsRepository(session)
            # Add user_id to each interaction.
            # NOTE(review): this mutates the request payload dicts in place;
            # harmless here since the payload is request-scoped.
            for interaction in batch.interactions:
                interaction['user_id'] = current_user['user_id']
            created_interactions = await repo.create_interactions_batch(
                tenant_id=tenant_id,
                interactions=batch.interactions
            )
            return {
                'created_count': len(created_interactions),
                'interactions': [
                    {
                        'id': str(i.id),
                        'alert_id': str(i.alert_id),
                        'interaction_type': i.interaction_type,
                        'interacted_at': i.interacted_at.isoformat()
                    }
                    for i in created_interactions
                ]
            }
    except Exception as e:
        logger.error("Failed to create batch interactions", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to create batch interactions: {str(e)}")
@router.get(
    "/api/v1/tenants/{tenant_id}/alerts/analytics",
    response_model=AnalyticsResponse,
    summary="Get alert analytics"
)
async def get_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep)
):
    """Return the complete analytics payload for a tenant's alerts.

    Includes the daily trend series with severity breakdown, average
    response time, top categories, total/resolved/active counts, the
    resolution rate, a predicted daily average, and the busiest weekday.
    Responds 500 on any failure.
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        manager = create_database_manager(
            AlertProcessorConfig().DATABASE_URL, "alert-processor"
        )
        async with manager.get_session() as db_session:
            repository = AlertAnalyticsRepository(db_session)
            return await repository.get_full_analytics(
                tenant_id=tenant_id, days=days
            )
    except Exception as exc:
        logger.error("Failed to get alert analytics", error=str(exc), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to get analytics: {str(exc)}")
@router.get(
    "/api/v1/tenants/{tenant_id}/alerts/analytics/trends",
    response_model=List[Dict[str, Any]],
    summary="Get alert trends"
)
async def get_trends(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep)
):
    """Return per-day alert counts with severity breakdown for the window."""
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        manager = create_database_manager(
            AlertProcessorConfig().DATABASE_URL, "alert-processor"
        )
        async with manager.get_session() as db_session:
            repository = AlertAnalyticsRepository(db_session)
            return await repository.get_analytics_trends(
                tenant_id=tenant_id, days=days
            )
    except Exception as exc:
        logger.error("Failed to get alert trends", error=str(exc), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to get trends: {str(exc)}")

View File

@@ -0,0 +1,84 @@
"""
Alert Processor API Server
Provides REST API endpoints for alert analytics
"""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import structlog
from app.config import AlertProcessorConfig
from app.api import analytics_router
from shared.database.base import create_database_manager
logger = structlog.get_logger()

# Create FastAPI app
app = FastAPI(
    title="Alert Processor API",
    description="API for alert analytics and interaction tracking",
    version="1.0.0"
)

# CORS middleware
# NOTE(review): wildcard origins combined with allow_credentials=True is
# disallowed by the CORS spec (browsers reject `Access-Control-Allow-Origin: *`
# on credentialed requests) — pin explicit origins before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(analytics_router, tags=["analytics"])

# Initialize database
# Module-level manager shared by the startup/shutdown hooks below.
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor-api")
@app.on_event("startup")
async def startup():
"""Initialize on startup"""
logger.info("Alert Processor API starting up")
# Create tables
try:
from app.models.alerts import Base
await db_manager.create_tables(Base.metadata)
logger.info("Database tables ensured")
except Exception as e:
logger.error("Failed to create tables", error=str(e))
@app.on_event("shutdown")
async def shutdown():
"""Cleanup on shutdown"""
logger.info("Alert Processor API shutting down")
await db_manager.close_connections()
@app.get("/health")
async def health_check():
"""Health check endpoint"""
return {"status": "healthy", "service": "alert-processor-api"}
@app.get("/")
async def root():
"""Root endpoint"""
return {
"service": "Alert Processor API",
"version": "1.0.0",
"endpoints": {
"health": "/health",
"docs": "/docs",
"analytics": "/api/v1/tenants/{tenant_id}/alerts/analytics",
"interactions": "/api/v1/tenants/{tenant_id}/alerts/{alert_id}/interactions"
}
}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8010)

View File

@@ -55,14 +55,14 @@ class NotificationServiceClient(BaseServiceClient):
"""Send notification via notification service"""
try:
response = await self.post(
"/api/v1/notifications/send",
json={
"notifications/send",
data={
"tenant_id": tenant_id,
"notification": notification,
"channels": channels
}
)
return response
return response if response else {"status": "failed", "error": "No response from notification service"}
except Exception as e:
logger.error("Failed to send notification", error=str(e), tenant_id=tenant_id)
return {"status": "failed", "error": str(e)}
@@ -187,8 +187,8 @@ class AlertProcessorService:
},
channels=channels
)
if notification_result.get('status') == 'success':
if notification_result and notification_result.get('status') == 'success':
self.notifications_sent += 1
# Stream to SSE for real-time dashboard (always)
@@ -255,16 +255,27 @@ class AlertProcessorService:
return alert_dict
async def _cache_active_alerts(self, tenant_id: str):
"""Cache all active alerts for a tenant in Redis for quick SSE access"""
"""
Cache today's active alerts for a tenant in Redis for quick SSE access
Only caches alerts from today (00:00 UTC onwards) to avoid flooding
the dashboard with historical alerts on initial connection.
Analytics endpoints should query the database directly for historical data.
"""
try:
from app.models.alerts import Alert, AlertStatus
from sqlalchemy import select
async with self.db_manager.get_session() as session:
# Query all active alerts for this tenant
# Calculate start of today (UTC) to filter only today's alerts
today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
# Query only today's active alerts for this tenant
# This prevents showing yesterday's alerts on dashboard initial load
query = select(Alert).where(
Alert.tenant_id == tenant_id,
Alert.status == AlertStatus.ACTIVE
Alert.status == AlertStatus.ACTIVE,
Alert.created_at >= today_start # Only today's alerts
).order_by(Alert.created_at.desc()).limit(50)
result = await session.execute(query)
@@ -294,9 +305,10 @@ class AlertProcessorService:
json.dumps(active_items)
)
logger.debug("Cached active alerts in Redis",
logger.debug("Cached today's active alerts in Redis",
tenant_id=tenant_id,
count=len(active_items))
count=len(active_items),
filter_date=today_start.isoformat())
except Exception as e:
logger.error("Failed to cache active alerts",

View File

@@ -3,15 +3,20 @@
Alert models for the alert processor service
"""
from sqlalchemy import Column, String, Text, DateTime, JSON, Enum
from sqlalchemy.dialects.postgresql import UUID
from datetime import datetime
from sqlalchemy import Column, String, Text, DateTime, JSON, Enum, Integer, ForeignKey
from sqlalchemy.dialects.postgresql import UUID, JSONB
from datetime import datetime, timezone
import uuid
import enum
from shared.database.base import Base
def utc_now():
    """Return the current moment as a timezone-aware UTC ``datetime``."""
    current = datetime.now(tz=timezone.utc)
    return current
class AlertStatus(enum.Enum):
"""Alert status values"""
ACTIVE = "active"
@@ -28,6 +33,14 @@ class AlertSeverity(enum.Enum):
URGENT = "urgent"
class InteractionType(enum.Enum):
    """Alert interaction types.

    Documents the recognized values; the ``AlertInteraction.interaction_type``
    column stores plain strings rather than this enum.
    """
    ACKNOWLEDGED = "acknowledged"  # user has seen the alert
    RESOLVED = "resolved"          # user resolved the underlying issue
    SNOOZED = "snoozed"            # user postponed the alert
    DISMISSED = "dismissed"        # user discarded the alert
class Alert(Base):
"""Alert records for the alert processor service"""
__tablename__ = "alerts"
@@ -51,6 +64,27 @@ class Alert(Base):
alert_metadata = Column(JSON, nullable=True) # Additional alert-specific data
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, index=True)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
resolved_at = Column(DateTime, nullable=True)
created_at = Column(DateTime(timezone=True), default=utc_now, index=True)
updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
resolved_at = Column(DateTime(timezone=True), nullable=True)
class AlertInteraction(Base):
    """Alert interaction tracking for analytics.

    One row per user action on an alert (see InteractionType for the
    recognized values). ``response_time_seconds`` is the elapsed time from
    the alert's creation to this interaction, filled in by the repository.
    """
    __tablename__ = "alert_interactions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    # Cascade delete: interactions disappear with their parent alert.
    alert_id = Column(UUID(as_uuid=True), ForeignKey('alerts.id', ondelete='CASCADE'), nullable=False)
    user_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Interaction details — all timestamps are timezone-aware (UTC defaults).
    interaction_type = Column(String(50), nullable=False, index=True)
    interacted_at = Column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
    response_time_seconds = Column(Integer, nullable=True)

    # Context — arbitrary per-interaction metadata (JSONB for indexability).
    interaction_metadata = Column(JSONB, nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), nullable=False, default=utc_now)

View File

@@ -0,0 +1,7 @@
"""
Alert Processor Repositories
"""
from .analytics_repository import AlertAnalyticsRepository
__all__ = ['AlertAnalyticsRepository']

View File

@@ -0,0 +1,382 @@
"""
Alert Analytics Repository
Handles all database operations for alert analytics
"""
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta, timezone
from uuid import UUID

from sqlalchemy import select, func, and_, extract, case
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.alerts import Alert, AlertInteraction, AlertSeverity, AlertStatus
logger = structlog.get_logger()
class AlertAnalyticsRepository:
    """Repository for alert analytics operations.

    Fix over the previous revision: all "now" and window computations use
    timezone-aware UTC datetimes. The Alert/AlertInteraction timestamp
    columns are declared ``DateTime(timezone=True)``, and subtracting or
    comparing a naive ``datetime.utcnow()`` against an aware value raises
    ``TypeError`` — so every naive timestamp here was a latent runtime error.
    """

    def __init__(self, session: AsyncSession):
        # The session is owned by the caller; this repository never opens
        # or closes it (it does commit in create_interaction).
        self.session = session

    @staticmethod
    def _utc_now() -> datetime:
        """Current time as an aware UTC datetime."""
        return datetime.now(timezone.utc)

    def _window_start(self, days: int) -> datetime:
        """Aware-UTC start of the rolling N-day analysis window."""
        return self._utc_now() - timedelta(days=days)

    async def create_interaction(
        self,
        tenant_id: UUID,
        alert_id: UUID,
        user_id: UUID,
        interaction_type: str,
        metadata: Optional[Dict[str, Any]] = None
    ) -> AlertInteraction:
        """Create a new alert interaction and sync the alert's status.

        Computes ``response_time_seconds`` as the elapsed time from the
        alert's creation. Commits the session.

        Raises:
            ValueError: if ``alert_id`` does not exist (the API maps this
                to a 404 response).
        """
        # Load the alert to compute response time and update its status.
        alert_query = select(Alert).where(Alert.id == alert_id)
        result = await self.session.execute(alert_query)
        alert = result.scalar_one_or_none()
        if not alert:
            raise ValueError(f"Alert {alert_id} not found")

        now = self._utc_now()
        created_at = alert.created_at
        if created_at is not None and created_at.tzinfo is None:
            # Defensive: legacy rows written before the columns became
            # tz-aware — treat stored naive timestamps as UTC.
            created_at = created_at.replace(tzinfo=timezone.utc)
        response_time_seconds = (
            int((now - created_at).total_seconds()) if created_at is not None else None
        )

        interaction = AlertInteraction(
            tenant_id=tenant_id,
            alert_id=alert_id,
            user_id=user_id,
            interaction_type=interaction_type,
            interacted_at=now,
            response_time_seconds=response_time_seconds,
            interaction_metadata=metadata or {}
        )
        self.session.add(interaction)

        # Keep the alert's lifecycle status in sync with the interaction.
        if interaction_type == 'acknowledged' and alert.status == AlertStatus.ACTIVE:
            alert.status = AlertStatus.ACKNOWLEDGED
        elif interaction_type == 'resolved':
            alert.status = AlertStatus.RESOLVED
            alert.resolved_at = now
        elif interaction_type == 'dismissed':
            alert.status = AlertStatus.IGNORED

        await self.session.commit()
        await self.session.refresh(interaction)

        logger.info(
            "Alert interaction created",
            alert_id=str(alert_id),
            interaction_type=interaction_type,
            response_time=response_time_seconds
        )
        return interaction

    async def create_interactions_batch(
        self,
        tenant_id: UUID,
        interactions: List[Dict[str, Any]]
    ) -> List[AlertInteraction]:
        """Create multiple interactions, best-effort.

        Items that fail (e.g. unknown alert_id, malformed UUIDs) are logged
        and skipped; only the successfully created interactions are returned.
        """
        created_interactions = []
        for interaction_data in interactions:
            try:
                interaction = await self.create_interaction(
                    tenant_id=tenant_id,
                    alert_id=UUID(interaction_data['alert_id']),
                    user_id=UUID(interaction_data['user_id']),
                    interaction_type=interaction_data['interaction_type'],
                    metadata=interaction_data.get('metadata')
                )
                created_interactions.append(interaction)
            except Exception as e:
                logger.error(
                    "Failed to create interaction in batch",
                    error=str(e),
                    alert_id=interaction_data.get('alert_id')
                )
                continue
        return created_interactions

    async def get_analytics_trends(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> List[Dict[str, Any]]:
        """Get per-day alert counts (with severity breakdown) for the window.

        Days with no alerts are filled in with zero counts so the series is
        continuous from the window start to today.
        """
        start_date = self._window_start(days)

        # Aggregate alerts by calendar date with one conditional sum per severity.
        query = (
            select(
                func.date(Alert.created_at).label('date'),
                func.count(Alert.id).label('total_count'),
                func.sum(
                    case((Alert.severity == AlertSeverity.URGENT, 1), else_=0)
                ).label('urgent_count'),
                func.sum(
                    case((Alert.severity == AlertSeverity.HIGH, 1), else_=0)
                ).label('high_count'),
                func.sum(
                    case((Alert.severity == AlertSeverity.MEDIUM, 1), else_=0)
                ).label('medium_count'),
                func.sum(
                    case((Alert.severity == AlertSeverity.LOW, 1), else_=0)
                ).label('low_count')
            )
            .where(
                and_(
                    Alert.tenant_id == tenant_id,
                    Alert.created_at >= start_date
                )
            )
            .group_by(func.date(Alert.created_at))
            .order_by(func.date(Alert.created_at))
        )

        result = await self.session.execute(query)
        rows = result.all()

        # Fill in missing dates with zeros so the frontend chart is gapless.
        trends = []
        current_date = start_date.date()
        end_date = self._utc_now().date()
        data_by_date = {row.date: row for row in rows}

        while current_date <= end_date:
            date_str = current_date.isoformat()
            row = data_by_date.get(current_date)
            trends.append({
                'date': date_str,
                'count': int(row.total_count) if row else 0,
                'urgentCount': int(row.urgent_count) if row else 0,
                'highCount': int(row.high_count) if row else 0,
                'mediumCount': int(row.medium_count) if row else 0,
                'lowCount': int(row.low_count) if row else 0,
            })
            current_date += timedelta(days=1)

        return trends

    async def get_average_response_time(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> int:
        """Average time-to-acknowledgment in minutes (0 if no data).

        Outliers above 24 hours are excluded so one stale alert does not
        dominate the average.
        """
        start_date = self._window_start(days)

        query = (
            select(func.avg(AlertInteraction.response_time_seconds))
            .where(
                and_(
                    AlertInteraction.tenant_id == tenant_id,
                    AlertInteraction.interaction_type == 'acknowledged',
                    AlertInteraction.interacted_at >= start_date,
                    AlertInteraction.response_time_seconds < 86400  # Less than 24 hours
                )
            )
        )
        result = await self.session.execute(query)
        avg_seconds = result.scalar_one_or_none()
        if avg_seconds is None:
            return 0
        # SQL AVG may come back as Decimal; normalize before rounding to minutes.
        return round(float(avg_seconds) / 60)

    async def get_top_categories(
        self,
        tenant_id: UUID,
        days: int = 7,
        limit: int = 3
    ) -> List[Dict[str, Any]]:
        """Top alert types in the window, each with count and percentage."""
        start_date = self._window_start(days)

        query = (
            select(
                Alert.alert_type,
                func.count(Alert.id).label('count')
            )
            .where(
                and_(
                    Alert.tenant_id == tenant_id,
                    Alert.created_at >= start_date
                )
            )
            .group_by(Alert.alert_type)
            .order_by(func.count(Alert.id).desc())
            .limit(limit)
        )
        result = await self.session.execute(query)
        rows = result.all()

        # Total over the same window, for percentage computation.
        total_query = (
            select(func.count(Alert.id))
            .where(
                and_(
                    Alert.tenant_id == tenant_id,
                    Alert.created_at >= start_date
                )
            )
        )
        total_result = await self.session.execute(total_query)
        total = total_result.scalar_one() or 1  # guard divide-by-zero

        categories = []
        for row in rows:
            percentage = round((row.count / total) * 100) if total > 0 else 0
            categories.append({
                'category': row.alert_type,
                'count': row.count,
                'percentage': percentage
            })
        return categories

    async def get_resolution_stats(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> Dict[str, Any]:
        """Total/resolved/active counts and the resolution rate (percent)."""
        start_date = self._window_start(days)

        def _count_where(*extra_conditions):
            # Shared shape for the three count queries below.
            return (
                select(func.count(Alert.id))
                .where(
                    and_(
                        Alert.tenant_id == tenant_id,
                        Alert.created_at >= start_date,
                        *extra_conditions
                    )
                )
            )

        total_result = await self.session.execute(_count_where())
        total_alerts = total_result.scalar_one() or 0

        resolved_result = await self.session.execute(
            _count_where(Alert.status == AlertStatus.RESOLVED)
        )
        resolved_alerts = resolved_result.scalar_one() or 0

        active_result = await self.session.execute(
            _count_where(Alert.status == AlertStatus.ACTIVE)
        )
        active_alerts = active_result.scalar_one() or 0

        resolution_rate = round((resolved_alerts / total_alerts) * 100) if total_alerts > 0 else 0

        return {
            'totalAlerts': total_alerts,
            'resolvedAlerts': resolved_alerts,
            'activeAlerts': active_alerts,
            'resolutionRate': resolution_rate
        }

    async def get_busiest_day(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> str:
        """Name of the weekday with the most alerts, or 'N/A' with no data."""
        start_date = self._window_start(days)

        query = (
            select(
                extract('dow', Alert.created_at).label('day_of_week'),
                func.count(Alert.id).label('count')
            )
            .where(
                and_(
                    Alert.tenant_id == tenant_id,
                    Alert.created_at >= start_date
                )
            )
            .group_by(extract('dow', Alert.created_at))
            .order_by(func.count(Alert.id).desc())
            .limit(1)
        )
        result = await self.session.execute(query)
        row = result.first()
        if not row:
            return 'N/A'
        # PostgreSQL extract('dow') yields 0=Sunday .. 6=Saturday.
        day_names = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
        return day_names[int(row.day_of_week)]

    async def get_predicted_daily_average(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> int:
        """Mean daily alert count over the window (simple trend-based estimate)."""
        trends = await self.get_analytics_trends(tenant_id, days)
        if not trends:
            return 0
        total_count = sum(trend['count'] for trend in trends)
        return round(total_count / len(trends))

    async def get_full_analytics(
        self,
        tenant_id: UUID,
        days: int = 7
    ) -> Dict[str, Any]:
        """Assemble the complete analytics payload (see AnalyticsResponse)."""
        trends = await self.get_analytics_trends(tenant_id, days)
        avg_response_time = await self.get_average_response_time(tenant_id, days)
        top_categories = await self.get_top_categories(tenant_id, days)
        resolution_stats = await self.get_resolution_stats(tenant_id, days)
        busiest_day = await self.get_busiest_day(tenant_id, days)
        predicted_avg = await self.get_predicted_daily_average(tenant_id, days)
        return {
            'trends': trends,
            'averageResponseTime': avg_response_time,
            'topCategories': top_categories,
            'totalAlerts': resolution_stats['totalAlerts'],
            'resolvedAlerts': resolution_stats['resolvedAlerts'],
            'activeAlerts': resolution_stats['activeAlerts'],
            'resolutionRate': resolution_stats['resolutionRate'],
            'predictedDailyAverage': predicted_avg,
            'busiestDay': busiest_day
        }

View File

@@ -0,0 +1,51 @@
"""add_alert_interactions
Revision ID: a1b2c3d4e5f6
Revises: 5ad7a76c1b10
Create Date: 2025-10-19 14:30:00.000000+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = '5ad7a76c1b10'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the alert_interactions table and its analytics indexes.

    Fix over the previous revision: the timestamp columns are created as
    ``sa.DateTime(timezone=True)`` so the physical schema matches the
    ``AlertInteraction`` model, whose ``interacted_at``/``created_at``
    columns are declared timezone-aware. A plain ``sa.DateTime()`` here
    would produce naive ``timestamp`` columns and silently drop offsets.
    """
    op.create_table('alert_interactions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('alert_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('interaction_type', sa.String(length=50), nullable=False),
        sa.Column('interacted_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('response_time_seconds', sa.Integer(), nullable=True),
        sa.Column('interaction_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes matching the analytics query patterns: per-tenant+alert lookups,
    # per-user history, time-range scans, type filters, and tenant+time windows.
    op.create_index('idx_alert_interactions_tenant_alert', 'alert_interactions', ['tenant_id', 'alert_id'], unique=False)
    op.create_index('idx_alert_interactions_user', 'alert_interactions', ['user_id'], unique=False)
    op.create_index('idx_alert_interactions_time', 'alert_interactions', ['interacted_at'], unique=False)
    op.create_index('idx_alert_interactions_type', 'alert_interactions', ['interaction_type'], unique=False)
    op.create_index('idx_alert_interactions_tenant_time', 'alert_interactions', ['tenant_id', 'interacted_at'], unique=False)
def downgrade() -> None:
    """Drop the alert_interactions table and its supporting indexes."""
    for index_name in (
        'idx_alert_interactions_tenant_time',
        'idx_alert_interactions_type',
        'idx_alert_interactions_time',
        'idx_alert_interactions_user',
        'idx_alert_interactions_tenant_alert',
    ):
        op.drop_index(index_name, table_name='alert_interactions')
    op.drop_table('alert_interactions')

View File

@@ -45,7 +45,7 @@ class CloneOrchestrator:
name="inventory",
url=os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000"),
required=False, # Optional - provides ingredients/recipes
timeout=5.0 # Reduced from 10s - optimized data volume
timeout=30.0 # Increased for inventory data cloning
),
ServiceDefinition(
name="recipes",
@@ -63,7 +63,7 @@ class CloneOrchestrator:
name="sales",
url=os.getenv("SALES_SERVICE_URL", "http://sales-service:8000"),
required=False, # Optional - provides sales history
timeout=5.0 # Reduced from 10s - optimized to 30 days history
timeout=30.0 # Increased for sales data cloning
),
ServiceDefinition(
name="orders",
@@ -83,6 +83,12 @@ class CloneOrchestrator:
required=False, # Optional - provides historical forecasts
timeout=15.0
),
ServiceDefinition(
name="pos",
url=os.getenv("POS_SERVICE_URL", "http://pos-service:8000"),
required=False, # Optional - provides POS configurations
timeout=30.0 # Increased for POS configurations cloning
),
]
async def clone_all_services(

View File

@@ -246,7 +246,7 @@ class DemoDataCloner:
"orders": settings.ORDERS_SERVICE_URL,
"production": settings.PRODUCTION_SERVICE_URL,
"suppliers": settings.SUPPLIERS_SERVICE_URL,
"pos": settings.SALES_SERVICE_URL,
"pos": settings.POS_SERVICE_URL,
}
return url_map.get(service_name, "")

View File

@@ -107,8 +107,30 @@ async def clone_demo_data(
)
for ingredient in base_ingredients:
# Create new ingredient with same attributes but new ID and tenant
new_ingredient_id = uuid.uuid4()
# Transform ingredient ID using XOR to ensure consistency across services
# This formula matches the suppliers service ID transformation
# Formula: virtual_ingredient_id = virtual_tenant_id XOR base_ingredient_id
base_ingredient_int = int(ingredient.id.hex, 16)
virtual_tenant_int = int(virtual_uuid.hex, 16)
base_tenant_int = int(base_uuid.hex, 16)
# Reverse the original XOR to get the base ingredient ID
# base_ingredient = base_tenant ^ base_ingredient_id
# So: base_ingredient_id = base_tenant ^ base_ingredient
base_ingredient_id_int = base_tenant_int ^ base_ingredient_int
# Now apply virtual tenant XOR to get the new ingredient ID
new_ingredient_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_id_int)
logger.debug(
"Transforming ingredient ID using XOR",
base_ingredient_id=str(ingredient.id),
new_ingredient_id=str(new_ingredient_id),
ingredient_sku=ingredient.sku,
ingredient_name=ingredient.name
)
new_ingredient = Ingredient(
id=new_ingredient_id,
tenant_id=virtual_uuid,

View File

@@ -11,8 +11,6 @@ from typing import List, Optional, Dict, Any
from uuid import UUID
import structlog
from shared.database.transactions import transactional
from app.core.config import settings
from app.services.inventory_service import InventoryService
from app.services.food_safety_service import FoodSafetyService
@@ -58,7 +56,6 @@ class DashboardService:
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db)
}
@transactional
async def get_inventory_dashboard_summary(
self,
db,

View File

@@ -337,15 +337,17 @@ class FoodSafetyService:
"""Get food safety dashboard data"""
try:
# Get compliance overview
compliance_query = """
from sqlalchemy import text
compliance_query = text("""
SELECT
COUNT(*) as total,
COUNT(CASE WHEN compliance_status = 'compliant' THEN 1 END) as compliant,
COUNT(CASE WHEN compliance_status = 'non_compliant' THEN 1 END) as non_compliant,
COUNT(CASE WHEN compliance_status = 'pending_review' THEN 1 END) as pending_review
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
"""
""")
compliance_result = await db.execute(compliance_query, {"tenant_id": tenant_id})
compliance_stats = compliance_result.fetchone()
@@ -355,19 +357,19 @@ class FoodSafetyService:
compliance_percentage = (compliant_items / total_compliance * 100) if total_compliance > 0 else 0
# Get temperature monitoring status
temp_query = """
temp_query = text("""
SELECT
COUNT(DISTINCT equipment_id) as sensors_online,
COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
FROM temperature_logs
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
"""
""")
temp_result = await db.execute(temp_query, {"tenant_id": tenant_id})
temp_stats = temp_result.fetchone()
# Get expiration tracking
expiration_query = """
expiration_query = text("""
SELECT
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
@@ -375,20 +377,20 @@ class FoodSafetyService:
FROM stock s
JOIN ingredients i ON s.ingredient_id = i.id
WHERE i.tenant_id = :tenant_id AND s.is_available = true
"""
""")
expiration_result = await db.execute(expiration_query, {"tenant_id": tenant_id})
expiration_stats = expiration_result.fetchone()
# Get alert counts
alert_query = """
alert_query = text("""
SELECT
COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
COUNT(CASE WHEN regulatory_action_required AND NOT resolved_at THEN 1 END) as regulatory_pending
COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
FROM food_safety_alerts
WHERE tenant_id = :tenant_id AND status = 'active'
"""
""")
alert_result = await db.execute(alert_query, {"tenant_id": tenant_id})
alert_stats = alert_result.fetchone()
@@ -425,7 +427,9 @@ class FoodSafetyService:
async def _validate_compliance_data(self, db, compliance_data: FoodSafetyComplianceCreate):
"""Validate compliance data for business rules"""
# Check if ingredient exists
ingredient_query = "SELECT id FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id"
from sqlalchemy import text
ingredient_query = text("SELECT id FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id")
result = await db.execute(ingredient_query, {
"ingredient_id": compliance_data.ingredient_id,
"tenant_id": compliance_data.tenant_id
@@ -629,4 +633,4 @@ class FoodSafetyService:
except Exception as e:
logger.warning("Failed to send alert notifications",
alert_id=str(alert.id),
error=str(e))
error=str(e))

View File

@@ -546,7 +546,21 @@ class InventoryService:
ingredients = await ingredient_repo.get_ingredients_by_tenant(tenant_id, 0, 1000)
for ingredient in ingredients:
category = ingredient.category.value if ingredient.category else 'other'
# Determine category based on product type, similar to to_dict() method
category = 'other'
if ingredient.product_type and ingredient.product_type.value == 'finished_product':
# For finished products, use product_category
if ingredient.product_category:
category = ingredient.product_category.value
elif ingredient.ingredient_category and ingredient.ingredient_category.value != 'other':
category = ingredient.ingredient_category.value
else:
# For ingredients, use ingredient_category
if ingredient.ingredient_category and ingredient.ingredient_category.value != 'other':
category = ingredient.ingredient_category.value
elif ingredient.product_category:
category = ingredient.product_category.value
if category not in stock_by_category:
stock_by_category[category] = {
'count': 0,
@@ -826,4 +840,4 @@ class InventoryService:
# This is now handled in stock creation/update validation
# Add more validations as needed
pass
pass

View File

@@ -62,7 +62,7 @@ async def get_orders_service(db = Depends(get_db)) -> OrdersService:
# ===== Order CRUD Endpoints =====
@router.post(
route_builder.build_base_route("orders"),
route_builder.build_base_route("").rstrip("/"),
response_model=OrderResponse,
status_code=status.HTTP_201_CREATED
)
@@ -106,7 +106,7 @@ async def create_order(
@router.get(
route_builder.build_base_route("orders"),
route_builder.build_base_route("").rstrip("/"),
response_model=List[OrderResponse]
)
async def get_orders(

View File

@@ -57,6 +57,7 @@ class CustomerSegment(enum.Enum):
class PriorityLevel(enum.Enum):
"""Priority levels for orders and customers"""
URGENT = "urgent"
HIGH = "high"
NORMAL = "normal"
LOW = "low"

View File

@@ -0,0 +1,200 @@
# services/orders/app/services/approval_rules_service.py
"""
Approval Rules Service - Smart auto-approval logic for purchase orders
Evaluates POs against configurable business rules to determine if auto-approval is appropriate
"""
from typing import Dict, List, Any, Optional, Tuple
from decimal import Decimal
from uuid import UUID
import structlog
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class ApprovalRulesService:
    """
    Evaluate purchase orders against configurable auto-approval rules.

    Each rule contributes a human-readable reason string; any failing rule
    forces the PO into manual approval. The final decision is the AND of
    all applicable rules.
    """

    # Priority labels (either case) that always force manual review.
    _ESCALATED_PRIORITIES = ('critical', 'urgent', 'CRITICAL', 'URGENT')

    def __init__(self, config: BaseServiceSettings):
        self.config = config

    async def evaluate_po_for_auto_approval(
        self,
        po_data: Dict[str, Any],
        supplier_data: Optional[Dict[str, Any]] = None,
        requirements_data: Optional[List[Dict[str, Any]]] = None
    ) -> Tuple[bool, List[str]]:
        """
        Decide whether a PO qualifies for automatic approval.

        Args:
            po_data: PO payload (amount fields, priority, id).
            supplier_data: Supplier performance snapshot, if available.
            requirements_data: Per-line-item data (priority/risk), if available.

        Returns:
            Tuple of (should_auto_approve, reasons)
        """
        if not self.config.AUTO_APPROVE_ENABLED:
            return False, ["Auto-approval is disabled in configuration"]

        reasons: List[str] = []
        approvable = True

        # Rule 1: total amount must stay under the configured threshold.
        total_amount = self._calculate_po_total(po_data)
        threshold = self.config.AUTO_APPROVE_THRESHOLD_EUR
        if total_amount > threshold:
            approvable = False
            reasons.append(
                f"PO amount €{total_amount:.2f} exceeds threshold €{threshold:.2f}"
            )
        else:
            reasons.append(
                f"PO amount €{total_amount:.2f} within threshold €{threshold:.2f}"
            )

        # Rule 2: supplier trust score, preferred flag and per-supplier opt-in.
        if supplier_data and self.config.AUTO_APPROVE_TRUSTED_SUPPLIERS:
            score = supplier_data.get('trust_score', 0.0)
            min_score = self.config.AUTO_APPROVE_MIN_SUPPLIER_SCORE
            if score < min_score:
                approvable = False
                reasons.append(
                    f"Supplier trust score {score:.2f} below minimum {min_score:.2f}"
                )
            else:
                reasons.append(f"Supplier trust score {score:.2f} meets minimum requirements")

            if supplier_data.get('is_preferred_supplier', False):
                reasons.append("Supplier is a preferred supplier")
            else:
                approvable = False
                reasons.append("Supplier is not marked as preferred")

            if supplier_data.get('auto_approve_enabled', False):
                reasons.append("Auto-approve is enabled for this supplier")
            else:
                approvable = False
                reasons.append("Auto-approve is disabled for this supplier")
        elif supplier_data is None:
            # Without supplier data we cannot evaluate trust — never auto-approve.
            approvable = False
            reasons.append("No supplier data available")

        # Rule 3: brand-new suppliers always need a human in the loop.
        if supplier_data and self.config.REQUIRE_APPROVAL_NEW_SUPPLIERS:
            previous_orders = supplier_data.get('total_pos_count', 0)
            if previous_orders < 5:
                approvable = False
                reasons.append(f"New supplier with only {previous_orders} previous orders (minimum 5 required)")
            else:
                reasons.append(f"Established supplier with {previous_orders} previous orders")

        # Rule 4: critical/urgent line items require manual review.
        if requirements_data and self.config.REQUIRE_APPROVAL_CRITICAL_ITEMS:
            critical_count = sum(
                1 for req in requirements_data
                if req.get('priority') in self._ESCALATED_PRIORITIES
            )
            if critical_count > 0:
                approvable = False
                reasons.append(f"Contains {critical_count} critical/urgent items requiring manual review")
            else:
                reasons.append("No critical/urgent items detected")

        # Rule 5: historical approval rate must be at least 95%.
        if supplier_data:
            previous_orders = supplier_data.get('total_pos_count', 0)
            approved_orders = supplier_data.get('approved_pos_count', 0)
            if previous_orders > 0:
                approval_rate = approved_orders / previous_orders
                if approval_rate < 0.95:
                    approvable = False
                    reasons.append(
                        f"Historical approval rate {approval_rate:.1%} below 95% threshold"
                    )
                else:
                    reasons.append(f"High historical approval rate {approval_rate:.1%}")

        # Rule 6: urgent/critical POs are always reviewed by a person.
        priority = po_data.get('priority', 'normal')
        if priority in self._ESCALATED_PRIORITIES:
            approvable = False
            reasons.append(f"PO priority is '{priority}' - requires manual review")

        logger.info(
            "PO auto-approval evaluation completed",
            should_auto_approve=approvable,
            total_amount=float(total_amount),
            supplier_id=supplier_data.get('id') if supplier_data else None,
            reasons_count=len(reasons),
            po_data=po_data.get('id') if isinstance(po_data, dict) else str(po_data)
        )
        return approvable, reasons

    def _calculate_po_total(self, po_data: Dict[str, Any]) -> Decimal:
        """Return subtotal + tax + shipping - discount as an exact Decimal."""
        def amount(key: str) -> Decimal:
            # Values may arrive as int/float/str; route through str() so the
            # Decimal conversion is exact and type-agnostic.
            return Decimal(str(po_data.get(key, 0)))

        return amount('subtotal') + amount('tax_amount') + amount('shipping_cost') - amount('discount_amount')

    def get_approval_summary(
        self,
        should_approve: bool,
        reasons: List[str]
    ) -> Dict[str, Any]:
        """
        Build a UI-friendly summary of the approval decision.

        Returns:
            Dict with the decision flag, reason list, and a one-line summary.
        """
        return {
            "auto_approved": should_approve,
            "decision": "APPROVED" if should_approve else "REQUIRES_MANUAL_APPROVAL",
            "reasons": reasons,
            "reason_count": len(reasons),
            "summary": self._format_summary(should_approve, reasons)
        }

    def _format_summary(self, should_approve: bool, reasons: List[str]) -> str:
        """One-line summary; surfaces the first failing-looking reason when rejected."""
        if should_approve:
            return f"Auto-approved: {', '.join(reasons[:2])}"
        blocking_keywords = ('exceeds', 'below', 'not', 'disabled', 'new', 'critical')
        for reason in reasons:
            lowered = reason.lower()
            if any(keyword in lowered for keyword in blocking_keywords):
                return f"Manual approval required: {reason}"
        return "Manual approval required"

    def validate_approval_override(
        self,
        override_reason: str,
        user_role: str
    ) -> Tuple[bool, Optional[str]]:
        """
        Check whether a user may override the auto-approval decision.

        Returns:
            Tuple of (is_valid, error_message)
        """
        # Only admin/owner can override.
        if user_role not in ('admin', 'owner'):
            return False, "Insufficient permissions to override approval rules"
        # A substantive reason (>= 10 chars after trimming) is mandatory.
        if not override_reason or len(override_reason.strip()) < 10:
            return False, "Override reason must be at least 10 characters"
        return True, None

View File

@@ -0,0 +1,257 @@
# services/orders/app/services/procurement_notification_service.py
"""
Procurement Notification Service - Send alerts and notifications for procurement events
Handles PO approval notifications, reminders, escalations, and summaries
"""
from typing import Dict, List, Any, Optional
from uuid import UUID
from datetime import datetime, timedelta, timezone
import structlog
from shared.config.base import BaseServiceSettings
from shared.alerts.base_service import AlertServiceMixin
logger = structlog.get_logger()
class ProcurementNotificationService(AlertServiceMixin):
    """Service for sending procurement-related notifications and alerts.

    All send_* methods are best-effort: failures are logged, never raised,
    so a broken notification channel cannot abort the procurement flow.
    Alerts/notifications are delivered via AlertServiceMixin.publish_item.
    """

    def __init__(self, config: BaseServiceSettings):
        self.config = config

    async def send_pos_pending_approval_alert(
        self,
        tenant_id: UUID,
        pos_data: List[Dict[str, Any]]
    ):
        """
        Send one grouped alert when new POs are created and need approval.

        Args:
            tenant_id: Tenant owning the POs.
            pos_data: One dict per PO (po_id, po_number, supplier_id,
                total_amount, priority, auto_approved). Empty list is a no-op.
        """
        try:
            if not pos_data:
                return

            # Aggregate totals for the summary message.
            total_amount = sum(float(po.get('total_amount', 0)) for po in pos_data)
            critical_count = sum(1 for po in pos_data if po.get('priority') in ['high', 'critical', 'urgent'])

            # Determine severity based on amount and urgency.
            # BUGFIX: evaluate the higher (critical) threshold FIRST — previously
            # the `> 10000` branch sat in an `elif` after `> 5000`, so it was
            # unreachable and severity could never become "critical".
            severity = "medium"
            if total_amount > 10000:
                severity = "critical"
            elif critical_count > 0 or total_amount > 5000:
                severity = "high"

            alert_data = {
                "type": "procurement_pos_pending_approval",
                "severity": severity,
                "title": f"{len(pos_data)} Pedidos Pendientes de Aprobación",
                "message": f"Se han creado {len(pos_data)} pedidos de compra que requieren tu aprobación. Total: €{total_amount:.2f}",
                "metadata": {
                    "tenant_id": str(tenant_id),
                    "pos_count": len(pos_data),
                    "total_amount": total_amount,
                    "critical_count": critical_count,
                    "pos": [
                        {
                            "po_id": po.get("po_id"),
                            "po_number": po.get("po_number"),
                            "supplier_id": po.get("supplier_id"),
                            "total_amount": po.get("total_amount"),
                            "auto_approved": po.get("auto_approved", False)
                        }
                        for po in pos_data
                    ],
                    "action_required": True,
                    "action_url": "/app/comprar"
                }
            }
            await self.publish_item(tenant_id, alert_data, item_type='alert')
            logger.info("POs pending approval alert sent",
                       tenant_id=str(tenant_id),
                       pos_count=len(pos_data),
                       total_amount=total_amount)
        except Exception as e:
            # Best-effort: never let a notification failure propagate.
            logger.error("Error sending POs pending approval alert",
                        tenant_id=str(tenant_id),
                        error=str(e))

    async def send_approval_reminder(
        self,
        tenant_id: UUID,
        po_data: Dict[str, Any],
        hours_pending: int
    ):
        """
        Send a reminder for a PO that has not been approved within the threshold.

        Severity escalates from medium to high once the PO has been pending
        36 hours or more.
        """
        try:
            alert_data = {
                "type": "procurement_approval_reminder",
                "severity": "medium" if hours_pending < 36 else "high",
                "title": f"Recordatorio: Pedido {po_data.get('po_number')} Pendiente",
                "message": f"El pedido {po_data.get('po_number')} lleva {hours_pending} horas sin aprobarse. Total: €{po_data.get('total_amount', 0):.2f}",
                "metadata": {
                    "tenant_id": str(tenant_id),
                    "po_id": po_data.get("po_id"),
                    "po_number": po_data.get("po_number"),
                    "supplier_name": po_data.get("supplier_name"),
                    "total_amount": po_data.get("total_amount"),
                    "hours_pending": hours_pending,
                    "created_at": po_data.get("created_at"),
                    "action_required": True,
                    "action_url": f"/app/comprar?po={po_data.get('po_id')}"
                }
            }
            await self.publish_item(tenant_id, alert_data, item_type='alert')
            logger.info("Approval reminder sent",
                       tenant_id=str(tenant_id),
                       po_id=po_data.get("po_id"),
                       hours_pending=hours_pending)
        except Exception as e:
            logger.error("Error sending approval reminder",
                        tenant_id=str(tenant_id),
                        po_id=po_data.get("po_id"),
                        error=str(e))

    async def send_critical_po_escalation(
        self,
        tenant_id: UUID,
        po_data: Dict[str, Any],
        hours_pending: int
    ):
        """
        Send a critical-severity escalation for urgent POs not approved in time.

        Always severity "critical"; logged at warning level to stand out.
        """
        try:
            alert_data = {
                "type": "procurement_critical_po",
                "severity": "critical",
                "title": f"🚨 URGENTE: Pedido Crítico {po_data.get('po_number')}",
                "message": f"El pedido crítico {po_data.get('po_number')} lleva {hours_pending} horas sin aprobar. Se requiere acción inmediata.",
                "metadata": {
                    "tenant_id": str(tenant_id),
                    "po_id": po_data.get("po_id"),
                    "po_number": po_data.get("po_number"),
                    "supplier_name": po_data.get("supplier_name"),
                    "total_amount": po_data.get("total_amount"),
                    "priority": po_data.get("priority"),
                    "required_delivery_date": po_data.get("required_delivery_date"),
                    "hours_pending": hours_pending,
                    "escalated": True,
                    "action_required": True,
                    "action_url": f"/app/comprar?po={po_data.get('po_id')}"
                }
            }
            await self.publish_item(tenant_id, alert_data, item_type='alert')
            logger.warning("Critical PO escalation sent",
                          tenant_id=str(tenant_id),
                          po_id=po_data.get("po_id"),
                          hours_pending=hours_pending)
        except Exception as e:
            logger.error("Error sending critical PO escalation",
                        tenant_id=str(tenant_id),
                        po_id=po_data.get("po_id"),
                        error=str(e))

    async def send_auto_approval_summary(
        self,
        tenant_id: UUID,
        summary_data: Dict[str, Any]
    ):
        """
        Send the daily summary of auto-approved vs. manually-pending POs.

        Skips silently when there was no approval activity at all.
        """
        try:
            auto_approved_count = summary_data.get("auto_approved_count", 0)
            total_amount = summary_data.get("total_auto_approved_amount", 0)
            manual_approval_count = summary_data.get("manual_approval_count", 0)

            if auto_approved_count == 0 and manual_approval_count == 0:
                # No activity, skip notification
                return

            alert_data = {
                "type": "procurement_auto_approval_summary",
                "severity": "low",
                "title": "Resumen Diario de Pedidos",
                "message": f"Hoy se aprobaron automáticamente {auto_approved_count} pedidos (€{total_amount:.2f}). {manual_approval_count} requieren aprobación manual.",
                "metadata": {
                    "tenant_id": str(tenant_id),
                    "auto_approved_count": auto_approved_count,
                    "total_auto_approved_amount": total_amount,
                    "manual_approval_count": manual_approval_count,
                    "summary_date": summary_data.get("date"),
                    "auto_approved_pos": summary_data.get("auto_approved_pos", []),
                    "pending_approval_pos": summary_data.get("pending_approval_pos", []),
                    "action_url": "/app/comprar"
                }
            }
            # Published as a low-priority 'notification', not an 'alert'.
            await self.publish_item(tenant_id, alert_data, item_type='notification')
            logger.info("Auto-approval summary sent",
                       tenant_id=str(tenant_id),
                       auto_approved_count=auto_approved_count,
                       manual_count=manual_approval_count)
        except Exception as e:
            logger.error("Error sending auto-approval summary",
                        tenant_id=str(tenant_id),
                        error=str(e))

    async def send_po_approved_confirmation(
        self,
        tenant_id: UUID,
        po_data: Dict[str, Any],
        approved_by: str,
        auto_approved: bool = False
    ):
        """
        Send a confirmation notification when a PO is approved.

        Args:
            approved_by: Display name of the approver (ignored in the message
                when auto_approved is True).
            auto_approved: True when the system approved the PO automatically.
        """
        try:
            approval_type = "automáticamente" if auto_approved else f"por {approved_by}"
            alert_data = {
                "type": "procurement_po_approved",
                "severity": "low",
                "title": f"Pedido {po_data.get('po_number')} Aprobado",
                "message": f"El pedido {po_data.get('po_number')} ha sido aprobado {approval_type}. Total: €{po_data.get('total_amount', 0):.2f}",
                "metadata": {
                    "tenant_id": str(tenant_id),
                    "po_id": po_data.get("po_id"),
                    "po_number": po_data.get("po_number"),
                    "supplier_name": po_data.get("supplier_name"),
                    "total_amount": po_data.get("total_amount"),
                    "approved_by": approved_by,
                    "auto_approved": auto_approved,
                    "approved_at": datetime.now(timezone.utc).isoformat(),
                    "action_url": f"/app/comprar?po={po_data.get('po_id')}"
                }
            }
            await self.publish_item(tenant_id, alert_data, item_type='notification')
            logger.info("PO approved confirmation sent",
                       tenant_id=str(tenant_id),
                       po_id=po_data.get("po_id"),
                       auto_approved=auto_approved)
        except Exception as e:
            logger.error("Error sending PO approved confirmation",
                        tenant_id=str(tenant_id),
                        po_id=po_data.get("po_id"),
                        error=str(e))

View File

@@ -297,16 +297,24 @@ class ProcurementSchedulerService(BaseAlertService, AlertServiceMixin):
result = await procurement_service.generate_procurement_plan(tenant_id, request)
if result.success and result.plan:
# Send notification about new plan
await self.send_procurement_notification(
tenant_id, result.plan, "plan_created"
)
logger.info("🎉 Procurement plan created successfully",
tenant_id=str(tenant_id),
plan_id=str(result.plan.id),
plan_date=str(planning_date),
total_requirements=result.plan.total_requirements)
# Auto-create POs from the plan (NEW FEATURE)
if self.config.AUTO_CREATE_POS_FROM_PLAN:
await self._auto_create_purchase_orders_from_plan(
procurement_service,
tenant_id,
result.plan.id
)
# Send notification about new plan
await self.send_procurement_notification(
tenant_id, result.plan, "plan_created"
)
else:
logger.warning("⚠️ Failed to generate procurement plan",
tenant_id=str(tenant_id),
@@ -400,6 +408,70 @@ class ProcurementSchedulerService(BaseAlertService, AlertServiceMixin):
notification_type=notification_type,
error=str(e))
async def _auto_create_purchase_orders_from_plan(
    self,
    procurement_service,
    tenant_id: UUID,
    plan_id: UUID
):
    """
    Automatically create purchase orders from a procurement plan and run
    them through the auto-approval rules.

    Best-effort: every failure is logged and swallowed so the scheduler
    run is never aborted by PO-creation problems.
    """
    try:
        logger.info("🛒 Auto-creating purchase orders from plan",
                   tenant_id=str(tenant_id),
                   plan_id=str(plan_id))

        # Delegate creation; auto_approve=True turns on rule evaluation.
        creation_result = await procurement_service.create_purchase_orders_from_plan(
            tenant_id=tenant_id,
            plan_id=plan_id,
            auto_approve=True
        )

        if not creation_result.get("success"):
            logger.error("❌ Failed to create purchase orders from plan",
                        tenant_id=str(tenant_id),
                        plan_id=str(plan_id),
                        error=creation_result.get("error"))
            return

        created_count = creation_result.get("total_created", 0)
        approved_count = creation_result.get("total_auto_approved", 0)
        pending_count = creation_result.get("total_pending_approval", 0)

        logger.info("✅ Purchase orders created from plan",
                   tenant_id=str(tenant_id),
                   plan_id=str(plan_id),
                   total_created=created_count,
                   auto_approved=approved_count,
                   pending_approval=pending_count)

        # Alert approvers about POs that still need a manual decision.
        from app.services.procurement_notification_service import ProcurementNotificationService
        notification_service = ProcurementNotificationService(self.config)

        if pending_count > 0:
            await notification_service.send_pos_pending_approval_alert(
                tenant_id=tenant_id,
                pos_data=creation_result.get("pending_approval_pos", [])
            )

        # Auto-approved POs are only logged here (daily summary picks them up).
        if approved_count > 0:
            logger.info("🤖 Auto-approved POs",
                       tenant_id=str(tenant_id),
                       count=approved_count,
                       pos=creation_result.get("auto_approved_pos", []))
    except Exception as e:
        logger.error("💥 Error auto-creating purchase orders",
                    tenant_id=str(tenant_id),
                    plan_id=str(plan_id),
                    error=str(e))
async def test_procurement_generation(self):
"""Test method to manually trigger procurement planning"""
# Get the first available tenant for testing

View File

@@ -482,8 +482,9 @@ class ProcurementService:
auto_approve: bool = False
) -> Dict[str, Any]:
"""
Create purchase orders from procurement plan requirements (Feature #1)
Create purchase orders from procurement plan requirements with smart auto-approval
Groups requirements by supplier and creates POs automatically
Evaluates auto-approval rules and approves qualifying POs
"""
try:
plan = await self.plan_repo.get_plan_by_id(plan_id, tenant_id)
@@ -493,6 +494,10 @@ class ProcurementService:
if not plan.requirements:
return {"success": False, "error": "No requirements in plan"}
# Import approval rules service
from app.services.approval_rules_service import ApprovalRulesService
approval_service = ApprovalRulesService(self.config)
# Group requirements by supplier
supplier_requirements = {}
for req in plan.requirements:
@@ -507,6 +512,8 @@ class ProcurementService:
# Create PO for each supplier
created_pos = []
failed_pos = []
auto_approved_pos = []
pending_approval_pos = []
for supplier_id, requirements in supplier_requirements.items():
if supplier_id == 'no_supplier':
@@ -530,7 +537,19 @@ class ProcurementService:
for item in po_items
)
# Create PO via suppliers service
# Get supplier data for approval evaluation
supplier_data = await self._get_supplier_performance_data(tenant_id, supplier_id)
# Prepare requirements data for approval evaluation
requirements_data = [
{
"priority": req.priority,
"risk_level": req.risk_level
}
for req in requirements
]
# Create PO data structure
po_data = {
"supplier_id": supplier_id,
"items": po_items,
@@ -539,9 +558,26 @@ class ProcurementService:
"notes": f"Auto-generated from procurement plan {plan.plan_number}",
"tax_amount": subtotal * 0.1, # 10% tax estimation
"shipping_cost": 0,
"discount_amount": 0
"discount_amount": 0,
"subtotal": subtotal
}
# Evaluate auto-approval rules
should_auto_approve = False
approval_reasons = []
if auto_approve and self.config.AUTO_CREATE_POS_FROM_PLAN:
should_auto_approve, approval_reasons = await approval_service.evaluate_po_for_auto_approval(
po_data=po_data,
supplier_data=supplier_data,
requirements_data=requirements_data
)
# Add approval metadata to PO
po_data["auto_approval_evaluated"] = True
po_data["auto_approval_decision"] = "APPROVED" if should_auto_approve else "REQUIRES_MANUAL_APPROVAL"
po_data["auto_approval_reasons"] = approval_reasons
# Call suppliers service to create PO
po_response = await self.suppliers_client.create_purchase_order(
str(tenant_id),
@@ -563,15 +599,48 @@ class ProcurementService:
expected_delivery_date=req.required_by_date
)
# Auto-approve PO if rules pass
if should_auto_approve:
await self._auto_approve_purchase_order(
tenant_id=tenant_id,
po_id=po_id,
approval_reasons=approval_reasons
)
auto_approved_pos.append({
"po_id": po_id,
"po_number": po_number,
"supplier_id": supplier_id,
"items_count": len(requirements),
"total_amount": subtotal,
"auto_approved": True,
"approval_reasons": approval_reasons
})
logger.info("PO auto-approved", po_id=po_id, supplier_id=supplier_id)
else:
pending_approval_pos.append({
"po_id": po_id,
"po_number": po_number,
"supplier_id": supplier_id,
"items_count": len(requirements),
"total_amount": subtotal,
"auto_approved": False,
"requires_manual_approval": True,
"approval_reasons": approval_reasons
})
created_pos.append({
"po_id": po_id,
"po_number": po_number,
"supplier_id": supplier_id,
"items_count": len(requirements),
"total_amount": subtotal
"total_amount": subtotal,
"auto_approved": should_auto_approve
})
logger.info("PO created from plan", po_id=po_id, supplier_id=supplier_id)
logger.info("PO created from plan",
po_id=po_id,
supplier_id=supplier_id,
auto_approved=should_auto_approve)
else:
failed_pos.append({
"supplier_id": supplier_id,
@@ -589,11 +658,21 @@ class ProcurementService:
await self.db.commit()
logger.info("PO creation from plan completed",
total_created=len(created_pos),
auto_approved=len(auto_approved_pos),
pending_approval=len(pending_approval_pos),
failed=len(failed_pos))
return {
"success": True,
"created_pos": created_pos,
"failed_pos": failed_pos,
"auto_approved_pos": auto_approved_pos,
"pending_approval_pos": pending_approval_pos,
"total_created": len(created_pos),
"total_auto_approved": len(auto_approved_pos),
"total_pending_approval": len(pending_approval_pos),
"total_failed": len(failed_pos)
}
@@ -1612,3 +1691,75 @@ class ProcurementService:
"plan_completion_rate": 0.0,
"supplier_performance": 0.0
}
async def _get_supplier_performance_data(self, tenant_id: uuid.UUID, supplier_id: str) -> Optional[Dict[str, Any]]:
    """
    Fetch a supplier from the suppliers service and project it onto the
    performance fields consumed by the auto-approval rules.

    Returns None (never raises) when the supplier is missing or the
    remote call fails.
    """
    # Field -> default used when the suppliers service omits the key.
    performance_defaults = {
        "id": None,
        "name": None,
        "trust_score": 0.0,
        "is_preferred_supplier": False,
        "auto_approve_enabled": False,
        "total_pos_count": 0,
        "approved_pos_count": 0,
        "on_time_delivery_rate": 0.0,
        "fulfillment_rate": 0.0,
        "quality_rating": 0.0,
        "delivery_rating": 0.0,
        "status": None,
    }
    try:
        raw = await self.suppliers_client.get_supplier(str(tenant_id), supplier_id)
        if not raw:
            logger.warning("Supplier not found", supplier_id=supplier_id)
            return None
        # Keep only the whitelisted fields, filling gaps with safe defaults.
        return {field: raw.get(field, default) for field, default in performance_defaults.items()}
    except Exception as e:
        logger.error("Error getting supplier performance data",
                    supplier_id=supplier_id,
                    error=str(e))
        return None
async def _auto_approve_purchase_order(
    self,
    tenant_id: uuid.UUID,
    po_id: str,
    approval_reasons: List[str]
):
    """
    Approve a purchase order on behalf of the system via the suppliers
    service (status becomes 'approved', approver recorded as "system").

    Raises:
        Re-raises any exception from the suppliers client after logging it,
        so the caller can decide whether a failed auto-approval is fatal.
    """
    try:
        # Only the first two reasons go into the free-text note; the full
        # list travels alongside in approval_reasons.
        payload = {
            "approved_by": "system",
            "approval_notes": f"Auto-approved: {', '.join(approval_reasons[:2])}",
            "auto_approved": True,
            "approval_reasons": approval_reasons
        }
        await self.suppliers_client.approve_purchase_order(
            str(tenant_id),
            po_id,
            payload
        )
        logger.info("PO auto-approved successfully",
                   po_id=po_id,
                   tenant_id=str(tenant_id),
                   reasons_count=len(approval_reasons))
    except Exception as e:
        logger.error("Error auto-approving PO",
                    po_id=po_id,
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise

View File

@@ -0,0 +1,226 @@
"""
Internal Demo API Endpoints for POS Service
Used by demo_session service to clone data for virtual demo tenants
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any
from uuid import UUID
import structlog
import os
from app.core.database import get_db
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.models.pos_config import POSConfiguration
from app.models.pos_transaction import POSTransaction, POSTransactionItem
import uuid
from datetime import datetime, timezone
from typing import Optional
router = APIRouter()
logger = structlog.get_logger()
# Internal API key for service-to-service communication
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
def verify_internal_api_key(x_internal_api_key: str = Header(...)):
    """
    FastAPI dependency: verify the internal service-to-service API key
    passed in the X-Internal-Api-Key header.

    Returns:
        True when the key matches.

    Raises:
        HTTPException: 403 when the supplied key does not match.
    """
    import hmac  # local import: only needed for this comparison

    # SECURITY FIX: use a constant-time comparison instead of `!=` so the
    # key cannot be recovered via a timing side channel (plain string
    # comparison short-circuits at the first differing byte).
    if not hmac.compare_digest(
        x_internal_api_key.encode("utf-8"),
        INTERNAL_API_KEY.encode("utf-8"),
    ):
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone POS demo data from a base template tenant to a virtual demo tenant.

    Called by the demo_session service during session initialization
    (protected by the internal API key dependency). Copies every
    POSConfiguration of the base tenant, plus the 10 most recent
    POSTransactions per configuration and each transaction's items.
    Everything is written in one transaction: commit on success,
    rollback + HTTP 500 on any failure.

    Returns:
        Dict with success flag and per-entity clone counts.
    """
    # NOTE(review): start_time is never used after this assignment.
    start_time = datetime.now(timezone.utc)
    # NOTE(review): session_created_at is only logged; the log message below
    # claims "date adjustment" but cloned transactions keep their original
    # transaction_date — confirm whether date shifting was intended here.
    session_created_at = datetime.now(timezone.utc)
    logger.info(
        "Starting POS data cloning with date adjustment",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at.isoformat()
    )
    try:
        # Raises ValueError (-> caught below, returned as 500) on bad UUIDs.
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Fetch all POS configurations belonging to the base template tenant.
        result = await db.execute(
            select(POSConfiguration).where(POSConfiguration.tenant_id == base_uuid)
        )
        base_configs = list(result.scalars().all())
        configs_cloned = 0
        transactions_cloned = 0

        # Clone each configuration, then its recent transactions and items.
        for base_config in base_configs:
            # New row for the virtual tenant; most fields copied verbatim.
            # NOTE(review): encrypted_credentials / webhook_secret are copied
            # as-is, so the demo tenant shares the base tenant's credentials —
            # confirm this is intended for demo sessions.
            new_config = POSConfiguration(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                pos_system=base_config.pos_system,
                provider_name=f"{base_config.provider_name} (Demo Session)",
                is_active=base_config.is_active,
                is_connected=base_config.is_connected,
                encrypted_credentials=base_config.encrypted_credentials,
                webhook_url=base_config.webhook_url,
                webhook_secret=base_config.webhook_secret,
                environment=base_config.environment,
                location_id=base_config.location_id,
                merchant_id=base_config.merchant_id,
                sync_enabled=base_config.sync_enabled,
                sync_interval_minutes=base_config.sync_interval_minutes,
                auto_sync_products=base_config.auto_sync_products,
                auto_sync_transactions=base_config.auto_sync_transactions,
                last_sync_at=base_config.last_sync_at,
                last_successful_sync_at=base_config.last_successful_sync_at,
                last_sync_status=base_config.last_sync_status,
                last_sync_message=base_config.last_sync_message,
                provider_settings=base_config.provider_settings,
                last_health_check_at=base_config.last_health_check_at,
                health_status=base_config.health_status,
                health_message=base_config.health_message,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc),
                notes=f"Cloned from base config {base_config.id} for demo session {session_id}"
            )
            db.add(new_config)
            # Flush so new_config.id is usable as the FK for cloned transactions.
            await db.flush()
            configs_cloned += 1

            # Most recent transactions for this config (capped at 10).
            tx_result = await db.execute(
                select(POSTransaction)
                .where(POSTransaction.pos_config_id == base_config.id)
                .order_by(POSTransaction.transaction_date.desc())
                .limit(10)  # Clone last 10 transactions
            )
            base_transactions = list(tx_result.scalars().all())

            # Clone each transaction under the new config/tenant.
            for base_tx in base_transactions:
                new_tx = POSTransaction(
                    id=uuid.uuid4(),
                    tenant_id=virtual_uuid,
                    pos_config_id=new_config.id,
                    pos_system=base_tx.pos_system,
                    external_transaction_id=base_tx.external_transaction_id,
                    external_order_id=base_tx.external_order_id,
                    transaction_type=base_tx.transaction_type,
                    status=base_tx.status,
                    subtotal=base_tx.subtotal,
                    tax_amount=base_tx.tax_amount,
                    tip_amount=base_tx.tip_amount,
                    discount_amount=base_tx.discount_amount,
                    total_amount=base_tx.total_amount,
                    currency=base_tx.currency,
                    payment_method=base_tx.payment_method,
                    payment_status=base_tx.payment_status,
                    # Original timestamp preserved — no date adjustment applied.
                    transaction_date=base_tx.transaction_date,
                    pos_created_at=base_tx.pos_created_at,
                    pos_updated_at=base_tx.pos_updated_at,
                    location_id=base_tx.location_id,
                    location_name=base_tx.location_name,
                    staff_id=base_tx.staff_id,
                    staff_name=base_tx.staff_name,
                    customer_id=base_tx.customer_id,
                    customer_email=base_tx.customer_email,
                    customer_phone=base_tx.customer_phone,
                    order_type=base_tx.order_type,
                    table_number=base_tx.table_number,
                    receipt_number=base_tx.receipt_number,
                    is_synced_to_sales=base_tx.is_synced_to_sales,
                    sales_record_id=base_tx.sales_record_id,
                    sync_attempted_at=base_tx.sync_attempted_at,
                    sync_completed_at=base_tx.sync_completed_at,
                    sync_error=base_tx.sync_error,
                    sync_retry_count=base_tx.sync_retry_count,
                    raw_data=base_tx.raw_data,
                    is_processed=base_tx.is_processed,
                    processing_error=base_tx.processing_error,
                    is_duplicate=base_tx.is_duplicate,
                    duplicate_of=base_tx.duplicate_of,
                    created_at=datetime.now(timezone.utc),
                    updated_at=datetime.now(timezone.utc)
                )
                db.add(new_tx)
                # Flush so new_tx.id exists before attaching cloned items.
                await db.flush()
                transactions_cloned += 1

                # Clone the line items belonging to this transaction.
                item_result = await db.execute(
                    select(POSTransactionItem).where(POSTransactionItem.transaction_id == base_tx.id)
                )
                base_items = list(item_result.scalars().all())
                for base_item in base_items:
                    new_item = POSTransactionItem(
                        id=uuid.uuid4(),
                        transaction_id=new_tx.id,
                        tenant_id=virtual_uuid,
                        external_item_id=base_item.external_item_id,
                        sku=base_item.sku,
                        product_name=base_item.product_name,
                        product_category=base_item.product_category,
                        product_subcategory=base_item.product_subcategory,
                        quantity=base_item.quantity,
                        unit_price=base_item.unit_price,
                        total_price=base_item.total_price,
                        discount_amount=base_item.discount_amount,
                        tax_amount=base_item.tax_amount,
                        modifiers=base_item.modifiers,
                        inventory_product_id=base_item.inventory_product_id,
                        is_mapped_to_inventory=base_item.is_mapped_to_inventory,
                        is_synced_to_sales=base_item.is_synced_to_sales,
                        sync_error=base_item.sync_error,
                        raw_data=base_item.raw_data,
                        created_at=datetime.now(timezone.utc),
                        updated_at=datetime.now(timezone.utc)
                    )
                    db.add(new_item)

        # Single commit covering all cloned rows (items are never counted,
        # only configs + transactions appear in records_cloned).
        await db.commit()
        logger.info(
            "POS demo data cloned successfully",
            virtual_tenant_id=str(virtual_tenant_id),
            configs_cloned=configs_cloned,
            transactions_cloned=transactions_cloned
        )
        return {
            "success": True,
            "records_cloned": configs_cloned + transactions_cloned,
            "configs_cloned": configs_cloned,
            "transactions_cloned": transactions_cloned,
            "service": "pos"
        }
    except Exception as e:
        # Roll back the partial clone and surface a 500 to the caller.
        logger.error("Failed to clone POS demo data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to clone POS demo data: {str(e)}")

View File

@@ -11,6 +11,7 @@ from app.api.configurations import router as configurations_router
from app.api.transactions import router as transactions_router
from app.api.pos_operations import router as pos_operations_router
from app.api.analytics import router as analytics_router
from app.api.internal_demo import router as internal_demo_router
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
@@ -173,6 +174,7 @@ service.add_router(configurations_router, tags=["pos-configurations"])
service.add_router(transactions_router, tags=["pos-transactions"])
service.add_router(pos_operations_router, tags=["pos-operations"])
service.add_router(analytics_router, tags=["pos-analytics"])
service.add_router(internal_demo_router, tags=["internal-demo"])
if __name__ == "__main__":

View File

@@ -0,0 +1,301 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo POS Configurations Seeding Script for POS Service
Creates realistic POS configurations and transactions for demo template tenants
This script runs as a Kubernetes init job inside the pos-service container.
"""
import asyncio
import uuid
import sys
import os
from datetime import datetime, timezone, timedelta
from pathlib import Path
# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog
from app.models.pos_config import POSConfiguration
from app.models.pos_transaction import POSTransaction, POSTransactionItem
# Configure logging
logger = structlog.get_logger()
# Base demo tenant IDs — presumably must match the template tenants seeded by
# the tenant service; TODO confirm against that service's seed data.
DEMO_TENANT_SAN_PABLO = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # Individual bakery
DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")  # Central bakery
# Fixed UTC reference point: all seeded timestamps are deterministic offsets from this date
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
async def generate_pos_config_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    pos_system: str,
    provider_name: str
):
    """Create one demo POS configuration for a tenant, then seed demo transactions.

    Idempotent: when a configuration already exists for (tenant_id, pos_system),
    nothing is written and the returned summary dict carries ``skipped=True``.

    Args:
        db: Open async database session (caller owns commit/rollback).
        tenant_id: Tenant to seed.
        tenant_name: Human-readable name, used to derive demo identifiers.
        pos_system: POS system key (e.g. "square", "toast").
        provider_name: Display name stored on the configuration.

    Returns:
        Summary dict with tenant_id, configs_created, and optionally
        transactions_created / skipped flags.
    """
    logger.info(f"Generating POS config for: {tenant_name}", tenant_id=str(tenant_id), pos_system=pos_system)
    # Idempotency guard: bail out if this tenant already has a config for this POS system
    lookup = await db.execute(
        select(POSConfiguration).where(
            POSConfiguration.tenant_id == tenant_id,
            POSConfiguration.pos_system == pos_system
        ).limit(1)
    )
    if lookup.scalar_one_or_none() is not None:
        logger.info(f"POS config already exists for {tenant_name}, skipping")
        return {"tenant_id": str(tenant_id), "configs_created": 0, "skipped": True}
    # Stable demo identifiers derived once from the tenant name
    slug = tenant_name.replace(' ', '-').upper()
    location_code = f"LOC-{slug}-001"
    config = POSConfiguration(
        id=uuid.uuid4(),
        tenant_id=tenant_id,
        pos_system=pos_system,
        provider_name=provider_name,
        is_active=True,
        is_connected=True,
        encrypted_credentials="demo_credentials_encrypted",  # In real scenario, this would be encrypted
        environment="sandbox",
        location_id=location_code,
        merchant_id=f"MERCH-{slug}",
        sync_enabled=True,
        sync_interval_minutes="5",
        auto_sync_products=True,
        auto_sync_transactions=True,
        last_sync_at=BASE_REFERENCE_DATE - timedelta(hours=1),
        last_successful_sync_at=BASE_REFERENCE_DATE - timedelta(hours=1),
        last_sync_status="success",
        last_sync_message="Sincronización completada exitosamente",
        provider_settings={
            "api_key": "demo_api_key_***",
            "location_id": location_code,
            "environment": "sandbox"
        },
        last_health_check_at=BASE_REFERENCE_DATE - timedelta(minutes=30),
        health_status="healthy",
        health_message="Conexión saludable - todas las operaciones funcionando correctamente",
        created_at=BASE_REFERENCE_DATE - timedelta(days=30),
        updated_at=BASE_REFERENCE_DATE - timedelta(hours=1),
        notes=f"Configuración demo para {tenant_name}"
    )
    db.add(config)
    await db.flush()
    logger.info(f"Created POS config for {tenant_name}", config_id=str(config.id))
    # Attach demo transactions to the freshly created configuration
    transactions_created = await generate_demo_transactions(db, tenant_id, config.id, pos_system)
    return {
        "tenant_id": str(tenant_id),
        "configs_created": 1,
        "transactions_created": transactions_created,
        "skipped": False
    }
async def generate_demo_transactions(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    pos_config_id: uuid.UUID,
    pos_system: str
):
    """Generate demo POS transactions (with line items) for a tenant.

    Creates 10 deterministic transactions spread over the last 7 days relative
    to BASE_REFERENCE_DATE. Line-item prices are distributed so they always
    sum exactly to the transaction subtotal: the last item absorbs the
    rounding remainder (previously each item was independently rounded, so
    e.g. 25.00 split 3 ways yielded items totalling 24.99).

    Args:
        db: Open async database session (caller owns commit/rollback).
        tenant_id: Tenant owning the transactions.
        pos_config_id: POS configuration the transactions belong to.
        pos_system: POS system key used in external ID prefixes.

    Returns:
        Number of transactions created.
    """
    # Loop-invariant demo data — built once instead of per line item
    product_names = [
        "Pan de masa madre", "Croissant de mantequilla", "Pastel de chocolate",
        "Baguette artesanal", "Tarta de manzana", "Bollería variada",
        "Pan integral", "Galletas artesanales", "Café con leche"
    ]
    base_amounts = [12.50, 25.00, 45.75, 18.20, 32.00, 60.50, 15.80, 28.90, 55.00, 40.25]
    transactions_to_create = 10  # Create 10 demo transactions
    transactions_created = 0
    for i in range(transactions_to_create):
        # Spread transactions over the last 7 days, varying the hour as well
        days_ago = i % 7
        transaction_date = BASE_REFERENCE_DATE - timedelta(days=days_ago, hours=i % 12)
        # Deterministic, realistic amounts with 10% tax
        subtotal = base_amounts[i % len(base_amounts)]
        tax_amount = round(subtotal * 0.10, 2)
        total_amount = subtotal + tax_amount
        transaction = POSTransaction(
            id=uuid.uuid4(),
            tenant_id=tenant_id,
            pos_config_id=pos_config_id,
            pos_system=pos_system,
            external_transaction_id=f"{pos_system.upper()}-TXN-{i+1:05d}",
            external_order_id=f"{pos_system.upper()}-ORD-{i+1:05d}",
            transaction_type="sale",
            status="completed",
            subtotal=subtotal,
            tax_amount=tax_amount,
            tip_amount=0.00,
            discount_amount=0.00,
            total_amount=total_amount,
            currency="EUR",
            payment_method="card" if i % 2 == 0 else "cash",
            payment_status="paid",
            transaction_date=transaction_date,
            pos_created_at=transaction_date,
            pos_updated_at=transaction_date,
            location_id="LOC-001",  # plain literal (was an f-string with no placeholder)
            location_name="Tienda Principal",
            order_type="takeout" if i % 3 == 0 else "dine_in",
            receipt_number=f"RCP-{i+1:06d}",
            is_synced_to_sales=True,
            sync_completed_at=transaction_date + timedelta(minutes=5),
            sync_retry_count=0,
            is_processed=True,
            is_duplicate=False,
            created_at=transaction_date,
            updated_at=transaction_date
        )
        db.add(transaction)
        await db.flush()
        # 1-3 items per transaction; split the subtotal evenly and let the
        # final item absorb the rounding remainder so items sum to subtotal.
        num_items = (i % 3) + 1
        base_item_price = round(subtotal / num_items, 2)
        for item_idx in range(num_items):
            if item_idx == num_items - 1:
                item_price = round(subtotal - base_item_price * (num_items - 1), 2)
            else:
                item_price = base_item_price
            product_name = product_names[(i + item_idx) % len(product_names)]
            item = POSTransactionItem(
                id=uuid.uuid4(),
                transaction_id=transaction.id,
                tenant_id=tenant_id,
                external_item_id=f"ITEM-{i+1:05d}-{item_idx+1}",
                sku=f"SKU-{(i + item_idx) % len(product_names):03d}",
                product_name=product_name,
                product_category="bakery",
                quantity=1,
                unit_price=item_price,
                total_price=item_price,
                discount_amount=0.00,
                tax_amount=round(item_price * 0.10, 2),
                is_mapped_to_inventory=False,
                is_synced_to_sales=True,
                created_at=transaction_date,
                updated_at=transaction_date
            )
            db.add(item)
        transactions_created += 1
    logger.info(f"Created {transactions_created} demo transactions for tenant {tenant_id}")
    return transactions_created
async def seed_all(db: AsyncSession):
    """Seed every demo template tenant with a POS configuration and transactions.

    Iterates the fixed roster of demo tenants, commits once after all have
    been processed, and returns an aggregate summary dict.
    """
    logger.info("Starting demo POS configurations seed process")
    # Roster of demo tenants: (tenant_id, tenant_name, pos_system, provider_name)
    tenant_specs = [
        (DEMO_TENANT_SAN_PABLO, "San Pablo", "square", "Square POS - San Pablo"),
        (DEMO_TENANT_LA_ESPIGA, "La Espiga", "toast", "Toast POS - La Espiga"),
    ]
    results = []
    for tenant_id, tenant_name, pos_system, provider_name in tenant_specs:
        outcome = await generate_pos_config_for_tenant(
            db,
            tenant_id,
            tenant_name,
            pos_system,
            provider_name
        )
        results.append(outcome)
    await db.commit()
    return {
        "results": results,
        "total_configs_created": sum(r["configs_created"] for r in results),
        "total_transactions_created": sum(r.get("transactions_created", 0) for r in results),
        "status": "completed"
    }
def _print_summary(result):
    """Print a human-readable summary of the seed run to stdout."""
    print("\n" + "="*60)
    print("DEMO POS CONFIGURATIONS SEED SUMMARY")
    print("="*60)
    for tenant_result in result["results"]:
        tenant_id = tenant_result["tenant_id"]
        configs = tenant_result["configs_created"]
        transactions = tenant_result.get("transactions_created", 0)
        if tenant_result.get("skipped", False):
            status = "SKIPPED (already exists)"
        else:
            status = f"CREATED {configs} config(s), {transactions} transaction(s)"
        print(f"Tenant {tenant_id}: {status}")
    print(f"\nTotal Configs: {result['total_configs_created']}")
    print(f"Total Transactions: {result['total_transactions_created']}")
    print("="*60 + "\n")
async def main():
    """Seed-script entry point.

    Reads POS_DATABASE_URL from the environment, runs the seed, and returns
    a process exit code (0 on success, 1 on failure).
    """
    database_url = os.getenv("POS_DATABASE_URL")
    if not database_url:
        logger.error("POS_DATABASE_URL environment variable must be set")
        return 1
    # The async engine needs the asyncpg driver encoded in the URL scheme
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
    engine = create_async_engine(database_url, echo=False)
    session_factory = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    try:
        async with session_factory() as session:
            result = await seed_all(session)
            logger.info(
                "POS configurations seed completed successfully!",
                total_configs=result["total_configs_created"],
                total_transactions=result["total_transactions_created"],
                status=result["status"]
            )
            _print_summary(result)
            return 0
    except Exception as e:
        logger.error(f"POS configurations seed failed: {str(e)}", exc_info=True)
        return 1
    finally:
        # Always release connection-pool resources, even on failure
        await engine.dispose()
# Run as a standalone script (Kubernetes init job) and propagate the exit code
if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)

View File

@@ -177,11 +177,33 @@ async def clone_demo_data(
new_price_id = uuid.uuid4()
price_list_map[price_list.id] = new_price_id
# Transform inventory_product_id to match virtual tenant's ingredient IDs
# Using same formula as inventory service: tenant_int ^ base_int
base_product_int = int(price_list.inventory_product_id.hex, 16)
virtual_tenant_int = int(virtual_uuid.hex, 16)
base_tenant_int = int(base_uuid.hex, 16)
# Reverse the original XOR to get the base ingredient ID
# base_product = base_tenant ^ base_ingredient_id
# So: base_ingredient_id = base_tenant ^ base_product
base_ingredient_int = base_tenant_int ^ base_product_int
# Now apply virtual tenant XOR
new_product_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_int)
logger.debug(
"Transforming price list product ID using XOR",
supplier_name=supplier.name,
base_product_id=str(price_list.inventory_product_id),
new_product_id=str(new_product_id),
product_code=price_list.product_code
)
new_price_list = SupplierPriceList(
id=new_price_id,
tenant_id=virtual_uuid,
supplier_id=new_supplier_id,
inventory_product_id=price_list.inventory_product_id, # Keep product reference
inventory_product_id=new_product_id, # Transformed for virtual tenant
product_code=price_list.product_code,
unit_price=price_list.unit_price,
unit_of_measure=price_list.unit_of_measure,
@@ -290,12 +312,20 @@ async def clone_demo_data(
new_price_list_id = price_list_map.get(item.price_list_item_id, item.price_list_item_id) if item.price_list_item_id else None
# Transform inventory_product_id to match virtual tenant's ingredient IDs
if item.inventory_product_id:
base_product_int = int(item.inventory_product_id.hex, 16)
base_ingredient_int = base_tenant_int ^ base_product_int
new_inventory_product_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_int)
else:
new_inventory_product_id = None
new_item = PurchaseOrderItem(
id=new_item_id,
tenant_id=virtual_uuid,
purchase_order_id=new_po_id,
price_list_item_id=new_price_list_id,
inventory_product_id=item.inventory_product_id, # Keep product reference
inventory_product_id=new_inventory_product_id, # Transformed product reference
product_code=item.product_code,
ordered_quantity=item.ordered_quantity,
unit_of_measure=item.unit_of_measure,
@@ -377,12 +407,20 @@ async def clone_demo_data(
for item in delivery_items:
new_po_item_id = po_item_map.get(item.purchase_order_item_id, item.purchase_order_item_id)
# Transform inventory_product_id to match virtual tenant's ingredient IDs
if item.inventory_product_id:
base_product_int = int(item.inventory_product_id.hex, 16)
base_ingredient_int = base_tenant_int ^ base_product_int
new_inventory_product_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_int)
else:
new_inventory_product_id = None
new_item = DeliveryItem(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
delivery_id=new_delivery_id,
purchase_order_item_id=new_po_item_id,
inventory_product_id=item.inventory_product_id, # Keep product reference
inventory_product_id=new_inventory_product_id, # Transformed product reference
ordered_quantity=item.ordered_quantity,
delivered_quantity=item.delivered_quantity,
accepted_quantity=item.accepted_quantity,

View File

@@ -43,9 +43,9 @@ async def create_purchase_order(
try:
service = PurchaseOrderService(db)
purchase_order = await service.create_purchase_order(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
po_data=po_data,
created_by=current_user.user_id
created_by=current_user["user_id"]
)
return PurchaseOrderResponse.from_orm(purchase_order)
except ValueError as e:
@@ -93,7 +93,9 @@ async def list_purchase_orders(
status_enum = None
if status:
try:
status_enum = PurchaseOrderStatus(status.upper())
# Convert from PENDING_APPROVAL to pending_approval format
status_value = status.lower()
status_enum = PurchaseOrderStatus(status_value)
except ValueError:
raise HTTPException(status_code=400, detail="Invalid status")
@@ -110,11 +112,29 @@ async def list_purchase_orders(
)
orders = await service.search_purchase_orders(
tenant_id=current_user.tenant_id,
tenant_id=current_user["tenant_id"],
search_params=search_params
)
return [PurchaseOrderSummary.from_orm(order) for order in orders]
# Convert to response with supplier names
response = []
for order in orders:
order_dict = {
"id": order.id,
"po_number": order.po_number,
"supplier_id": order.supplier_id,
"supplier_name": order.supplier.name if order.supplier else None,
"status": order.status,
"priority": order.priority,
"order_date": order.order_date,
"required_delivery_date": order.required_delivery_date,
"total_amount": order.total_amount,
"currency": order.currency,
"created_at": order.created_at
}
response.append(PurchaseOrderSummary(**order_dict))
return response
except HTTPException:
raise
except Exception as e:
@@ -134,14 +154,11 @@ async def get_purchase_order(
try:
service = PurchaseOrderService(db)
purchase_order = await service.get_purchase_order(po_id)
if not purchase_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
# Check tenant access
if purchase_order.tenant_id != current_user.tenant_id:
raise HTTPException(status_code=403, detail="Access denied")
# Tenant access control is handled by the gateway
return PurchaseOrderResponse.from_orm(purchase_order)
except HTTPException:
raise
@@ -168,13 +185,13 @@ async def update_purchase_order(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
purchase_order = await service.update_purchase_order(
po_id=po_id,
po_data=po_data,
updated_by=current_user.user_id
updated_by=current_user["user_id"]
)
if not purchase_order:
@@ -206,7 +223,7 @@ async def delete_purchase_order(
existing_order = await service.get_purchase_order(po_id)
if not existing_order:
raise HTTPException(status_code=404, detail="Purchase order not found")
if existing_order.tenant_id != current_user.tenant_id:
if existing_order.tenant_id != current_user["tenant_id"]:
raise HTTPException(status_code=403, detail="Access denied")
# Capture PO data before deletion

View File

@@ -9,8 +9,10 @@ from uuid import UUID
import structlog
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.core.database import get_db
from app.services.supplier_service import SupplierService
from app.models.suppliers import SupplierPriceList
from app.schemas.suppliers import (
SupplierCreate, SupplierUpdate, SupplierResponse, SupplierSummary,
SupplierSearchParams
@@ -202,4 +204,54 @@ async def delete_supplier(
raise
except Exception as e:
logger.error("Error deleting supplier", supplier_id=str(supplier_id), error=str(e))
raise HTTPException(status_code=500, detail="Failed to delete supplier")
raise HTTPException(status_code=500, detail="Failed to delete supplier")
@router.get(
    route_builder.build_resource_action_route("suppliers", "supplier_id", "products"),
    response_model=List[Dict[str, Any]]
)
async def get_supplier_products(
    supplier_id: UUID = Path(..., description="Supplier ID"),
    tenant_id: str = Path(..., description="Tenant ID"),
    is_active: bool = Query(True, description="Filter by active price lists"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get list of product IDs that a supplier provides

    Returns the distinct inventory product IDs found in the supplier's price
    lists. When ``is_active`` is true (the default) only active price lists
    are considered; when false, all price lists are included.
    """
    # Validate the path parameter up front so a malformed tenant ID yields a
    # 400 instead of surfacing as an opaque 500 from the query below.
    try:
        tenant_uuid = UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant ID")
    try:
        # Query supplier price lists scoped to this tenant and supplier
        query = select(SupplierPriceList).where(
            SupplierPriceList.tenant_id == tenant_uuid,
            SupplierPriceList.supplier_id == supplier_id
        )
        if is_active:
            query = query.where(SupplierPriceList.is_active == True)
        result = await db.execute(query)
        price_lists = result.scalars().all()
        # Deduplicate: the same product may appear in several price lists
        product_ids = {str(pl.inventory_product_id) for pl in price_lists}
        logger.info(
            "Retrieved supplier products",
            supplier_id=str(supplier_id),
            product_count=len(product_ids)
        )
        return [{"inventory_product_id": pid} for pid in product_ids]
    except Exception as e:
        logger.error(
            "Error getting supplier products",
            supplier_id=str(supplier_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail="Failed to retrieve supplier products"
        )

View File

@@ -145,7 +145,17 @@ class Supplier(Base):
delivery_rating = Column(Float, nullable=True, default=0.0) # Average delivery rating (1-5)
total_orders = Column(Integer, nullable=False, default=0)
total_amount = Column(Numeric(12, 2), nullable=False, default=0.0)
# Trust and auto-approval metrics
trust_score = Column(Float, nullable=False, default=0.0) # Calculated trust score (0.0-1.0)
is_preferred_supplier = Column(Boolean, nullable=False, default=False) # Preferred supplier status
auto_approve_enabled = Column(Boolean, nullable=False, default=False) # Enable auto-approval for this supplier
total_pos_count = Column(Integer, nullable=False, default=0) # Total purchase orders created
approved_pos_count = Column(Integer, nullable=False, default=0) # Total POs approved
on_time_delivery_rate = Column(Float, nullable=False, default=0.0) # Percentage of on-time deliveries
fulfillment_rate = Column(Float, nullable=False, default=0.0) # Percentage of orders fully fulfilled
last_performance_update = Column(DateTime(timezone=True), nullable=True) # Last time metrics were calculated
# Onboarding and approval
approved_by = Column(UUID(as_uuid=True), nullable=True) # User who approved
approved_at = Column(DateTime(timezone=True), nullable=True)

View File

@@ -1,16 +1,17 @@
# services/suppliers/app/repositories/purchase_order_repository.py
"""
Purchase Order repository for database operations
Purchase Order repository for database operations (Async SQLAlchemy 2.0)
"""
from typing import List, Optional, Dict, Any, Tuple
from sqlalchemy.orm import Session, joinedload
from sqlalchemy import and_, or_, func, desc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from sqlalchemy import select, and_, or_, func, desc
from uuid import UUID
from datetime import datetime, timedelta
from app.models.suppliers import (
PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus,
PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus,
Supplier, SupplierPriceList
)
from app.repositories.base import BaseRepository
@@ -18,33 +19,35 @@ from app.repositories.base import BaseRepository
class PurchaseOrderRepository(BaseRepository[PurchaseOrder]):
"""Repository for purchase order management operations"""
def __init__(self, db: Session):
def __init__(self, db: AsyncSession):
super().__init__(PurchaseOrder, db)
def get_by_po_number(self, tenant_id: UUID, po_number: str) -> Optional[PurchaseOrder]:
async def get_by_po_number(self, tenant_id: UUID, po_number: str) -> Optional[PurchaseOrder]:
"""Get purchase order by PO number within tenant"""
return (
self.db.query(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.po_number == po_number
)
stmt = select(self.model).filter(
and_(
self.model.tenant_id == tenant_id,
self.model.po_number == po_number
)
.first()
)
def get_with_items(self, po_id: UUID) -> Optional[PurchaseOrder]:
"""Get purchase order with all items loaded"""
return (
self.db.query(self.model)
.options(joinedload(self.model.items))
result = await self.db.execute(stmt)
return result.scalar_one_or_none()
async def get_with_items(self, po_id: UUID) -> Optional[PurchaseOrder]:
"""Get purchase order with all items and supplier loaded"""
stmt = (
select(self.model)
.options(
selectinload(self.model.items),
selectinload(self.model.supplier)
)
.filter(self.model.id == po_id)
.first()
)
def search_purchase_orders(
result = await self.db.execute(stmt)
return result.scalar_one_or_none()
async def search_purchase_orders(
self,
tenant_id: UUID,
supplier_id: Optional[UUID] = None,
@@ -57,320 +60,196 @@ class PurchaseOrderRepository(BaseRepository[PurchaseOrder]):
offset: int = 0
) -> List[PurchaseOrder]:
"""Search purchase orders with comprehensive filters"""
query = (
self.db.query(self.model)
.options(joinedload(self.model.supplier))
stmt = (
select(self.model)
.options(selectinload(self.model.supplier))
.filter(self.model.tenant_id == tenant_id)
)
# Supplier filter
if supplier_id:
query = query.filter(self.model.supplier_id == supplier_id)
stmt = stmt.filter(self.model.supplier_id == supplier_id)
# Status filter
if status:
query = query.filter(self.model.status == status)
stmt = stmt.filter(self.model.status == status)
# Priority filter
if priority:
query = query.filter(self.model.priority == priority)
stmt = stmt.filter(self.model.priority == priority)
# Date range filter
if date_from:
query = query.filter(self.model.order_date >= date_from)
stmt = stmt.filter(self.model.order_date >= date_from)
if date_to:
query = query.filter(self.model.order_date <= date_to)
# Search term filter (PO number, reference, supplier name)
stmt = stmt.filter(self.model.order_date <= date_to)
# Search term filter (PO number, reference)
if search_term:
search_filter = or_(
self.model.po_number.ilike(f"%{search_term}%"),
self.model.reference_number.ilike(f"%{search_term}%"),
self.model.supplier.has(Supplier.name.ilike(f"%{search_term}%"))
self.model.reference_number.ilike(f"%{search_term}%")
)
query = query.filter(search_filter)
return (
query.order_by(desc(self.model.order_date))
.limit(limit)
.offset(offset)
.all()
)
def get_orders_by_status(
self,
tenant_id: UUID,
status: PurchaseOrderStatus
stmt = stmt.filter(search_filter)
stmt = stmt.order_by(desc(self.model.order_date)).limit(limit).offset(offset)
result = await self.db.execute(stmt)
return list(result.scalars().all())
async def get_orders_by_status(
self,
tenant_id: UUID,
status: PurchaseOrderStatus,
limit: int = 100
) -> List[PurchaseOrder]:
"""Get all orders with specific status"""
return (
self.db.query(self.model)
.options(joinedload(self.model.supplier))
"""Get purchase orders by status"""
stmt = (
select(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status == status
)
)
.order_by(self.model.order_date.desc())
.all()
.order_by(desc(self.model.order_date))
.limit(limit)
)
def get_orders_requiring_approval(self, tenant_id: UUID) -> List[PurchaseOrder]:
"""Get orders pending approval"""
return (
self.db.query(self.model)
.options(joinedload(self.model.supplier))
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status == PurchaseOrderStatus.PENDING_APPROVAL,
self.model.requires_approval == True
)
)
.order_by(self.model.order_date.asc())
.all()
)
def get_overdue_orders(self, tenant_id: UUID) -> List[PurchaseOrder]:
"""Get orders that are overdue for delivery"""
today = datetime.utcnow().date()
return (
self.db.query(self.model)
.options(joinedload(self.model.supplier))
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status.in_([
PurchaseOrderStatus.CONFIRMED,
PurchaseOrderStatus.SENT_TO_SUPPLIER,
PurchaseOrderStatus.PARTIALLY_RECEIVED
]),
self.model.required_delivery_date < today
)
)
.order_by(self.model.required_delivery_date.asc())
.all()
)
def get_orders_by_supplier(
self,
tenant_id: UUID,
supplier_id: UUID,
limit: int = 20
result = await self.db.execute(stmt)
return list(result.scalars().all())
async def get_pending_approval(
self,
tenant_id: UUID,
limit: int = 50
) -> List[PurchaseOrder]:
"""Get recent orders for a specific supplier"""
return (
self.db.query(self.model)
"""Get purchase orders pending approval"""
return await self.get_orders_by_status(
tenant_id,
PurchaseOrderStatus.pending_approval,
limit
)
async def get_by_supplier(
self,
tenant_id: UUID,
supplier_id: UUID,
limit: int = 100,
offset: int = 0
) -> List[PurchaseOrder]:
"""Get purchase orders for a specific supplier"""
stmt = (
select(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.supplier_id == supplier_id
)
)
.order_by(self.model.order_date.desc())
.order_by(desc(self.model.order_date))
.limit(limit)
.all()
.offset(offset)
)
def generate_po_number(self, tenant_id: UUID) -> str:
"""Generate next PO number for tenant"""
# Get current year
current_year = datetime.utcnow().year
# Find highest PO number for current year
year_prefix = f"PO{current_year}"
latest_po = (
self.db.query(self.model.po_number)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.po_number.like(f"{year_prefix}%")
)
)
.order_by(self.model.po_number.desc())
.first()
)
if latest_po:
# Extract number and increment
try:
last_number = int(latest_po.po_number.replace(year_prefix, ""))
new_number = last_number + 1
except ValueError:
new_number = 1
else:
new_number = 1
return f"{year_prefix}{new_number:04d}"
def update_order_status(
result = await self.db.execute(stmt)
return list(result.scalars().all())
async def get_orders_requiring_delivery(
self,
po_id: UUID,
status: PurchaseOrderStatus,
updated_by: UUID,
notes: Optional[str] = None
) -> Optional[PurchaseOrder]:
"""Update purchase order status with audit trail"""
po = self.get_by_id(po_id)
if not po:
return None
po.status = status
po.updated_by = updated_by
po.updated_at = datetime.utcnow()
if notes:
existing_notes = po.internal_notes or ""
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M")
po.internal_notes = f"{existing_notes}\n[{timestamp}] Status changed to {status.value}: {notes}".strip()
# Set specific timestamps based on status
if status == PurchaseOrderStatus.APPROVED:
po.approved_at = datetime.utcnow()
elif status == PurchaseOrderStatus.SENT_TO_SUPPLIER:
po.sent_to_supplier_at = datetime.utcnow()
elif status == PurchaseOrderStatus.CONFIRMED:
po.supplier_confirmation_date = datetime.utcnow()
self.db.commit()
self.db.refresh(po)
return po
def approve_order(
self,
po_id: UUID,
approved_by: UUID,
approval_notes: Optional[str] = None
) -> Optional[PurchaseOrder]:
"""Approve a purchase order"""
po = self.get_by_id(po_id)
if not po or po.status != PurchaseOrderStatus.PENDING_APPROVAL:
return None
po.status = PurchaseOrderStatus.APPROVED
po.approved_by = approved_by
po.approved_at = datetime.utcnow()
po.updated_by = approved_by
po.updated_at = datetime.utcnow()
if approval_notes:
po.internal_notes = (po.internal_notes or "") + f"\nApproval notes: {approval_notes}"
self.db.commit()
self.db.refresh(po)
return po
def calculate_order_totals(self, po_id: UUID) -> Optional[PurchaseOrder]:
"""Recalculate order totals based on line items"""
po = self.get_with_items(po_id)
if not po:
return None
# Calculate subtotal from items
subtotal = sum(item.line_total for item in po.items)
# Keep existing tax, shipping, and discount
tax_amount = po.tax_amount or 0
shipping_cost = po.shipping_cost or 0
discount_amount = po.discount_amount or 0
# Calculate total
total_amount = subtotal + tax_amount + shipping_cost - discount_amount
# Update PO
po.subtotal = subtotal
po.total_amount = max(0, total_amount) # Ensure non-negative
po.updated_at = datetime.utcnow()
self.db.commit()
self.db.refresh(po)
return po
def get_purchase_order_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
"""Get purchase order statistics for dashboard"""
# Total orders
total_orders = self.count_by_tenant(tenant_id)
# Orders by status
status_counts = (
self.db.query(
self.model.status,
func.count(self.model.id).label('count')
)
.filter(self.model.tenant_id == tenant_id)
.group_by(self.model.status)
.all()
)
status_dict = {status.value: 0 for status in PurchaseOrderStatus}
for status, count in status_counts:
status_dict[status.value] = count
# This month's orders
first_day_month = datetime.utcnow().replace(day=1, hour=0, minute=0, second=0, microsecond=0)
this_month_orders = (
self.db.query(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.order_date >= first_day_month
)
)
.count()
)
# Total spend this month
this_month_spend = (
self.db.query(func.sum(self.model.total_amount))
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.order_date >= first_day_month,
self.model.status != PurchaseOrderStatus.CANCELLED
)
)
.scalar()
) or 0.0
# Average order value
avg_order_value = (
self.db.query(func.avg(self.model.total_amount))
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status != PurchaseOrderStatus.CANCELLED
)
)
.scalar()
) or 0.0
# Overdue orders count
today = datetime.utcnow().date()
overdue_count = (
self.db.query(self.model)
tenant_id: UUID,
days_ahead: int = 7
) -> List[PurchaseOrder]:
"""Get orders expecting delivery within specified days"""
cutoff_date = datetime.utcnow().date() + timedelta(days=days_ahead)
stmt = (
select(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status.in_([
PurchaseOrderStatus.CONFIRMED,
PurchaseOrderStatus.SENT_TO_SUPPLIER,
PurchaseOrderStatus.PARTIALLY_RECEIVED
PurchaseOrderStatus.approved,
PurchaseOrderStatus.sent_to_supplier,
PurchaseOrderStatus.confirmed
]),
self.model.required_delivery_date <= cutoff_date
)
)
.order_by(self.model.required_delivery_date)
)
result = await self.db.execute(stmt)
return list(result.scalars().all())
async def get_overdue_deliveries(self, tenant_id: UUID) -> List[PurchaseOrder]:
"""Get purchase orders with overdue deliveries"""
today = datetime.utcnow().date()
stmt = (
select(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status.in_([
PurchaseOrderStatus.approved,
PurchaseOrderStatus.sent_to_supplier,
PurchaseOrderStatus.confirmed
]),
self.model.required_delivery_date < today
)
)
.count()
.order_by(self.model.required_delivery_date)
)
return {
"total_orders": total_orders,
"status_counts": status_dict,
"this_month_orders": this_month_orders,
"this_month_spend": float(this_month_spend),
"avg_order_value": round(float(avg_order_value), 2),
"overdue_count": overdue_count,
"pending_approval": status_dict.get(PurchaseOrderStatus.PENDING_APPROVAL.value, 0)
}
result = await self.db.execute(stmt)
return list(result.scalars().all())
async def get_total_spent_by_supplier(
self,
tenant_id: UUID,
supplier_id: UUID,
date_from: Optional[datetime] = None,
date_to: Optional[datetime] = None
) -> float:
"""Calculate total amount spent with a supplier"""
stmt = (
select(func.sum(self.model.total_amount))
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.supplier_id == supplier_id,
self.model.status.in_([
PurchaseOrderStatus.approved,
PurchaseOrderStatus.sent_to_supplier,
PurchaseOrderStatus.confirmed,
PurchaseOrderStatus.received,
PurchaseOrderStatus.completed
])
)
)
)
if date_from:
stmt = stmt.filter(self.model.order_date >= date_from)
if date_to:
stmt = stmt.filter(self.model.order_date <= date_to)
result = await self.db.execute(stmt)
total = result.scalar()
return float(total) if total else 0.0
async def count_by_status(
self,
tenant_id: UUID,
status: PurchaseOrderStatus
) -> int:
"""Count purchase orders by status"""
stmt = (
select(func.count())
.select_from(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.status == status
)
)
)
result = await self.db.execute(stmt)
return result.scalar() or 0

View File

@@ -0,0 +1,376 @@
# services/suppliers/app/repositories/purchase_order_repository.py
"""
Purchase Order repository for database operations
"""
from typing import List, Optional, Dict, Any, Tuple
from sqlalchemy.orm import Session, joinedload
from sqlalchemy import and_, or_, func, desc
from uuid import UUID
from datetime import datetime, timedelta
from app.models.suppliers import (
PurchaseOrder, PurchaseOrderItem, PurchaseOrderStatus,
Supplier, SupplierPriceList
)
from app.repositories.base import BaseRepository
class PurchaseOrderRepository(BaseRepository[PurchaseOrder]):
    """Repository for purchase order management operations"""
    # NOTE(review): this class uses the synchronous Query API (self.db.query)
    # and UPPERCASE enum members (e.g. PENDING_APPROVAL), while other files in
    # this commit use async select() and lowercase members — confirm both the
    # session type and the PurchaseOrderStatus member casing are consistent.
    def __init__(self, db: Session):
        super().__init__(PurchaseOrder, db)
    def get_by_po_number(self, tenant_id: UUID, po_number: str) -> Optional[PurchaseOrder]:
        """Get purchase order by PO number within tenant"""
        return (
            self.db.query(self.model)
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.po_number == po_number
                )
            )
            .first()
        )
    def get_with_items(self, po_id: UUID) -> Optional[PurchaseOrder]:
        """Get purchase order with all items loaded"""
        return (
            self.db.query(self.model)
            # Eager-load line items so callers can iterate po.items without a
            # lazy load after the session closes.
            .options(joinedload(self.model.items))
            .filter(self.model.id == po_id)
            .first()
        )
    def search_purchase_orders(
        self,
        tenant_id: UUID,
        supplier_id: Optional[UUID] = None,
        status: Optional[PurchaseOrderStatus] = None,
        priority: Optional[str] = None,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None,
        search_term: Optional[str] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[PurchaseOrder]:
        """Search purchase orders with comprehensive filters.

        All filters are optional and combined with AND; results are newest
        order_date first, paginated by limit/offset.
        """
        query = (
            self.db.query(self.model)
            .options(joinedload(self.model.supplier))
            .filter(self.model.tenant_id == tenant_id)
        )
        # Supplier filter
        if supplier_id:
            query = query.filter(self.model.supplier_id == supplier_id)
        # Status filter
        if status:
            query = query.filter(self.model.status == status)
        # Priority filter
        if priority:
            query = query.filter(self.model.priority == priority)
        # Date range filter
        if date_from:
            query = query.filter(self.model.order_date >= date_from)
        if date_to:
            query = query.filter(self.model.order_date <= date_to)
        # Search term filter (PO number, reference, supplier name)
        if search_term:
            search_filter = or_(
                self.model.po_number.ilike(f"%{search_term}%"),
                self.model.reference_number.ilike(f"%{search_term}%"),
                self.model.supplier.has(Supplier.name.ilike(f"%{search_term}%"))
            )
            query = query.filter(search_filter)
        return (
            query.order_by(desc(self.model.order_date))
            .limit(limit)
            .offset(offset)
            .all()
        )
    def get_orders_by_status(
        self,
        tenant_id: UUID,
        status: PurchaseOrderStatus
    ) -> List[PurchaseOrder]:
        """Get all orders with specific status"""
        return (
            self.db.query(self.model)
            .options(joinedload(self.model.supplier))
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.status == status
                )
            )
            .order_by(self.model.order_date.desc())
            .all()
        )
    def get_orders_requiring_approval(self, tenant_id: UUID) -> List[PurchaseOrder]:
        """Get orders pending approval"""
        return (
            self.db.query(self.model)
            .options(joinedload(self.model.supplier))
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.status == PurchaseOrderStatus.PENDING_APPROVAL,
                    self.model.requires_approval == True
                )
            )
            # Oldest first so approvers work through the backlog in order.
            .order_by(self.model.order_date.asc())
            .all()
        )
    def get_overdue_orders(self, tenant_id: UUID) -> List[PurchaseOrder]:
        """Get orders that are overdue for delivery"""
        today = datetime.utcnow().date()
        return (
            self.db.query(self.model)
            .options(joinedload(self.model.supplier))
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    # Only in-flight orders can be "overdue".
                    self.model.status.in_([
                        PurchaseOrderStatus.CONFIRMED,
                        PurchaseOrderStatus.SENT_TO_SUPPLIER,
                        PurchaseOrderStatus.PARTIALLY_RECEIVED
                    ]),
                    self.model.required_delivery_date < today
                )
            )
            .order_by(self.model.required_delivery_date.asc())
            .all()
        )
    def get_orders_by_supplier(
        self,
        tenant_id: UUID,
        supplier_id: UUID,
        limit: int = 20
    ) -> List[PurchaseOrder]:
        """Get recent orders for a specific supplier"""
        return (
            self.db.query(self.model)
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.supplier_id == supplier_id
                )
            )
            .order_by(self.model.order_date.desc())
            .limit(limit)
            .all()
        )
    def generate_po_number(self, tenant_id: UUID) -> str:
        """Generate next PO number for tenant"""
        # NOTE(review): read-then-increment is racy under concurrent PO
        # creation — two transactions can produce the same number; a DB
        # sequence or a unique constraint with retry would be safer.
        # Get current year
        current_year = datetime.utcnow().year
        # Find highest PO number for current year
        year_prefix = f"PO{current_year}"
        latest_po = (
            self.db.query(self.model.po_number)
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.po_number.like(f"{year_prefix}%")
                )
            )
            # Lexicographic max works because suffixes are zero-padded.
            .order_by(self.model.po_number.desc())
            .first()
        )
        if latest_po:
            # Extract number and increment
            try:
                last_number = int(latest_po.po_number.replace(year_prefix, ""))
                new_number = last_number + 1
            except ValueError:
                # Malformed suffix: restart numbering rather than crash.
                new_number = 1
        else:
            new_number = 1
        return f"{year_prefix}{new_number:04d}"
    def update_order_status(
        self,
        po_id: UUID,
        status: PurchaseOrderStatus,
        updated_by: UUID,
        notes: Optional[str] = None
    ) -> Optional[PurchaseOrder]:
        """Update purchase order status with audit trail.

        Appends a timestamped note (when given), stamps the status-specific
        timestamp, commits, and returns the refreshed order (None if absent).
        """
        po = self.get_by_id(po_id)
        if not po:
            return None
        po.status = status
        po.updated_by = updated_by
        po.updated_at = datetime.utcnow()
        if notes:
            existing_notes = po.internal_notes or ""
            timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M")
            po.internal_notes = f"{existing_notes}\n[{timestamp}] Status changed to {status.value}: {notes}".strip()
        # Set specific timestamps based on status
        if status == PurchaseOrderStatus.APPROVED:
            po.approved_at = datetime.utcnow()
        elif status == PurchaseOrderStatus.SENT_TO_SUPPLIER:
            po.sent_to_supplier_at = datetime.utcnow()
        elif status == PurchaseOrderStatus.CONFIRMED:
            po.supplier_confirmation_date = datetime.utcnow()
        self.db.commit()
        self.db.refresh(po)
        return po
    def approve_order(
        self,
        po_id: UUID,
        approved_by: UUID,
        approval_notes: Optional[str] = None
    ) -> Optional[PurchaseOrder]:
        """Approve a purchase order"""
        po = self.get_by_id(po_id)
        # Only orders awaiting approval may be approved; anything else
        # (missing or wrong state) returns None.
        if not po or po.status != PurchaseOrderStatus.PENDING_APPROVAL:
            return None
        po.status = PurchaseOrderStatus.APPROVED
        po.approved_by = approved_by
        po.approved_at = datetime.utcnow()
        po.updated_by = approved_by
        po.updated_at = datetime.utcnow()
        if approval_notes:
            po.internal_notes = (po.internal_notes or "") + f"\nApproval notes: {approval_notes}"
        self.db.commit()
        self.db.refresh(po)
        return po
    def calculate_order_totals(self, po_id: UUID) -> Optional[PurchaseOrder]:
        """Recalculate order totals based on line items"""
        po = self.get_with_items(po_id)
        if not po:
            return None
        # Calculate subtotal from items
        subtotal = sum(item.line_total for item in po.items)
        # Keep existing tax, shipping, and discount
        tax_amount = po.tax_amount or 0
        shipping_cost = po.shipping_cost or 0
        discount_amount = po.discount_amount or 0
        # Calculate total
        total_amount = subtotal + tax_amount + shipping_cost - discount_amount
        # Update PO
        po.subtotal = subtotal
        po.total_amount = max(0, total_amount) # Ensure non-negative
        po.updated_at = datetime.utcnow()
        self.db.commit()
        self.db.refresh(po)
        return po
    def get_purchase_order_statistics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get purchase order statistics for dashboard"""
        # Total orders
        # (count_by_tenant is inherited from BaseRepository — presumably a
        # tenant-scoped COUNT; verify in the base class.)
        total_orders = self.count_by_tenant(tenant_id)
        # Orders by status
        status_counts = (
            self.db.query(
                self.model.status,
                func.count(self.model.id).label('count')
            )
            .filter(self.model.tenant_id == tenant_id)
            .group_by(self.model.status)
            .all()
        )
        # Start every status at zero so the dashboard always sees all keys.
        status_dict = {status.value: 0 for status in PurchaseOrderStatus}
        for status, count in status_counts:
            status_dict[status.value] = count
        # This month's orders
        first_day_month = datetime.utcnow().replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        this_month_orders = (
            self.db.query(self.model)
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.order_date >= first_day_month
                )
            )
            .count()
        )
        # Total spend this month
        this_month_spend = (
            self.db.query(func.sum(self.model.total_amount))
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.order_date >= first_day_month,
                    self.model.status != PurchaseOrderStatus.CANCELLED
                )
            )
            .scalar()
        ) or 0.0
        # Average order value
        avg_order_value = (
            self.db.query(func.avg(self.model.total_amount))
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.status != PurchaseOrderStatus.CANCELLED
                )
            )
            .scalar()
        ) or 0.0
        # Overdue orders count
        today = datetime.utcnow().date()
        overdue_count = (
            self.db.query(self.model)
            .filter(
                and_(
                    self.model.tenant_id == tenant_id,
                    self.model.status.in_([
                        PurchaseOrderStatus.CONFIRMED,
                        PurchaseOrderStatus.SENT_TO_SUPPLIER,
                        PurchaseOrderStatus.PARTIALLY_RECEIVED
                    ]),
                    self.model.required_delivery_date < today
                )
            )
            .count()
        )
        return {
            "total_orders": total_orders,
            "status_counts": status_dict,
            "this_month_orders": this_month_orders,
            "this_month_spend": float(this_month_spend),
            "avg_order_value": round(float(avg_order_value), 2),
            "overdue_count": overdue_count,
            "pending_approval": status_dict.get(PurchaseOrderStatus.PENDING_APPROVAL.value, 0)
        }

View File

@@ -0,0 +1,289 @@
# services/suppliers/app/repositories/supplier_performance_repository.py
"""
Supplier Performance Repository - Calculate and manage supplier trust scores
Handles supplier performance metrics, trust scores, and auto-approval eligibility
"""
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import structlog
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_, case
from sqlalchemy.orm import selectinload
from app.models.suppliers import (
Supplier,
PurchaseOrder,
PurchaseOrderStatus
)
logger = structlog.get_logger()
class SupplierPerformanceRepository:
    """Repository for calculating and managing supplier performance metrics"""
    def __init__(self, db: AsyncSession):
        # Async session; all queries below use 2.0-style select().
        self.db = db
    async def calculate_trust_score(self, supplier_id: UUID) -> float:
        """
        Calculate comprehensive trust score for a supplier
        Score components (weighted):
        - Quality rating: 30%
        - Delivery rating: 30%
        - On-time delivery rate: 20%
        - Fulfillment rate: 15%
        - Order history: 5%
        Returns:
            float: Trust score between 0.0 and 1.0
        """
        try:
            # Get supplier with current metrics
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()
            if not supplier:
                logger.warning("Supplier not found for trust score calculation", supplier_id=str(supplier_id))
                return 0.0
            # Calculate on-time delivery rate from recent POs
            on_time_rate = await self._calculate_on_time_delivery_rate(supplier_id)
            # Calculate fulfillment rate from recent POs
            fulfillment_rate = await self._calculate_fulfillment_rate(supplier_id)
            # Calculate order history score (more orders = higher confidence)
            # Saturates at 50 purchase orders.
            order_history_score = min(1.0, supplier.total_pos_count / 50.0)
            # Weighted components
            quality_score = (supplier.quality_rating or 0.0) / 5.0 # Normalize to 0-1
            delivery_score = (supplier.delivery_rating or 0.0) / 5.0 # Normalize to 0-1
            trust_score = (
                quality_score * 0.30 +
                delivery_score * 0.30 +
                on_time_rate * 0.20 +
                fulfillment_rate * 0.15 +
                order_history_score * 0.05
            )
            # Ensure score is between 0 and 1
            trust_score = max(0.0, min(1.0, trust_score))
            logger.info(
                "Trust score calculated",
                supplier_id=str(supplier_id),
                trust_score=trust_score,
                quality_score=quality_score,
                delivery_score=delivery_score,
                on_time_rate=on_time_rate,
                fulfillment_rate=fulfillment_rate,
                order_history_score=order_history_score
            )
            return trust_score
        except Exception as e:
            # Best-effort: a failed calculation degrades to zero trust
            # instead of propagating the error to callers.
            logger.error("Error calculating trust score", supplier_id=str(supplier_id), error=str(e))
            return 0.0
    async def _calculate_on_time_delivery_rate(self, supplier_id: UUID, days: int = 90) -> float:
        """Calculate percentage of orders delivered on time in the last N days"""
        try:
            cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
            # Get completed orders with delivery dates
            # COUNT(CASE ...) counts only on-time rows; NULL (late) rows are
            # skipped by COUNT, so no ELSE branch is needed.
            stmt = select(
                func.count(PurchaseOrder.id).label('total_orders'),
                func.count(
                    case(
                        (PurchaseOrder.actual_delivery_date <= PurchaseOrder.required_delivery_date, 1)
                    )
                ).label('on_time_orders')
            ).where(
                and_(
                    PurchaseOrder.supplier_id == supplier_id,
                    PurchaseOrder.status == PurchaseOrderStatus.completed,
                    PurchaseOrder.actual_delivery_date.isnot(None),
                    PurchaseOrder.created_at >= cutoff_date
                )
            )
            result = await self.db.execute(stmt)
            row = result.one()
            # No qualifying orders: report 0% rather than divide by zero.
            if row.total_orders == 0:
                return 0.0
            on_time_rate = float(row.on_time_orders) / float(row.total_orders)
            return on_time_rate
        except Exception as e:
            logger.error("Error calculating on-time delivery rate", supplier_id=str(supplier_id), error=str(e))
            return 0.0
    async def _calculate_fulfillment_rate(self, supplier_id: UUID, days: int = 90) -> float:
        """Calculate percentage of orders fully fulfilled (no shortages) in the last N days"""
        try:
            cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
            # Get completed/confirmed orders
            # Completed / (completed + partially_received) within the window.
            stmt = select(
                func.count(PurchaseOrder.id).label('total_orders'),
                func.count(
                    case(
                        (PurchaseOrder.status == PurchaseOrderStatus.completed, 1)
                    )
                ).label('completed_orders')
            ).where(
                and_(
                    PurchaseOrder.supplier_id == supplier_id,
                    PurchaseOrder.status.in_([
                        PurchaseOrderStatus.completed,
                        PurchaseOrderStatus.partially_received
                    ]),
                    PurchaseOrder.created_at >= cutoff_date
                )
            )
            result = await self.db.execute(stmt)
            row = result.one()
            if row.total_orders == 0:
                return 0.0
            fulfillment_rate = float(row.completed_orders) / float(row.total_orders)
            return fulfillment_rate
        except Exception as e:
            logger.error("Error calculating fulfillment rate", supplier_id=str(supplier_id), error=str(e))
            return 0.0
    async def update_supplier_performance_metrics(self, supplier_id: UUID) -> Dict[str, Any]:
        """
        Update all performance metrics for a supplier
        Returns:
            Dict with updated metrics
        """
        try:
            # Calculate all metrics
            trust_score = await self.calculate_trust_score(supplier_id)
            on_time_rate = await self._calculate_on_time_delivery_rate(supplier_id)
            fulfillment_rate = await self._calculate_fulfillment_rate(supplier_id)
            # Get current supplier
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()
            if not supplier:
                return {}
            # Update supplier metrics
            supplier.trust_score = trust_score
            supplier.on_time_delivery_rate = on_time_rate
            supplier.fulfillment_rate = fulfillment_rate
            supplier.last_performance_update = datetime.now(timezone.utc)
            # Auto-update preferred status based on performance
            supplier.is_preferred_supplier = (
                supplier.total_pos_count >= 10 and
                trust_score >= 0.80 and
                supplier.status.value == 'active'
            )
            # Auto-update auto-approve eligibility
            # (depends on the freshly computed is_preferred_supplier above).
            supplier.auto_approve_enabled = (
                supplier.total_pos_count >= 20 and
                trust_score >= 0.85 and
                on_time_rate >= 0.90 and
                supplier.is_preferred_supplier and
                supplier.status.value == 'active'
            )
            await self.db.commit()
            logger.info(
                "Supplier performance metrics updated",
                supplier_id=str(supplier_id),
                trust_score=trust_score,
                is_preferred=supplier.is_preferred_supplier,
                auto_approve_enabled=supplier.auto_approve_enabled
            )
            return {
                "supplier_id": str(supplier_id),
                "trust_score": trust_score,
                "on_time_delivery_rate": on_time_rate,
                "fulfillment_rate": fulfillment_rate,
                "is_preferred_supplier": supplier.is_preferred_supplier,
                "auto_approve_enabled": supplier.auto_approve_enabled,
                "last_updated": supplier.last_performance_update.isoformat()
            }
        except Exception as e:
            # Unlike the read-only calculators, a failed write is rolled back
            # and re-raised so the caller sees the failure.
            await self.db.rollback()
            logger.error("Error updating supplier performance metrics", supplier_id=str(supplier_id), error=str(e))
            raise
    async def increment_po_counters(self, supplier_id: UUID, approved: bool = False):
        """Increment PO counters when a new PO is created or approved"""
        try:
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()
            # Missing supplier is silently ignored (best-effort counter).
            if supplier:
                supplier.total_pos_count += 1
                if approved:
                    supplier.approved_pos_count += 1
                await self.db.commit()
                logger.info(
                    "Supplier PO counters incremented",
                    supplier_id=str(supplier_id),
                    total=supplier.total_pos_count,
                    approved=supplier.approved_pos_count
                )
        except Exception as e:
            await self.db.rollback()
            logger.error("Error incrementing PO counters", supplier_id=str(supplier_id), error=str(e))
    async def get_supplier_with_performance(self, supplier_id: UUID) -> Optional[Dict[str, Any]]:
        """Get supplier data with all performance metrics"""
        try:
            stmt = select(Supplier).where(Supplier.id == supplier_id)
            result = await self.db.execute(stmt)
            supplier = result.scalar_one_or_none()
            if not supplier:
                return None
            # Flatten the ORM object into a JSON-friendly dict.
            return {
                "id": str(supplier.id),
                "name": supplier.name,
                "trust_score": supplier.trust_score,
                "is_preferred_supplier": supplier.is_preferred_supplier,
                "auto_approve_enabled": supplier.auto_approve_enabled,
                "total_pos_count": supplier.total_pos_count,
                "approved_pos_count": supplier.approved_pos_count,
                "on_time_delivery_rate": supplier.on_time_delivery_rate,
                "fulfillment_rate": supplier.fulfillment_rate,
                "quality_rating": supplier.quality_rating,
                "delivery_rating": supplier.delivery_rating,
                "status": supplier.status.value if supplier.status else None,
                "last_performance_update": supplier.last_performance_update.isoformat() if supplier.last_performance_update else None
            }
        except Exception as e:
            logger.error("Error getting supplier with performance", supplier_id=str(supplier_id), error=str(e))
            return None

View File

@@ -133,7 +133,7 @@ class PurchaseOrderService:
async def get_purchase_order(self, po_id: UUID) -> Optional[PurchaseOrder]:
"""Get purchase order by ID with items"""
return self.repository.get_with_items(po_id)
return await self.repository.get_with_items(po_id)
async def update_purchase_order(
self,
@@ -298,7 +298,7 @@ class PurchaseOrderService:
search_params: PurchaseOrderSearchParams
) -> List[PurchaseOrder]:
"""Search purchase orders with filters"""
return self.repository.search_purchase_orders(
return await self.repository.search_purchase_orders(
tenant_id=tenant_id,
supplier_id=search_params.supplier_id,
status=search_params.status,

View File

@@ -0,0 +1,84 @@
"""add_supplier_trust_metrics
Revision ID: add_supplier_trust_metrics
Revises: 93d6ea3dc888
Create Date: 2025-10-20 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'add_supplier_trust_metrics'
down_revision = '93d6ea3dc888'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add trust and auto-approval metrics to suppliers table.

    Adds the metric columns (NOT NULL with safe server defaults), indexes
    for the common filter columns, then backfills reasonable initial values
    for already-active suppliers.
    """
    # Add trust and auto-approval metric columns
    op.add_column('suppliers', sa.Column('trust_score', sa.Float(), nullable=False, server_default='0.0'))
    op.add_column('suppliers', sa.Column('is_preferred_supplier', sa.Boolean(), nullable=False, server_default='false'))
    op.add_column('suppliers', sa.Column('auto_approve_enabled', sa.Boolean(), nullable=False, server_default='false'))
    op.add_column('suppliers', sa.Column('total_pos_count', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('suppliers', sa.Column('approved_pos_count', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('suppliers', sa.Column('on_time_delivery_rate', sa.Float(), nullable=False, server_default='0.0'))
    op.add_column('suppliers', sa.Column('fulfillment_rate', sa.Float(), nullable=False, server_default='0.0'))
    op.add_column('suppliers', sa.Column('last_performance_update', sa.DateTime(timezone=True), nullable=True))
    # Create index for trust score queries
    op.create_index('ix_suppliers_trust_score', 'suppliers', ['trust_score'], unique=False)
    op.create_index('ix_suppliers_preferred', 'suppliers', ['is_preferred_supplier'], unique=False)
    op.create_index('ix_suppliers_auto_approve', 'suppliers', ['auto_approve_enabled'], unique=False)
    # Update existing active suppliers to have reasonable default trust scores.
    # Fix: the rating comparisons now COALESCE NULL ratings to 0 (matching the
    # trust_score expression); otherwise a NULL quality/delivery rating makes
    # the boolean expression NULL, which violates the NOT NULL constraint on
    # is_preferred_supplier / auto_approve_enabled.
    op.execute("""
        UPDATE suppliers
        SET
            trust_score = LEAST(1.0, GREATEST(0.0,
                (COALESCE(quality_rating, 0) / 5.0 * 0.4) +
                (COALESCE(delivery_rating, 0) / 5.0 * 0.4) +
                (CASE WHEN total_orders > 10 THEN 0.2 ELSE total_orders / 50.0 END)
            )),
            is_preferred_supplier = (
                total_orders >= 10 AND
                COALESCE(quality_rating, 0) >= 4.0 AND
                COALESCE(delivery_rating, 0) >= 4.0 AND
                status = 'active'
            ),
            auto_approve_enabled = (
                total_orders >= 20 AND
                COALESCE(quality_rating, 0) >= 4.5 AND
                COALESCE(delivery_rating, 0) >= 4.5 AND
                status = 'active'
            ),
            total_pos_count = total_orders,
            approved_pos_count = total_orders,
            on_time_delivery_rate = COALESCE(delivery_rating / 5.0, 0.0),
            fulfillment_rate = COALESCE(quality_rating / 5.0, 0.0),
            last_performance_update = NOW()
        WHERE status = 'active'
    """)
def downgrade() -> None:
    """Remove trust and auto-approval metrics from suppliers table"""
    # Indexes first, then the columns they cover (reverse of upgrade()).
    for index_name in (
        'ix_suppliers_auto_approve',
        'ix_suppliers_preferred',
        'ix_suppliers_trust_score',
    ):
        op.drop_index(index_name, table_name='suppliers')
    for column_name in (
        'last_performance_update',
        'fulfillment_rate',
        'on_time_delivery_rate',
        'approved_pos_count',
        'total_pos_count',
        'auto_approve_enabled',
        'is_preferred_supplier',
        'trust_score',
    ):
        op.drop_column('suppliers', column_name)

View File

@@ -0,0 +1,463 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Purchase Orders Seeding Script for Suppliers Service
Creates realistic PO scenarios in various states for demo purposes
This script creates:
- 3 PENDING_APPROVAL POs (created today, need user action)
- 2 APPROVED POs (approved yesterday, in progress)
- 1 AUTO_APPROVED PO (small amount, trusted supplier)
- 2 COMPLETED POs (delivered last week)
- 1 REJECTED PO (quality concerns)
- 1 CANCELLED PO (supplier unavailable)
"""
import asyncio
import uuid
import sys
import os
import random
from datetime import datetime, timezone, timedelta, date
from pathlib import Path
from decimal import Decimal
# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog
from app.models.suppliers import (
Supplier, PurchaseOrder, PurchaseOrderItem,
PurchaseOrderStatus, SupplierStatus, SupplierType
)
# Configure logging
logger = structlog.get_logger()
# Demo tenant IDs (match those from orders service)
DEMO_TENANT_IDS = [
uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"), # San Pablo
uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7") # La Espiga
]
# System user ID for auto-approvals
SYSTEM_USER_ID = uuid.UUID("00000000-0000-0000-0000-000000000001")
async def create_or_get_supplier(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    name: str,
    supplier_type: SupplierType,
    trust_score: float = 0.0,
    is_preferred: bool = False,
    auto_approve_enabled: bool = False
) -> Supplier:
    """Create or get a demo supplier.

    Looks the supplier up by (tenant_id, name) and returns the existing row
    if found; otherwise builds one with randomized demo ratings/counters and
    the explicitly supplied trust settings.
    """
    # Check if supplier exists
    result = await db.execute(
        select(Supplier).where(
            Supplier.tenant_id == tenant_id,
            Supplier.name == name
        )
    )
    existing = result.scalar_one_or_none()
    if existing:
        return existing
    # Create new supplier
    # Demo data only: ratings, order counts, and amounts are randomized.
    supplier = Supplier(
        id=uuid.uuid4(),
        tenant_id=tenant_id,
        name=name,
        supplier_code=f"SUP-{name[:3].upper()}",
        supplier_type=supplier_type,
        status=SupplierStatus.active,
        contact_person=f"Contact {name}",
        email=f"contact@{name.lower().replace(' ', '')}.com",
        phone="+34 91 555 " + str(random.randint(1000, 9999)),
        city="Madrid",
        country="España",
        standard_lead_time=random.randint(1, 3),
        quality_rating=random.uniform(4.0, 5.0),
        delivery_rating=random.uniform(4.0, 5.0),
        total_orders=random.randint(20, 100),
        total_amount=Decimal(str(random.uniform(10000, 50000))),
        # Trust metrics
        trust_score=trust_score,
        is_preferred_supplier=is_preferred,
        auto_approve_enabled=auto_approve_enabled,
        total_pos_count=random.randint(25, 80),
        approved_pos_count=random.randint(24, 78),
        on_time_delivery_rate=random.uniform(0.85, 0.98),
        fulfillment_rate=random.uniform(0.90, 0.99),
        last_performance_update=datetime.now(timezone.utc),
        approved_by=SYSTEM_USER_ID,
        approved_at=datetime.now(timezone.utc) - timedelta(days=30),
        created_by=SYSTEM_USER_ID,
        updated_by=SYSTEM_USER_ID
    )
    db.add(supplier)
    # Flush (not commit) so the new id is usable; the seed routine commits.
    await db.flush()
    logger.info(f"Created supplier: {name}", supplier_id=str(supplier.id))
    return supplier
async def create_purchase_order(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    supplier: Supplier,
    status: PurchaseOrderStatus,
    total_amount: Decimal,
    created_offset_days: int = 0,
    auto_approved: bool = False,
    priority: str = "normal",
    items_data: list = None
) -> PurchaseOrder:
    """Create a purchase order with items.

    Builds a PO in the requested terminal/in-flight status, backdated by
    created_offset_days (negative = past), with approval/sent/confirmed
    timestamps derived from the status, then attaches its line items.
    """
    created_at = datetime.now(timezone.utc) + timedelta(days=created_offset_days)
    required_delivery = created_at + timedelta(days=random.randint(3, 7))
    # Generate PO number
    # NOTE(review): a random 4-digit suffix can collide across runs — fine
    # for demo seeding, not a production numbering scheme.
    po_number = f"PO-{datetime.now().year}-{random.randint(1000, 9999)}"
    # Calculate amounts
    subtotal = total_amount
    tax_amount = subtotal * Decimal("0.10") # 10% IVA
    shipping_cost = Decimal(str(random.uniform(0, 20)))
    total = subtotal + tax_amount + shipping_cost
    # Create PO
    po = PurchaseOrder(
        id=uuid.uuid4(),
        tenant_id=tenant_id,
        supplier_id=supplier.id,
        po_number=po_number,
        status=status,
        priority=priority,
        order_date=created_at,
        required_delivery_date=required_delivery,
        subtotal=subtotal,
        tax_amount=tax_amount,
        shipping_cost=shipping_cost,
        discount_amount=Decimal("0.00"),
        total_amount=total,
        notes=f"Auto-generated demo PO from procurement plan" if not auto_approved else f"Auto-approved: Amount €{subtotal:.2f} within threshold",
        created_at=created_at,
        updated_at=created_at,
        created_by=SYSTEM_USER_ID,
        updated_by=SYSTEM_USER_ID
    )
    # Set approval data if approved
    if status in [PurchaseOrderStatus.approved, PurchaseOrderStatus.sent_to_supplier,
                  PurchaseOrderStatus.confirmed, PurchaseOrderStatus.completed]:
        po.approved_at = created_at + timedelta(hours=random.randint(1, 6))
        # Auto-approvals are attributed to the system user; manual ones get a
        # synthetic random approver id.
        po.approved_by = SYSTEM_USER_ID if auto_approved else uuid.uuid4()
        if auto_approved:
            po.notes = f"{po.notes}\nAuto-approved by system based on trust score and amount"
    # Set sent/confirmed dates
    if status in [PurchaseOrderStatus.sent_to_supplier, PurchaseOrderStatus.confirmed,
                  PurchaseOrderStatus.completed]:
        po.sent_to_supplier_at = po.approved_at + timedelta(hours=2)
    if status in [PurchaseOrderStatus.confirmed, PurchaseOrderStatus.completed]:
        po.supplier_confirmation_date = po.sent_to_supplier_at + timedelta(hours=random.randint(4, 24))
    db.add(po)
    await db.flush()
    # Create items
    if not items_data:
        # Default demo basket used when the caller supplies no line items.
        items_data = [
            {"name": "Harina de Trigo T55", "quantity": 100, "unit_price": 0.85, "uom": "kg"},
            {"name": "Levadura Fresca", "quantity": 5, "unit_price": 4.50, "uom": "kg"},
            {"name": "Sal Marina", "quantity": 10, "unit_price": 1.20, "uom": "kg"}
        ]
    for idx, item_data in enumerate(items_data, 1):
        ordered_qty = int(item_data["quantity"])
        unit_price = Decimal(str(item_data["unit_price"]))
        line_total = Decimal(str(ordered_qty)) * unit_price
        # Completed orders are fully received; everything else is untouched.
        item = PurchaseOrderItem(
            id=uuid.uuid4(),
            purchase_order_id=po.id,
            tenant_id=tenant_id,
            inventory_product_id=uuid.uuid4(), # Would link to actual inventory items
            product_code=f"PROD-{item_data['name'][:3].upper()}",
            ordered_quantity=ordered_qty,
            received_quantity=ordered_qty if status == PurchaseOrderStatus.completed else 0,
            remaining_quantity=0 if status == PurchaseOrderStatus.completed else ordered_qty,
            unit_price=unit_price,
            line_total=line_total,
            unit_of_measure=item_data["uom"],
            item_notes=f"Demo item: {item_data['name']}"
        )
        db.add(item)
    logger.info(f"Created PO: {po_number}", po_id=str(po.id), status=status.value, amount=float(total))
    return po
async def seed_purchase_orders_for_tenant(db: AsyncSession, tenant_id: uuid.UUID):
    """Seed purchase orders for a specific tenant.

    Creates four suppliers at different trust levels, then nine POs covering
    the demo scenarios (pending approval, approved/auto-approved, completed,
    cancelled, disputed), and commits the whole batch at the end.
    """
    logger.info("Seeding purchase orders", tenant_id=str(tenant_id))
    # Create/get suppliers with different trust levels
    supplier_high_trust = await create_or_get_supplier(
        db, tenant_id, "Panadería Central S.L.",
        SupplierType.ingredients,
        trust_score=0.92, is_preferred=True, auto_approve_enabled=True
    )
    supplier_medium_trust = await create_or_get_supplier(
        db, tenant_id, "Distribuidora Madrid",
        SupplierType.ingredients,
        trust_score=0.75, is_preferred=True, auto_approve_enabled=False
    )
    supplier_new = await create_or_get_supplier(
        db, tenant_id, "Nuevos Suministros SA",
        SupplierType.ingredients,
        trust_score=0.50, is_preferred=False, auto_approve_enabled=False
    )
    supplier_packaging = await create_or_get_supplier(
        db, tenant_id, "Embalajes Premium",
        SupplierType.packaging,
        trust_score=0.88, is_preferred=True, auto_approve_enabled=True
    )
    pos_created = []
    # 1. PENDING_APPROVAL - Critical/Urgent (created today)
    po1 = await create_purchase_order(
        db, tenant_id, supplier_medium_trust,
        PurchaseOrderStatus.pending_approval,
        Decimal("1234.56"),
        created_offset_days=0,
        priority="high",
        items_data=[
            {"name": "Harina Integral Ecológica", "quantity": 150, "unit_price": 1.20, "uom": "kg"},
            {"name": "Semillas de Girasol", "quantity": 20, "unit_price": 3.50, "uom": "kg"},
            {"name": "Miel de Azahar", "quantity": 10, "unit_price": 8.90, "uom": "kg"}
        ]
    )
    pos_created.append(po1)
    # 2. PENDING_APPROVAL - Medium amount, new supplier (created today)
    po2 = await create_purchase_order(
        db, tenant_id, supplier_new,
        PurchaseOrderStatus.pending_approval,
        Decimal("789.00"),
        created_offset_days=0,
        items_data=[
            {"name": "Aceite de Oliva Virgen", "quantity": 30, "unit_price": 8.50, "uom": "l"},
            {"name": "Azúcar Moreno", "quantity": 50, "unit_price": 1.80, "uom": "kg"}
        ]
    )
    pos_created.append(po2)
    # 3. PENDING_APPROVAL - Large amount (created yesterday)
    po3 = await create_purchase_order(
        db, tenant_id, supplier_medium_trust,
        PurchaseOrderStatus.pending_approval,
        Decimal("2500.00"),
        created_offset_days=-1,
        priority="normal",
        items_data=[
            {"name": "Harina de Fuerza T65", "quantity": 500, "unit_price": 0.95, "uom": "kg"},
            {"name": "Mantequilla Premium", "quantity": 80, "unit_price": 5.20, "uom": "kg"},
            {"name": "Huevos Categoría A", "quantity": 600, "unit_price": 0.22, "uom": "unidad"}
        ]
    )
    pos_created.append(po3)
    # 4. APPROVED (auto-approved, small amount, trusted supplier)
    po4 = await create_purchase_order(
        db, tenant_id, supplier_high_trust,
        PurchaseOrderStatus.approved,
        Decimal("234.50"),
        created_offset_days=0,
        auto_approved=True,
        items_data=[
            {"name": "Levadura Seca", "quantity": 5, "unit_price": 6.90, "uom": "kg"},
            {"name": "Sal Fina", "quantity": 25, "unit_price": 0.85, "uom": "kg"}
        ]
    )
    pos_created.append(po4)
    # 5. APPROVED (manually approved yesterday)
    po5 = await create_purchase_order(
        db, tenant_id, supplier_packaging,
        PurchaseOrderStatus.approved,
        Decimal("456.78"),
        created_offset_days=-1,
        items_data=[
            {"name": "Bolsas de Papel Kraft", "quantity": 1000, "unit_price": 0.12, "uom": "unidad"},
            {"name": "Cajas de Cartón Grande", "quantity": 200, "unit_price": 0.45, "uom": "unidad"}
        ]
    )
    pos_created.append(po5)
    # 6. COMPLETED (delivered last week)
    po6 = await create_purchase_order(
        db, tenant_id, supplier_high_trust,
        PurchaseOrderStatus.completed,
        Decimal("1567.80"),
        created_offset_days=-7,
        items_data=[
            {"name": "Harina T55 Premium", "quantity": 300, "unit_price": 0.90, "uom": "kg"},
            {"name": "Chocolate Negro 70%", "quantity": 40, "unit_price": 7.80, "uom": "kg"}
        ]
    )
    pos_created.append(po6)
    # 7. COMPLETED (delivered 5 days ago)
    po7 = await create_purchase_order(
        db, tenant_id, supplier_medium_trust,
        PurchaseOrderStatus.completed,
        Decimal("890.45"),
        created_offset_days=-5,
        items_data=[
            {"name": "Nueces Peladas", "quantity": 20, "unit_price": 12.50, "uom": "kg"},
            {"name": "Pasas Sultanas", "quantity": 15, "unit_price": 4.30, "uom": "kg"}
        ]
    )
    pos_created.append(po7)
    # 8. CANCELLED (supplier unavailable)
    po8 = await create_purchase_order(
        db, tenant_id, supplier_new,
        PurchaseOrderStatus.cancelled,
        Decimal("345.00"),
        created_offset_days=-3,
        items_data=[
            {"name": "Avellanas Tostadas", "quantity": 25, "unit_price": 11.80, "uom": "kg"}
        ]
    )
    # Rejection metadata is set after creation; the final commit persists it.
    po8.rejection_reason = "Supplier unable to deliver - stock unavailable"
    po8.notes = "Cancelled: Supplier stock unavailable at required delivery date"
    pos_created.append(po8)
    # 9. DISPUTED (quality issues)
    po9 = await create_purchase_order(
        db, tenant_id, supplier_medium_trust,
        PurchaseOrderStatus.disputed,
        Decimal("678.90"),
        created_offset_days=-4,
        priority="high",
        items_data=[
            {"name": "Cacao en Polvo", "quantity": 30, "unit_price": 18.50, "uom": "kg"},
            {"name": "Vainilla en Rama", "quantity": 2, "unit_price": 45.20, "uom": "kg"}
        ]
    )
    po9.rejection_reason = "Quality below specifications - requesting replacement"
    po9.notes = "DISPUTED: Quality issue reported - batch rejected, requesting replacement or refund"
    pos_created.append(po9)
    # Single commit for the whole tenant's batch.
    await db.commit()
    logger.info(
        f"Successfully created {len(pos_created)} purchase orders for tenant",
        tenant_id=str(tenant_id),
        pending_approval=3,
        approved=2,
        completed=2,
        cancelled=1,
        disputed=1
    )
    return pos_created
async def seed_all(db: AsyncSession):
    """Seed all demo tenants with purchase orders"""
    logger.info("Starting demo purchase orders seed process")
    created_orders = []
    for tenant_id in DEMO_TENANT_IDS:
        # Idempotency guard: a tenant with any existing PO is left untouched.
        probe_query = (
            select(PurchaseOrder)
            .where(PurchaseOrder.tenant_id == tenant_id)
            .limit(1)
        )
        probe = await db.execute(probe_query)
        if probe.scalar_one_or_none() is not None:
            logger.info(f"Purchase orders already exist for tenant {tenant_id}, skipping")
            continue
        created_orders.extend(await seed_purchase_orders_for_tenant(db, tenant_id))
    # Summary payload consumed by main() for logging and the CLI report.
    return {
        "total_pos_created": len(created_orders),
        "tenants_seeded": len(DEMO_TENANT_IDS),
        "status": "completed",
    }
async def main():
    """Main execution function"""
    # Resolve the target database from the environment; hard requirement.
    database_url = os.getenv("SUPPLIERS_DATABASE_URL")
    if not database_url:
        logger.error("SUPPLIERS_DATABASE_URL environment variable must be set")
        return 1
    # Rewrite the scheme so SQLAlchemy picks the asyncpg driver.
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
    engine = create_async_engine(database_url, echo=False)
    session_factory = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    try:
        async with session_factory() as session:
            result = await seed_all(session)
            logger.info(
                "Purchase orders seed completed successfully!",
                total_pos=result["total_pos_created"],
                tenants=result["tenants_seeded"]
            )
            # Human-readable summary for whoever runs the script by hand.
            banner = "=" * 60
            print("\n" + banner)
            print("DEMO PURCHASE ORDERS SEED SUMMARY")
            print(banner)
            print(f"Total POs Created: {result['total_pos_created']}")
            print(f"Tenants Seeded: {result['tenants_seeded']}")
            print("\nPO Distribution:")
            print(" - 3 PENDING_APPROVAL (need user action)")
            print(" - 2 APPROVED (in progress)")
            print(" - 2 COMPLETED (delivered)")
            print(" - 1 CANCELLED (supplier issue)")
            print(" - 1 DISPUTED (quality issue)")
            print(banner + "\n")
            return 0
    except Exception as e:
        logger.error(f"Purchase orders seed failed: {str(e)}", exc_info=True)
        return 1
    finally:
        # Always release the connection pool, success or failure.
        await engine.dispose()
if __name__ == "__main__":
    # Run the async seeder and hand its exit status straight to the shell.
    sys.exit(asyncio.run(main()))

View File

@@ -167,62 +167,62 @@ async def seed_suppliers_for_tenant(
existing_supplier = result.scalars().first()
if existing_supplier:
logger.debug(f" ⏭️ Skipping supplier (exists): {supplier_name}")
logger.debug(f" ⏭️ Supplier exists, ensuring price lists: {supplier_name}")
skipped_suppliers += 1
continue
# Don't skip - continue to create/update price lists below
else:
# Parse enums
try:
supplier_type = SupplierType(supplier_data.get("supplier_type", "ingredients"))
except ValueError:
supplier_type = SupplierType.INGREDIENTS
# Parse enums
try:
supplier_type = SupplierType(supplier_data.get("supplier_type", "ingredients"))
except ValueError:
supplier_type = SupplierType.INGREDIENTS
try:
status = SupplierStatus(supplier_data.get("status", "active"))
except ValueError:
status = SupplierStatus.ACTIVE
try:
status = SupplierStatus(supplier_data.get("status", "active"))
except ValueError:
status = SupplierStatus.ACTIVE
try:
payment_terms = PaymentTerms(supplier_data.get("payment_terms", "net_30"))
except ValueError:
payment_terms = PaymentTerms.NET_30
try:
payment_terms = PaymentTerms(supplier_data.get("payment_terms", "net_30"))
except ValueError:
payment_terms = PaymentTerms.NET_30
# Create supplier with pre-defined ID
supplier = Supplier(
id=supplier_id,
tenant_id=tenant_id,
name=supplier_name,
supplier_code=f"SUP-{created_suppliers + 1:03d}",
supplier_type=supplier_type,
status=status,
tax_id=supplier_data.get("tax_id"),
contact_person=supplier_data.get("contact_person"),
email=supplier_data.get("email"),
phone=supplier_data.get("phone"),
mobile=supplier_data.get("mobile"),
website=supplier_data.get("website"),
address_line1=supplier_data.get("address_line1"),
address_line2=supplier_data.get("address_line2"),
city=supplier_data.get("city"),
state_province=supplier_data.get("state_province"),
postal_code=supplier_data.get("postal_code"),
country=supplier_data.get("country", "España"),
payment_terms=payment_terms,
credit_limit=Decimal(str(supplier_data.get("credit_limit", 0.0))),
standard_lead_time=supplier_data.get("standard_lead_time", 3),
quality_rating=supplier_data.get("quality_rating", 4.5),
delivery_rating=supplier_data.get("delivery_rating", 4.5),
notes=supplier_data.get("notes"),
certifications=supplier_data.get("certifications", []),
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc),
created_by=uuid.UUID("00000000-0000-0000-0000-000000000000"), # System user
updated_by=uuid.UUID("00000000-0000-0000-0000-000000000000") # System user
)
# Create supplier with pre-defined ID
supplier = Supplier(
id=supplier_id,
tenant_id=tenant_id,
name=supplier_name,
supplier_code=f"SUP-{created_suppliers + 1:03d}",
supplier_type=supplier_type,
status=status,
tax_id=supplier_data.get("tax_id"),
contact_person=supplier_data.get("contact_person"),
email=supplier_data.get("email"),
phone=supplier_data.get("phone"),
mobile=supplier_data.get("mobile"),
website=supplier_data.get("website"),
address_line1=supplier_data.get("address_line1"),
address_line2=supplier_data.get("address_line2"),
city=supplier_data.get("city"),
state_province=supplier_data.get("state_province"),
postal_code=supplier_data.get("postal_code"),
country=supplier_data.get("country", "España"),
payment_terms=payment_terms,
credit_limit=Decimal(str(supplier_data.get("credit_limit", 0.0))),
standard_lead_time=supplier_data.get("standard_lead_time", 3),
quality_rating=supplier_data.get("quality_rating", 4.5),
delivery_rating=supplier_data.get("delivery_rating", 4.5),
notes=supplier_data.get("notes"),
certifications=supplier_data.get("certifications", []),
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc),
created_by=uuid.UUID("00000000-0000-0000-0000-000000000000"), # System user
updated_by=uuid.UUID("00000000-0000-0000-0000-000000000000") # System user
)
db.add(supplier)
created_suppliers += 1
logger.debug(f" ✅ Created supplier: {supplier_name}")
db.add(supplier)
created_suppliers += 1
logger.debug(f" ✅ Created supplier: {supplier_name}")
# Create price lists for products using pre-defined ingredient IDs
products = supplier_data.get("products", [])
@@ -238,6 +238,20 @@ async def seed_suppliers_for_tenant(
tenant_int = int(tenant_id.hex, 16)
ingredient_id = uuid.UUID(int=tenant_int ^ int(base_ingredient_id.hex, 16))
# Check if price list already exists
existing_price_list_result = await db.execute(
select(SupplierPriceList).where(
SupplierPriceList.tenant_id == tenant_id,
SupplierPriceList.supplier_id == supplier_id,
SupplierPriceList.inventory_product_id == ingredient_id
)
)
existing_price_list = existing_price_list_result.scalars().first()
if existing_price_list:
# Price list already exists, skip
continue
# Get base cost from hardcoded costs
base_cost = INGREDIENT_COSTS.get(product_sku, 1.0)

View File

@@ -9,6 +9,7 @@ import structlog
from datetime import datetime, timezone
import uuid
import shared.redis_utils
from sqlalchemy.ext.asyncio import AsyncSession
from shared.routing import RouteBuilder
from shared.monitoring.decorators import track_execution_time
@@ -39,6 +40,7 @@ from app.services.training_events import (
publish_training_failed
)
from app.core.config import settings
from app.core.database import get_db
logger = structlog.get_logger()
route_builder = RouteBuilder('training')
@@ -86,7 +88,8 @@ async def start_training_job(
request_obj: Request = None,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service),
rate_limiter = Depends(get_rate_limiter)
rate_limiter = Depends(get_rate_limiter),
db: AsyncSession = Depends(get_db)
):
"""
Start a new training job for all tenant products (Admin+ only, quota enforced).
@@ -169,9 +172,7 @@ async def start_training_job(
# We don't know exact product count yet, so use historical average or estimate
try:
# Try to get historical average for this tenant
from app.core.database import get_db
db = next(get_db())
historical_avg = get_historical_average_estimate(db, tenant_id)
historical_avg = await get_historical_average_estimate(db, tenant_id)
# If no historical data, estimate based on typical product count (10-20 products)
estimated_products = 15 # Conservative estimate

View File

@@ -129,9 +129,7 @@ class EnhancedBakeryMLTrainer:
# Try to get historical average for more accurate estimates
try:
historical_avg = await asyncio.get_event_loop().run_in_executor(
None,
get_historical_average_estimate,
historical_avg = await get_historical_average_estimate(
db_session,
tenant_id
)

View File

@@ -9,6 +9,8 @@ Provides intelligent time estimation for training jobs based on:
from typing import List, Optional
from datetime import datetime, timedelta, timezone
import structlog
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
logger = structlog.get_logger()
@@ -256,8 +258,8 @@ def format_time_remaining(seconds: int) -> str:
return f"{hours} hour{'s' if hours > 1 else ''}"
def get_historical_average_estimate(
db_session,
async def get_historical_average_estimate(
db_session: AsyncSession,
tenant_id: str,
lookback_days: int = 30,
limit: int = 10
@@ -269,7 +271,7 @@ def get_historical_average_estimate(
recent historical data and calculate an average.
Args:
db_session: Database session
db_session: Async database session
tenant_id: Tenant UUID
lookback_days: How many days back to look
limit: Maximum number of historical records to consider
@@ -283,13 +285,19 @@ def get_historical_average_estimate(
cutoff = datetime.now(timezone.utc) - timedelta(days=lookback_days)
# Query recent training performance metrics
metrics = db_session.query(TrainingPerformanceMetrics).filter(
TrainingPerformanceMetrics.tenant_id == tenant_id,
TrainingPerformanceMetrics.completed_at >= cutoff
).order_by(
TrainingPerformanceMetrics.completed_at.desc()
).limit(limit).all()
# Query recent training performance metrics using SQLAlchemy 2.0 async pattern
query = (
select(TrainingPerformanceMetrics)
.where(
TrainingPerformanceMetrics.tenant_id == tenant_id,
TrainingPerformanceMetrics.completed_at >= cutoff
)
.order_by(TrainingPerformanceMetrics.completed_at.desc())
.limit(limit)
)
result = await db_session.execute(query)
metrics = result.scalars().all()
if not metrics:
logger.info(