Initial commit - production deployment
This commit is contained in:
0
services/distribution/app/__init__.py
Normal file
0
services/distribution/app/__init__.py
Normal file
81
services/distribution/app/api/dependencies.py
Normal file
81
services/distribution/app/api/dependencies.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""
|
||||
Dependency Injection for Distribution Service
|
||||
"""
|
||||
|
||||
from typing import AsyncGenerator
|
||||
from fastapi import Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.config import settings
|
||||
from app.repositories.delivery_route_repository import DeliveryRouteRepository
|
||||
from app.repositories.shipment_repository import ShipmentRepository
|
||||
from app.repositories.delivery_schedule_repository import DeliveryScheduleRepository
|
||||
from app.services.distribution_service import DistributionService
|
||||
from app.services.routing_optimizer import RoutingOptimizer
|
||||
from shared.clients.tenant_client import TenantServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from shared.clients.procurement_client import ProcurementServiceClient
|
||||
|
||||
|
||||
|
||||
async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a request-scoped AsyncSession obtained from the shared get_db generator."""
    async for db in get_db():
        yield db
|
||||
|
||||
|
||||
async def get_route_repository(db_session: AsyncSession = Depends(get_db_session)) -> DeliveryRouteRepository:
    """Build a DeliveryRouteRepository bound to the injected database session."""
    repository = DeliveryRouteRepository(db_session)
    return repository
|
||||
|
||||
|
||||
async def get_shipment_repository(db_session: AsyncSession = Depends(get_db_session)) -> ShipmentRepository:
    """Build a ShipmentRepository bound to the injected database session."""
    repository = ShipmentRepository(db_session)
    return repository
|
||||
|
||||
|
||||
async def get_delivery_schedule_repository(db_session: AsyncSession = Depends(get_db_session)) -> DeliveryScheduleRepository:
    """Build a DeliveryScheduleRepository bound to the injected database session."""
    repository = DeliveryScheduleRepository(db_session)
    return repository
|
||||
|
||||
|
||||
def get_tenant_client() -> TenantServiceClient:
    """Construct a TenantServiceClient configured from application settings."""
    client = TenantServiceClient(settings)
    return client
|
||||
|
||||
|
||||
def get_inventory_client() -> InventoryServiceClient:
    """Construct an InventoryServiceClient configured from application settings."""
    client = InventoryServiceClient(settings)
    return client
|
||||
|
||||
|
||||
def get_procurement_client() -> ProcurementServiceClient:
    """Construct a ProcurementServiceClient configured from application settings."""
    client = ProcurementServiceClient(settings)
    return client
|
||||
|
||||
|
||||
def get_routing_optimizer() -> RoutingOptimizer:
    """Construct the routing optimizer service (takes no configuration)."""
    optimizer = RoutingOptimizer()
    return optimizer
|
||||
|
||||
|
||||
def get_distribution_service(
    route_repository: DeliveryRouteRepository = Depends(get_route_repository),
    shipment_repository: ShipmentRepository = Depends(get_shipment_repository),
    schedule_repository: DeliveryScheduleRepository = Depends(get_delivery_schedule_repository),
    tenant_client: TenantServiceClient = Depends(get_tenant_client),
    inventory_client: InventoryServiceClient = Depends(get_inventory_client),
    procurement_client: ProcurementServiceClient = Depends(get_procurement_client),
    routing_optimizer: RoutingOptimizer = Depends(get_routing_optimizer)
) -> DistributionService:
    """
    Assemble the DistributionService from its injected collaborators.

    All repositories share one request-scoped session; the service clients
    and the routing optimizer are constructed fresh per request.
    """
    service = DistributionService(
        route_repository=route_repository,
        shipment_repository=shipment_repository,
        schedule_repository=schedule_repository,
        tenant_client=tenant_client,
        inventory_client=inventory_client,
        procurement_client=procurement_client,
        routing_optimizer=routing_optimizer,
    )
    return service
|
||||
418
services/distribution/app/api/internal_demo.py
Normal file
418
services/distribution/app/api/internal_demo.py
Normal file
@@ -0,0 +1,418 @@
|
||||
"""
|
||||
Internal Demo Cloning API for Distribution Service
|
||||
Service-to-service endpoint for cloning distribution data
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, delete, func
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional
|
||||
import os
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.distribution import DeliveryRoute, Shipment
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
||||
|
||||
|
||||
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Convert a seed-data date value into a demo-adjusted, timezone-aware datetime.

    Accepted inputs:
    - "BASE_TS" relative markers, e.g. "BASE_TS + 1h30m", "BASE_TS - 2d"
      (resolved against session_time)
    - ISO 8601 strings, e.g. "2025-01-15T06:00:00Z"
    - datetime-like objects (anything exposing isoformat)
    - falsy values, which yield None

    Parse failures are logged (with field_name for context) and produce
    None instead of raising.
    """
    if not date_value:
        return None

    # Relative markers are delegated to the shared resolver.
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as exc:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(exc)
            )
            return None

    # Absolute values: parse (or accept as-is) then shift into the demo window.
    try:
        if isinstance(date_value, str):
            parsed = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            parsed = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None
        return adjust_date_for_demo(parsed, session_time)
    except (ValueError, AttributeError) as exc:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(exc)
        )
        return None
|
||||
|
||||
|
||||
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone distribution service data for a virtual demo tenant.

    Clones:
    - Delivery routes (with route_sequence timestamps re-anchored)
    - Shipments (with route/PO references remapped to cloned IDs)
    - Adjusts dates to a recent timeframe relative to the demo session

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: "professional", "enterprise", or "enterprise_child"
            (child outlets are skipped — distribution is managed by the parent)
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp used as the anchor for date
            adjustment; falls back to the current time when missing/invalid

    Returns:
        Cloning status and record counts. On unexpected failure the
        transaction is rolled back and a "failed" payload is returned
        (not raised); only malformed UUIDs produce an HTTP 400.
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment; any parse problem
    # silently falls back to "now" so cloning still proceeds.
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting distribution data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs (ValueError here is mapped to HTTP 400 below)
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        # NOTE(review): "alerts_generated" is reported but never incremented
        # in this endpoint.
        stats = {
            "delivery_routes": 0,
            "shipments": 0,
            "alerts_generated": 0
        }

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "12-distribution.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "12-distribution.json")
        elif demo_account_type == "enterprise_child":
            # Child outlets don't have their own distribution data
            # Distribution is managed centrally by the parent tenant
            # Child locations are delivery destinations, not distribution hubs
            logger.info(
                "Skipping distribution cloning for child outlet - distribution managed by parent",
                base_tenant_id=base_tenant_id,
                virtual_tenant_id=virtual_tenant_id,
                session_id=session_id
            )
            duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
            return {
                "service": "distribution",
                "status": "completed",
                "records_cloned": 0,
                "duration_ms": duration_ms,
                "details": {
                    "note": "Child outlets don't manage distribution - handled by parent tenant"
                }
            }
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded distribution seed data",
            delivery_routes=len(seed_data.get('delivery_routes', [])),
            shipments=len(seed_data.get('shipments', []))
        )

        # Clone Delivery Routes
        for route_data in seed_data.get('delivery_routes', []):
            # Transform IDs using XOR so each virtual tenant gets stable,
            # unique IDs derived from the template IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                route_uuid = uuid.UUID(route_data['id'])
                transformed_id = transform_id(route_data['id'], virtual_uuid)
            except ValueError as e:
                # Bad seed row: skip it rather than aborting the whole clone
                logger.error("Failed to parse route UUID",
                             route_id=route_data['id'],
                             error=str(e))
                continue

            # Parse date fields (fall back to session_time when missing/invalid)
            route_date = parse_date_field(
                route_data.get('route_date'),
                session_time,
                "route_date"
            ) or session_time

            # Parse route sequence dates: each stop's timestamps are
            # re-anchored and stored back as ISO strings (or None)
            parsed_sequence = []
            for stop in route_data.get('route_sequence', []):
                estimated_arrival = parse_date_field(
                    stop.get('estimated_arrival'),
                    session_time,
                    "estimated_arrival"
                )
                actual_arrival = parse_date_field(
                    stop.get('actual_arrival'),
                    session_time,
                    "actual_arrival"
                )

                parsed_sequence.append({
                    **stop,
                    "estimated_arrival": estimated_arrival.isoformat() if estimated_arrival else None,
                    "actual_arrival": actual_arrival.isoformat() if actual_arrival else None
                })

            # Make route_number unique per virtual tenant to prevent conflicts across demo sessions
            # Append last 6 chars of virtual_tenant_id to ensure uniqueness
            base_route_number = route_data.get('route_number', 'ROUTE-001')
            unique_route_number = f"{base_route_number}-{str(virtual_uuid)[-6:]}"

            # Create new delivery route
            new_route = DeliveryRoute(
                id=transformed_id,
                tenant_id=virtual_uuid,
                route_number=unique_route_number,
                route_date=route_date,
                vehicle_id=route_data.get('vehicle_id'),
                driver_id=route_data.get('driver_id'),
                total_distance_km=route_data.get('total_distance_km'),
                estimated_duration_minutes=route_data.get('estimated_duration_minutes'),
                route_sequence=parsed_sequence,
                notes=route_data.get('notes'),
                status=route_data.get('status', 'planned'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_route)
            stats["delivery_routes"] += 1

        # Clone Shipments
        for shipment_data in seed_data.get('shipments', []):
            # Transform IDs using XOR
            from shared.utils.demo_id_transformer import transform_id
            try:
                shipment_uuid = uuid.UUID(shipment_data['id'])
                transformed_id = transform_id(shipment_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse shipment UUID",
                             shipment_id=shipment_data['id'],
                             error=str(e))
                continue

            # Parse date fields
            shipment_date = parse_date_field(
                shipment_data.get('shipment_date'),
                session_time,
                "shipment_date"
            ) or session_time

            # Note: The Shipment model doesn't have estimated_delivery_time
            # Only actual_delivery_time is stored
            actual_delivery_time = parse_date_field(
                shipment_data.get('actual_delivery_time'),
                session_time,
                "actual_delivery_time"
            )

            # Transform purchase_order_id if present (links to internal transfer PO)
            purchase_order_id = None
            if shipment_data.get('purchase_order_id'):
                try:
                    po_uuid = uuid.UUID(shipment_data['purchase_order_id'])
                    purchase_order_id = transform_id(shipment_data['purchase_order_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid purchase_order_id format",
                        purchase_order_id=shipment_data.get('purchase_order_id')
                    )

            # Transform delivery_route_id (CRITICAL: must reference transformed route)
            delivery_route_id = None
            if shipment_data.get('delivery_route_id'):
                try:
                    route_uuid = uuid.UUID(shipment_data['delivery_route_id'])
                    delivery_route_id = transform_id(shipment_data['delivery_route_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid delivery_route_id format",
                        delivery_route_id=shipment_data.get('delivery_route_id')
                    )

            # Store items in delivery_notes as JSON for demo purposes
            # (In production, items are in the linked purchase order)
            items_json = json.dumps(shipment_data.get('items', [])) if shipment_data.get('items') else None

            # Make shipment_number unique per virtual tenant to prevent conflicts across demo sessions
            # Append last 6 chars of virtual_tenant_id to ensure uniqueness
            base_shipment_number = shipment_data.get('shipment_number', 'SHIP-001')
            unique_shipment_number = f"{base_shipment_number}-{str(virtual_uuid)[-6:]}"

            # Create new shipment
            new_shipment = Shipment(
                id=transformed_id,
                tenant_id=virtual_uuid,
                parent_tenant_id=virtual_uuid,  # Parent is the same as tenant for demo
                child_tenant_id=shipment_data.get('child_tenant_id'),
                purchase_order_id=purchase_order_id,  # Link to internal transfer PO
                delivery_route_id=delivery_route_id,  # MUST use transformed ID
                shipment_number=unique_shipment_number,
                shipment_date=shipment_date,
                status=shipment_data.get('status', 'pending'),
                total_weight_kg=shipment_data.get('total_weight_kg'),
                actual_delivery_time=actual_delivery_time,
                # Store items info in delivery_notes for demo display
                delivery_notes=f"{shipment_data.get('notes', '')}\nItems: {items_json}" if items_json else shipment_data.get('notes'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_shipment)
            stats["shipments"] += 1

        # Commit cloned data (routes and shipments land in one transaction)
        await db.commit()

        total_records = stats["delivery_routes"] + stats["shipments"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Distribution data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "distribution",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        # Covers malformed tenant UUIDs and unknown demo_account_type
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone distribution data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error; report failure in-band so the orchestrator
        # can aggregate per-service results instead of handling a 500
        await db.rollback()

        return {
            "service": "distribution",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
||||
|
||||
|
||||
@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for the internal cloning endpoint.

    Used by the orchestrator to verify service availability.
    """
    payload = {
        "service": "distribution",
        "clone_endpoint": "available",
        "version": "1.0.0",
    }
    return payload
|
||||
|
||||
|
||||
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all distribution data for a virtual demo tenant.

    Removes shipments first, then delivery routes (shipments hold a
    delivery_route_id reference), in a single transaction.

    Raises:
        HTTPException 400: malformed virtual_tenant_id
        HTTPException 500: database failure (after rollback)

    Returns:
        Per-table and total deletion counts plus duration.
    """
    logger.info("Deleting distribution data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    # BUG FIX: a malformed UUID is a client error. Previously uuid.UUID()
    # raised inside the broad try below and surfaced as a generic 500;
    # now it returns 400, consistent with the /clone endpoint.
    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    try:
        # Count records before deleting so the response can report them
        route_count = await db.scalar(select(func.count(DeliveryRoute.id)).where(DeliveryRoute.tenant_id == virtual_uuid))
        shipment_count = await db.scalar(select(func.count(Shipment.id)).where(Shipment.tenant_id == virtual_uuid))

        # Delete in order: shipments reference delivery routes
        await db.execute(delete(Shipment).where(Shipment.tenant_id == virtual_uuid))
        await db.execute(delete(DeliveryRoute).where(DeliveryRoute.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Distribution data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "distribution",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "delivery_routes": route_count,
                "shipments": shipment_count,
                "total": route_count + shipment_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete distribution data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
|
||||
141
services/distribution/app/api/routes.py
Normal file
141
services/distribution/app/api/routes.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""
|
||||
API Routes for Distribution Service
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Header
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date, timedelta
|
||||
import structlog
|
||||
import os
|
||||
|
||||
from app.api.dependencies import get_distribution_service
|
||||
from shared.auth.tenant_access import verify_tenant_access_dep
|
||||
from shared.routing.route_builder import RouteBuilder
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Initialize route builder for distribution service
|
||||
route_builder = RouteBuilder('distribution')
|
||||
|
||||
|
||||
# ✅ Security: Internal API key system removed
|
||||
# All authentication now handled via JWT service tokens at gateway level
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post(route_builder.build_base_route("plans/generate"))
async def generate_daily_distribution_plan(
    tenant_id: str,
    target_date: date = Query(..., description="Date for which to generate distribution plan"),
    vehicle_capacity_kg: float = Query(1000.0, description="Vehicle capacity in kg"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Generate daily distribution plan for internal transfers.

    **Enterprise Tier Feature**: Distribution and routing require Enterprise subscription.

    Raises:
        HTTPException 403: no active subscription, or the tenant's plan
            lacks the "distribution_management" feature
        HTTPException 500: any other failure while generating the plan
    """
    try:
        # Validate subscription tier for distribution features.
        # Imported lazily so the module imports even if shared packages
        # are only needed on this code path.
        from shared.subscription.plans import PlanFeatures
        from shared.clients import get_tenant_client

        tenant_client = get_tenant_client(config=settings, service_name="distribution-service")
        subscription = await tenant_client.get_tenant_subscription(tenant_id)

        if not subscription:
            raise HTTPException(
                status_code=403,
                detail="No active subscription found. Distribution routing requires Enterprise tier."
            )

        # Check if tier has distribution feature (enterprise only);
        # tenants with no "plan" key are treated as "starter"
        tier = subscription.get("plan", "starter")
        if not PlanFeatures.has_feature(tier, "distribution_management"):
            raise HTTPException(
                status_code=403,
                detail=f"Distribution routing requires Enterprise tier. Current tier: {tier}"
            )

        result = await distribution_service.generate_daily_distribution_plan(
            parent_tenant_id=tenant_id,
            target_date=target_date,
            vehicle_capacity_kg=vehicle_capacity_kg
        )
        return result
    except HTTPException:
        # Preserve intended status codes (403 above) instead of wrapping as 500
        raise
    except Exception as e:
        logger.error("Error generating distribution plan", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to generate distribution plan: {str(e)}")
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route("routes"))
async def get_delivery_routes(
    tenant_id: str,
    date_from: Optional[date] = Query(None, description="Start date for route filtering"),
    date_to: Optional[date] = Query(None, description="End date for route filtering"),
    status: Optional[str] = Query(None, description="Filter by route status"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get delivery routes with optional filtering.

    Date window defaults: with no bounds, today only; with one bound,
    the missing bound mirrors the provided one (single-day query).
    Routes are fetched day by day over the window, then optionally
    filtered by status.
    """
    try:
        # Normalize the date window so both bounds are always set
        if not date_from and not date_to:
            date_from = date.today()
            date_to = date.today()
        elif not date_to:
            date_to = date_from
        elif not date_from:
            # BUG FIX: a request supplying only date_to previously left
            # date_from as None, and "None <= date" raised TypeError -> 500
            date_from = date_to

        routes = []
        current_date = date_from
        while current_date <= date_to:
            daily_routes = await distribution_service.get_delivery_routes_for_date(tenant_id, current_date)
            routes.extend(daily_routes)
            current_date = current_date + timedelta(days=1)

        if status:
            routes = [r for r in routes if r.get('status') == status]

        return {"routes": routes}
    except Exception as e:
        logger.error("Error getting delivery routes", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to get delivery routes: {str(e)}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route("routes/{route_id}"))
async def get_route_detail(
    tenant_id: str,
    route_id: str,
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get delivery route details.

    Currently searches today's routes for the given id; returns 404
    when no route in today's plan matches.
    """
    try:
        # Implementation would fetch detailed route information
        # For now, search today's routes for the requested id
        todays_routes = await distribution_service.get_delivery_routes_for_date(tenant_id, date.today())
        matched = None
        for candidate in todays_routes:
            if candidate.get('id') == route_id:
                matched = candidate
                break

        if not matched:
            raise HTTPException(status_code=404, detail="Route not found")

        return matched
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting route detail", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to get route detail: {str(e)}")
|
||||
|
||||
|
||||
166
services/distribution/app/api/shipments.py
Normal file
166
services/distribution/app/api/shipments.py
Normal file
@@ -0,0 +1,166 @@
|
||||
"""
|
||||
Shipment API endpoints for distribution service
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import List, Optional
|
||||
from datetime import date, timedelta
|
||||
|
||||
from app.api.dependencies import get_distribution_service
|
||||
from shared.auth.tenant_access import verify_tenant_access_dep
|
||||
from shared.routing.route_builder import RouteBuilder
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Initialize route builder for distribution service
|
||||
route_builder = RouteBuilder('distribution')
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route("shipments"))
async def get_shipments(
    tenant_id: str,
    date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
    date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
    status: Optional[str] = Query(None, description="Filter by shipment status"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    List shipments with optional filtering.

    Date window defaults: with no bounds, today only; with one bound,
    the missing bound mirrors the provided one (single-day query).
    Shipments are fetched day by day, then optionally filtered by status.
    """
    try:
        # Normalize the date window so both bounds are always set
        if not date_from and not date_to:
            date_from = date.today()
            date_to = date.today()
        elif not date_to:
            date_to = date_from
        elif not date_from:
            # BUG FIX: a request supplying only date_to previously left
            # date_from as None, and "None <= date" raised TypeError -> 500
            date_from = date_to

        shipments = []
        current_date = date_from
        while current_date <= date_to:
            daily_shipments = await distribution_service.get_shipments_for_date(tenant_id, current_date)
            shipments.extend(daily_shipments)
            current_date = current_date + timedelta(days=1)

        if status:
            shipments = [s for s in shipments if s.get('status') == status]

        return {"shipments": shipments}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")
|
||||
|
||||
|
||||
@router.put(route_builder.build_base_route("shipments/{shipment_id}/status"))
async def update_shipment_status(
    tenant_id: str,
    shipment_id: str,
    status_update: dict,  # Should be a proper Pydantic model
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Update shipment status.

    Expects status_update of the form {"status": <new status>, "metadata": {...}?}.

    Raises:
        HTTPException 400: "status" key missing or empty
        HTTPException 500: service failure during the update
    """
    try:
        new_status = status_update.get('status')
        if not new_status:
            raise HTTPException(status_code=400, detail="Status is required")

        user_id = "temp_user_id"  # Would come from auth context
        result = await distribution_service.update_shipment_status(
            shipment_id=shipment_id,
            new_status=new_status,
            user_id=user_id,
            metadata=status_update.get('metadata')
        )
        return result
    except HTTPException:
        # BUG FIX: re-raise intended HTTP errors (the 400 above) instead of
        # letting the generic handler collapse them into a 500 — consistent
        # with upload_delivery_proof below
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")
|
||||
|
||||
|
||||
@router.post(route_builder.build_base_route("shipments/{shipment_id}/delivery-proof"))
async def upload_delivery_proof(
    tenant_id: str,
    shipment_id: str,
    delivery_proof: dict,  # Should be a proper Pydantic model
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Upload delivery proof (signature, photo, etc.).

    Recognized delivery_proof fields:
    - signature: Base64 encoded signature image or signature data
    - photo_url: URL to uploaded delivery photo
    - received_by_name: Name of person who received delivery
    - delivery_notes: Optional notes about delivery

    Uploading proof transitions the shipment to 'delivered'; returns 404
    when the shipment does not exist.
    """
    try:
        user_id = "temp_user_id"  # Would come from auth context

        # Copy only the recognized proof fields into the update metadata
        metadata = {}
        for field in ('signature', 'photo_url', 'received_by_name', 'delivery_notes'):
            if field in delivery_proof:
                metadata[field] = delivery_proof[field]

        # Update shipment with delivery proof
        result = await distribution_service.update_shipment_status(
            shipment_id=shipment_id,
            new_status='delivered',  # Automatically mark as delivered when proof uploaded
            user_id=user_id,
            metadata=metadata
        )

        if not result:
            raise HTTPException(status_code=404, detail="Shipment not found")

        return {
            "message": "Delivery proof uploaded successfully",
            "shipment_id": shipment_id,
            "status": "delivered"
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")
|
||||
|
||||
|
||||
@router.get(route_builder.build_base_route("shipments/{shipment_id}"))
async def get_shipment_detail(
    tenant_id: str,
    shipment_id: str,
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get detailed information about a specific shipment including:
    - Basic shipment info (number, date, status)
    - Parent and child tenant details
    - Delivery route assignment
    - Purchase order reference
    - Delivery proof (signature, photo, received by)
    - Location tracking data

    Returns 404 when the shipment is unknown and 403 when it belongs
    to a different tenant.
    """
    try:
        # Access the shipment repository from the distribution service
        shipment = await distribution_service.shipment_repository.get_shipment_by_id(shipment_id)

        if not shipment:
            raise HTTPException(status_code=404, detail="Shipment not found")

        # Verify tenant access: reject cross-tenant reads even for valid ids
        owner_tenant = str(shipment.get('tenant_id'))
        if owner_tenant != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied to this shipment")

        return shipment
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get shipment details: {str(e)}")
|
||||
341
services/distribution/app/api/vrp_optimization.py
Normal file
341
services/distribution/app/api/vrp_optimization.py
Normal file
@@ -0,0 +1,341 @@
|
||||
"""
|
||||
VRP Optimization API Endpoints
|
||||
Endpoints for VRP optimization and metrics retrieval
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import List, Dict, Any, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
import structlog
|
||||
|
||||
from app.services.vrp_optimization_service import VRPOptimizationService
|
||||
from app.services.distribution_service import DistributionService
|
||||
from shared.auth.tenant_access import verify_tenant_permission_dep
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# Pydantic models for request/response
|
||||
class VRPOptimizationRequest(BaseModel):
    """Request body for a single-route VRP optimization run."""

    # Which version of the VRP solver to run; defaults to the current release.
    algorithm_version: str = Field(default="v2.1", description="VRP algorithm version to use")
    # Optional solver constraints; keys observed elsewhere in this module are
    # max_route_duration (minutes) and max_route_distance (km).
    constraints: Optional[Dict[str, Any]] = Field(
        None,
        description="Optimization constraints: max_route_duration, max_route_distance, etc."
    )
|
||||
|
||||
|
||||
class VRPOptimizationResponse(BaseModel):
    """Response returned after a VRP optimization run on one route."""

    success: bool
    route_id: str
    # Savings dict produced by the optimizer (distance/time/fuel/CO2/cost keys
    # per the DeliveryRoute.vrp_optimization_savings column).
    optimization_savings: Dict[str, Any]
    vrp_algorithm_version: str
    # ISO-8601 timestamp of when the optimization was performed.
    vrp_optimization_timestamp: str
    vrp_constraints_satisfied: bool
    vrp_objective_value: float
|
||||
|
||||
|
||||
class RouteOptimizationMetrics(BaseModel):
    """Stored VRP optimization metrics for a single delivery route.

    VRP fields are Optional because a route may never have been optimized.
    """

    route_id: str
    route_number: str
    route_date: str
    vrp_optimization_savings: Optional[Dict[str, Any]]
    vrp_algorithm_version: Optional[str]
    vrp_optimization_timestamp: Optional[str]
    vrp_constraints_satisfied: Optional[bool]
    vrp_objective_value: Optional[float]
    total_distance_km: Optional[float]
    estimated_duration_minutes: Optional[int]
|
||||
|
||||
|
||||
class NetworkOptimizationSummary(BaseModel):
    """Aggregated VRP savings across all routes of a tenant's network."""

    total_routes: int
    # Number of routes that have VRP optimization data recorded.
    optimized_routes: int
    total_distance_saved_km: float
    total_time_saved_minutes: float
    total_fuel_saved_liters: float
    total_co2_saved_kg: float
    total_cost_saved_eur: float
    # Fraction (or percentage — confirm against service implementation) of
    # routes that were optimized.
    optimization_rate: float
    average_savings_per_route: Optional[Dict[str, Any]]
|
||||
|
||||
|
||||
class OptimizationHistoryItem(BaseModel):
    """One historical VRP optimization run for a route."""

    optimization_id: str
    route_id: str
    # ISO-8601 timestamp of the run.
    timestamp: str
    algorithm_version: str
    distance_saved_km: float
    time_saved_minutes: float
    fuel_saved_liters: float
    co2_saved_kg: float
    cost_saved_eur: float
    constraints_satisfied: bool
|
||||
|
||||
|
||||
async def get_vrp_optimization_service() -> VRPOptimizationService:
    """Dependency injection for VRPOptimizationService.

    Builds a fully wired business DistributionService (repositories, service
    clients, routing optimizer) and hands it, together with the database
    manager, to a fresh VRPOptimizationService instance per request.

    NOTE(review): each repository below is constructed with the raw return
    value of ``database_manager.get_session()``. Elsewhere in this service
    (core/database.get_db) that value is used as an async context manager
    (``async with ... as session``), so the repositories may be receiving a
    context manager rather than an open AsyncSession — and three independent
    ones at that. Confirm the repository contract and session lifecycle.
    """
    # Imports are function-local, presumably to avoid import cycles at module
    # load time — confirm before hoisting them to the top of the file.
    from app.core.database import database_manager
    from app.services.distribution_service import DistributionService as BusinessDistributionService
    from app.repositories.delivery_route_repository import DeliveryRouteRepository
    from app.repositories.shipment_repository import ShipmentRepository
    from app.repositories.delivery_schedule_repository import DeliveryScheduleRepository
    from shared.clients.tenant_client import TenantServiceClient
    from shared.clients.inventory_client import InventoryServiceClient
    from shared.clients.procurement_client import ProcurementServiceClient
    from app.services.routing_optimizer import RoutingOptimizer

    # Create the business distribution service with proper dependencies
    route_repository = DeliveryRouteRepository(database_manager.get_session())
    shipment_repository = ShipmentRepository(database_manager.get_session())
    schedule_repository = DeliveryScheduleRepository(database_manager.get_session())

    # Create client instances (these will be initialized with proper config)
    tenant_client = TenantServiceClient()
    inventory_client = InventoryServiceClient()
    procurement_client = ProcurementServiceClient()
    routing_optimizer = RoutingOptimizer()

    distribution_service = BusinessDistributionService(
        route_repository=route_repository,
        shipment_repository=shipment_repository,
        schedule_repository=schedule_repository,
        procurement_client=procurement_client,
        tenant_client=tenant_client,
        inventory_client=inventory_client,
        routing_optimizer=routing_optimizer
    )

    return VRPOptimizationService(distribution_service, database_manager)
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/routes/{route_id}/optimize",
             response_model=VRPOptimizationResponse,
             summary="Optimize delivery route with VRP")
async def optimize_route_with_vrp(
    tenant_id: str,
    route_id: str,
    optimization_request: VRPOptimizationRequest,
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Optimize a delivery route using VRP algorithm

    Applies VRP optimization to a specific delivery route and stores the
    optimization metrics for analysis and reporting.

    Raises:
        HTTPException 500: if the optimizer reports failure or an unexpected
            error occurs during optimization.
    """
    # Local import: the module otherwise imports datetime only at its very
    # bottom, which is fragile to rely on from here.
    from datetime import datetime

    try:
        result = await vrp_service.optimize_route_with_vrp(
            route_id=route_id,
            algorithm_version=optimization_request.algorithm_version,
            constraints=optimization_request.constraints
        )

        if not result.get('success'):
            raise HTTPException(status_code=500, detail="Optimization failed")

        savings = result['optimization_savings']
        return VRPOptimizationResponse(
            success=True,
            route_id=result['route_id'],
            optimization_savings=savings,
            vrp_algorithm_version=savings.get('algorithm_version', optimization_request.algorithm_version),
            vrp_optimization_timestamp=savings.get('timestamp', datetime.now().isoformat()),
            vrp_constraints_satisfied=savings.get('constraints_satisfied', True),
            vrp_objective_value=savings.get('objective_value', 0.0)
        )

    except HTTPException:
        # Bug fix: previously the deliberate "Optimization failed" response
        # raised above was caught by the generic handler below and re-wrapped;
        # re-raise HTTP errors unchanged.
        raise
    except Exception as e:
        logger.error("VRP optimization failed", tenant_id=tenant_id, route_id=route_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"VRP optimization failed: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/routes/{route_id}/optimization-metrics",
            response_model=RouteOptimizationMetrics,
            summary="Get VRP optimization metrics for route")
async def get_route_optimization_metrics(
    tenant_id: str,
    route_id: str,
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Return the stored VRP optimization metrics for one route.

    Includes the recorded savings, the algorithm version used, and whether
    the optimization constraints were satisfied.
    """
    try:
        raw_metrics = await vrp_service.get_route_optimization_metrics(route_id)
        return RouteOptimizationMetrics(**raw_metrics)
    except Exception as exc:
        logger.error("Failed to get route optimization metrics", tenant_id=tenant_id, route_id=route_id, error=str(exc))
        raise HTTPException(status_code=500, detail=f"Failed to get optimization metrics: {str(exc)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/vrp/optimization-summary",
            response_model=NetworkOptimizationSummary,
            summary="Get network-wide VRP optimization summary")
async def get_network_optimization_summary(
    tenant_id: str,
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Return aggregated VRP optimization metrics across all of a tenant's routes.

    Covers total savings (distance, time, fuel, CO2, cost), the optimization
    rate, and average per-route improvements.
    """
    try:
        summary_payload = await vrp_service.get_network_optimization_summary(tenant_id)
        return NetworkOptimizationSummary(**summary_payload)
    except Exception as exc:
        logger.error("Failed to get network optimization summary", tenant_id=tenant_id, error=str(exc))
        raise HTTPException(status_code=500, detail=f"Failed to get optimization summary: {str(exc)}")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/vrp/batch-optimize",
             summary="Batch optimize multiple routes")
async def batch_optimize_routes(
    tenant_id: str,
    route_ids: List[str] = Query(..., description="List of route IDs to optimize"),
    algorithm_version: str = Query("v2.1", description="VRP algorithm version"),
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Apply VRP optimization to several delivery routes in one request.

    Returns per-route results plus overall success/failure counts.
    """
    try:
        batch_result = await vrp_service.batch_optimize_routes(tenant_id, route_ids)

        # Build the response with 'success' first, then copy the counters and
        # per-route results straight from the service result.
        payload = {'success': True}
        for field in ('total_routes_processed',
                      'successful_optimizations',
                      'failed_optimizations',
                      'results'):
            payload[field] = batch_result[field]
        return payload

    except Exception as exc:
        logger.error("Batch optimization failed", tenant_id=tenant_id, error=str(exc))
        raise HTTPException(status_code=500, detail=f"Batch optimization failed: {str(exc)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/routes/{route_id}/optimization-history",
            response_model=List[OptimizationHistoryItem],
            summary="Get optimization history for route")
async def get_optimization_history(
    tenant_id: str,
    route_id: str,
    limit: int = Query(10, description="Maximum number of historical records to return"),
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Return past VRP optimization runs for a route, newest-first per service
    ordering, capped at `limit` records.
    """
    try:
        records = await vrp_service.get_optimization_history(route_id, limit)
        return [OptimizationHistoryItem(**record) for record in records]
    except Exception as exc:
        logger.error("Failed to get optimization history", tenant_id=tenant_id, route_id=route_id, error=str(exc))
        raise HTTPException(status_code=500, detail=f"Failed to get optimization history: {str(exc)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/vrp/constraints/validate",
            summary="Validate VRP constraints")
async def validate_vrp_constraints(
    tenant_id: str,
    route_id: str,
    max_route_duration: Optional[int] = Query(None, description="Maximum route duration in minutes"),
    max_route_distance: Optional[float] = Query(None, description="Maximum route distance in km"),
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Validate VRP constraints against a route

    Checks whether a route satisfies the constraints supplied as query
    parameters; omitted parameters are not checked.

    Raises:
        HTTPException 404: if the route does not exist.
        HTTPException 500: on any unexpected validation failure.
    """
    try:
        from app.services.vrp_optimization_service import VRPConstraintValidator

        # Get route data
        route = await vrp_service.distribution_service.get_delivery_route(route_id)

        if not route:
            raise HTTPException(status_code=404, detail="Route not found")

        # Build constraints dict from the optional query parameters
        constraints = {}
        if max_route_duration is not None:
            constraints['max_route_duration'] = max_route_duration
        if max_route_distance is not None:
            constraints['max_route_distance'] = max_route_distance

        # Validate constraints
        validation_result = VRPConstraintValidator.validate_constraints(route, constraints)

        return {
            'success': True,
            'all_constraints_satisfied': validation_result['all_satisfied'],
            'constraint_violations': validation_result['constraint_violations']
        }

    except HTTPException:
        # Bug fix: the 404 raised above was previously caught by the generic
        # handler below and converted into a 500; re-raise it unchanged.
        raise
    except Exception as e:
        logger.error("Failed to validate VRP constraints", tenant_id=tenant_id, route_id=route_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to validate constraints: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/vrp/simulate",
             summary="Simulate VRP optimization")
async def simulate_vrp_optimization(
    tenant_id: str,
    route_id: str,
    vrp_service: VRPOptimizationService = Depends(get_vrp_optimization_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Simulate VRP optimization without saving results

    Useful for testing and previewing optimization results.

    Raises:
        HTTPException 404: if the route does not exist.
        HTTPException 500: on any unexpected simulation failure.
    """
    try:
        from app.services.vrp_optimization_service import VRPOptimizationSimulator

        # Get route data
        route = await vrp_service.distribution_service.get_delivery_route(route_id)

        if not route:
            raise HTTPException(status_code=404, detail="Route not found")

        # Simulate optimization (no persistence side effects)
        simulation_result = VRPOptimizationSimulator.simulate_optimization(route)

        return {
            'success': True,
            'original_route': simulation_result['original_route'],
            'optimized_route': simulation_result['optimized_route'],
            'optimization_savings': simulation_result['optimization_savings'],
            'algorithm_version': simulation_result['algorithm_version'],
            'constraints_satisfied': simulation_result['constraints_satisfied'],
            'objective_value': simulation_result['objective_value']
        }

    except HTTPException:
        # Bug fix: the 404 raised above was previously swallowed by the
        # generic handler below and re-emitted as a 500; re-raise it as-is.
        raise
    except Exception as e:
        logger.error("VRP simulation failed", tenant_id=tenant_id, route_id=route_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"VRP simulation failed: {str(e)}")
|
||||
|
||||
|
||||
# Import datetime at runtime to avoid circular imports
|
||||
from datetime import datetime
|
||||
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Production event consumer for the distribution service
|
||||
Listens for production completion events and triggers distribution planning
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
import json
|
||||
|
||||
from app.services.distribution_service import DistributionService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProductionEventConsumer:
    """
    Consumer for production events that may trigger distribution planning

    Currently the handlers only validate and log incoming events; the actual
    distribution-planning triggers are stubbed out (see commented calls).
    """

    def __init__(self, distribution_service: DistributionService) -> None:
        # Service used by future planning logic; not called by the current stubs.
        self.distribution_service = distribution_service

    async def handle_production_batch_completed(self, event_data: Dict[str, Any]) -> None:
        """
        Handle production batch completion event
        This might trigger distribution planning if it's for internal transfers

        Args:
            event_data: event payload; reads tenant_id (required), batch_id,
                product_type and completion_date (currently unused).

        Raises:
            Re-raises any unexpected processing error after logging it.
        """
        try:
            logger.info(f"Handling production batch completion: {event_data}")

            tenant_id = event_data.get('tenant_id')
            batch_id = event_data.get('batch_id')
            # product_type / completion_date are extracted but not yet used.
            product_type = event_data.get('product_type')
            completion_date = event_data.get('completion_date')

            # Events without a tenant are logged and dropped, not raised.
            if not tenant_id:
                logger.error("Missing tenant_id in production event")
                return

            # Check if this batch is for internal transfers (has destination tenant info)
            # In a real implementation, this would check if the production batch
            # is associated with an internal purchase order

            # For now, we'll just log the event
            logger.info(f"Production batch {batch_id} completed for tenant {tenant_id}")

            # In a real implementation, this might trigger immediate distribution planning
            # if the batch was for internal transfer orders
            # await self._trigger_distribution_if_needed(tenant_id, batch_id)

        except Exception as e:
            logger.error(f"Error handling production batch completion event: {e}", exc_info=True)
            raise

    async def handle_internal_transfer_approved(self, event_data: Dict[str, Any]) -> None:
        """
        Handle internal transfer approval event
        This should trigger immediate distribution planning for the approved transfer

        Args:
            event_data: event payload; requires tenant_id, transfer_id,
                destination_tenant_id and scheduled_date (all must be truthy).

        Raises:
            Re-raises any unexpected processing error after logging it.
        """
        try:
            logger.info(f"Handling internal transfer approval: {event_data}")

            tenant_id = event_data.get('tenant_id')  # The parent tenant
            transfer_id = event_data.get('transfer_id')
            destination_tenant_id = event_data.get('destination_tenant_id')
            scheduled_date = event_data.get('scheduled_date')

            # All four fields are mandatory; incomplete events are dropped.
            if not all([tenant_id, transfer_id, destination_tenant_id, scheduled_date]):
                logger.error("Missing required fields in internal transfer event")
                return

            # In a real implementation, this might schedule distribution planning
            # for the specific transfer on the scheduled date
            logger.info(f"Internal transfer {transfer_id} approved from {tenant_id} to {destination_tenant_id}")

        except Exception as e:
            logger.error(f"Error handling internal transfer approval: {e}", exc_info=True)
            raise

    async def _trigger_distribution_if_needed(self, tenant_id: str, batch_id: str) -> None:
        """
        Internal method to check if distribution planning is needed for this batch

        Placeholder: not yet implemented (and currently not called).
        """
        # Implementation would check if the batch is for internal transfers
        # and trigger distribution planning if so
        pass
|
||||
0
services/distribution/app/core/__init__.py
Normal file
0
services/distribution/app/core/__init__.py
Normal file
43
services/distribution/app/core/config.py
Normal file
43
services/distribution/app/core/config.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""
|
||||
Distribution Service Configuration
|
||||
"""
|
||||
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from pydantic import Field
|
||||
from typing import Optional
|
||||
import os
|
||||
|
||||
|
||||
class Settings(BaseServiceSettings):
    """
    Distribution Service specific settings

    Extends the shared BaseServiceSettings with the service identity and the
    database connection parameters for the distribution service.
    """

    # Service Identity
    APP_NAME: str = "Distribution Service"
    SERVICE_NAME: str = "distribution-service"
    DESCRIPTION: str = "Distribution and logistics service for enterprise tier bakery management"
    VERSION: str = "1.0.0"

    # Database Configuration
    # Use environment variables with fallbacks for development
    # Per-component precedence: DISTRIBUTION_DB_* -> DB_* -> hard-coded default.
    DB_HOST: str = os.getenv("DISTRIBUTION_DB_HOST", os.getenv("DB_HOST", "localhost"))
    DB_PORT: int = int(os.getenv("DISTRIBUTION_DB_PORT", os.getenv("DB_PORT", "5432")))
    DB_USER: str = os.getenv("DISTRIBUTION_DB_USER", os.getenv("DB_USER", "postgres"))
    DB_PASSWORD: str = os.getenv("DISTRIBUTION_DB_PASSWORD", os.getenv("DB_PASSWORD", "postgres"))
    DB_NAME: str = os.getenv("DISTRIBUTION_DB_NAME", os.getenv("DB_NAME", "distribution_db"))

    @property
    def DATABASE_URL(self) -> str:
        """Build database URL from components

        A complete URL from DISTRIBUTION_DATABASE_URL (or DATABASE_URL) takes
        precedence over the individual DB_* components above.
        """
        # Try service-specific environment variable first
        env_url = os.getenv("DISTRIBUTION_DATABASE_URL") or os.getenv("DATABASE_URL")
        if env_url:
            return env_url

        # Build from components (asyncpg driver for async SQLAlchemy)
        return f"postgresql+asyncpg://{self.DB_USER}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"


# Create settings instance
# Module-level singleton imported elsewhere as `from app.core.config import settings`.
settings = Settings()
|
||||
17
services/distribution/app/core/database.py
Normal file
17
services/distribution/app/core/database.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""
|
||||
Distribution Service Database Configuration
|
||||
"""
|
||||
|
||||
from shared.database import DatabaseManager, create_database_manager
|
||||
from .config import settings
|
||||
import os
|
||||
|
||||
|
||||
# Create database manager instance
# Shared, module-level manager for this service's database; also imported
# directly by app.main and the VRP dependency factory.
database_manager = create_database_manager(settings.DATABASE_URL, service_name="distribution")

# Convenience function to get database sessions
async def get_db():
    """Get database session generator.

    Async generator dependency: yields one AsyncSession per request and
    relies on the `async with` block to close/clean up the session when the
    consumer is done.
    """
    async with database_manager.get_session() as session:
        yield session
|
||||
127
services/distribution/app/main.py
Normal file
127
services/distribution/app/main.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
Distribution Service Main Application
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI
|
||||
from sqlalchemy import text
|
||||
from app.core.config import settings
|
||||
from app.core.database import database_manager
|
||||
from app.api.routes import router as distribution_router
|
||||
from app.api.shipments import router as shipments_router
|
||||
from app.api.internal_demo import router as internal_demo_router
|
||||
from app.api.vrp_optimization import router as vrp_optimization_router
|
||||
from shared.service_base import StandardFastAPIService
|
||||
|
||||
|
||||
class DistributionService(StandardFastAPIService):
    """Distribution Service with standardized setup.

    Extends the shared StandardFastAPIService with migration verification on
    startup, distribution-specific health-check tables, and custom metrics.
    """

    async def on_startup(self, app):
        """Custom startup logic including migration verification"""
        # Verify the schema before the base class finishes startup.
        await self.verify_migrations()
        await super().on_startup(app)

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations.

        Best-effort check: failures are logged as warnings, never raised, so
        the service still boots before the migration job has run.
        """
        try:
            async with self.database_manager.get_session() as session:
                # Check if alembic_version table exists
                result = await session.execute(text("""
                    SELECT EXISTS (
                        SELECT FROM information_schema.tables
                        WHERE table_schema = 'public'
                        AND table_name = 'alembic_version'
                    )
                """))
                table_exists = result.scalar()

                if table_exists:
                    # If table exists, check the version
                    result = await session.execute(text("SELECT version_num FROM alembic_version"))
                    version = result.scalar()
                    self.logger.info(f"Migration verification successful: {version}")
                else:
                    # If table doesn't exist, migrations might not have run yet
                    # This is OK - the migration job should create it
                    self.logger.warning("alembic_version table does not exist yet - migrations may not have run")

        except Exception as e:
            self.logger.warning(f"Migration verification failed (this may be expected during initial setup): {e}")

    def __init__(self):
        # Define expected database tables for health checks
        # Must match tables created in migrations/versions/001_initial_schema.py
        distribution_expected_tables = [
            'delivery_routes', 'shipments', 'delivery_schedules'
        ]

        # Define custom metrics for distribution service
        distribution_custom_metrics = {
            "routes_generated_total": {
                "type": "counter",
                "description": "Total delivery routes generated"
            },
            "shipments_processed_total": {
                "type": "counter",
                "description": "Total shipments processed"
            },
            "route_optimization_time_seconds": {
                "type": "histogram",
                "description": "Time to optimize delivery routes"
            },
            "shipment_processing_time_seconds": {
                "type": "histogram",
                "description": "Time to process shipment request"
            },
            "delivery_completion_rate": {
                "type": "counter",
                "description": "Delivery completion rate by status",
                "labels": ["status"]
            }
        }

        super().__init__(
            service_name="distribution-service",
            app_name="Distribution Service",
            description="Distribution and logistics service for enterprise tier bakery management",
            version="1.0.0",
            log_level=settings.LOG_LEVEL,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=distribution_expected_tables,
            custom_metrics=distribution_custom_metrics
        )

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for distribution service"""
        self.logger.info("Distribution Service shutdown complete")

    def get_service_features(self):
        """Return distribution-specific features"""
        return [
            "delivery_route_optimization",
            "shipment_tracking",
            "vehicle_assignment",
            "distribution_planning",
            "delivery_point_management"
        ]
|
||||
|
||||
|
||||
# Create service instance
service = DistributionService()

# Create FastAPI app with standardized setup
# `app` is the module-level ASGI entry point picked up by the server runner.
app = service.create_app(
    docs_url="/docs",
    redoc_url="/redoc"
)

# Setup standard endpoints
service.setup_standard_endpoints()

# Include routers with specific configurations
# Note: Routes now use RouteBuilder which includes full paths, so no prefix needed
service.add_router(distribution_router, tags=["distribution"])
service.add_router(shipments_router, tags=["shipments"])
service.add_router(internal_demo_router, tags=["internal-demo"])
service.add_router(vrp_optimization_router, tags=["vrp-optimization"])
|
||||
4
services/distribution/app/models/__init__.py
Normal file
4
services/distribution/app/models/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# Distribution Service Models
|
||||
from app.models.distribution import * # noqa: F401, F403
|
||||
|
||||
__all__ = []
|
||||
180
services/distribution/app/models/distribution.py
Normal file
180
services/distribution/app/models/distribution.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""
|
||||
Distribution models for the bakery management platform
|
||||
"""
|
||||
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class DeliveryRouteStatus(enum.Enum):
    """Status of delivery routes (lifecycle: planned -> in_progress -> completed)."""
    planned = "planned"          # route created, not yet started
    in_progress = "in_progress"  # driver is executing the route
    completed = "completed"      # all stops finished
    cancelled = "cancelled"      # route abandoned before completion
|
||||
|
||||
|
||||
class ShipmentStatus(enum.Enum):
    """Status of individual shipments along the delivery pipeline."""
    pending = "pending"          # created, not yet prepared
    packed = "packed"            # goods packed and ready to ship
    in_transit = "in_transit"    # on a vehicle, en route
    delivered = "delivered"      # successfully handed over
    failed = "failed"            # delivery attempt unsuccessful
|
||||
|
||||
|
||||
class DeliveryScheduleFrequency(enum.Enum):
    """Frequency of recurring delivery schedules."""
    daily = "daily"
    weekly = "weekly"
    biweekly = "biweekly"  # every two weeks
    monthly = "monthly"
|
||||
|
||||
|
||||
class DeliveryRoute(Base):
    """Optimized multi-stop routes for distribution.

    One row per planned vehicle run for a tenant on a given date; holds the
    ordered stop sequence plus the VRP optimization metadata recorded when the
    route was optimized.
    """
    __tablename__ = "delivery_routes"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Route identification
    route_number = Column(String(50), nullable=False, unique=True, index=True)
    route_date = Column(DateTime(timezone=True), nullable=False, index=True)  # Date when route is executed

    # Vehicle and driver assignment
    vehicle_id = Column(String(100), nullable=True)  # Reference to fleet management
    driver_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Reference to driver

    # Optimization metadata
    total_distance_km = Column(Float, nullable=True)
    estimated_duration_minutes = Column(Integer, nullable=True)

    # VRP Optimization metrics (Phase 2 enhancement)
    vrp_optimization_savings = Column(JSONB, nullable=True)  # {"distance_saved_km": 12.5, "time_saved_minutes": 25, "fuel_saved_liters": 8.2, "co2_saved_kg": 15.4, "cost_saved_eur": 12.50}
    vrp_algorithm_version = Column(String(50), nullable=True)  # Version of VRP algorithm used
    vrp_optimization_timestamp = Column(DateTime(timezone=True), nullable=True)  # When optimization was performed
    vrp_constraints_satisfied = Column(Boolean, nullable=True)  # Whether all constraints were satisfied
    vrp_objective_value = Column(Float, nullable=True)  # Objective function value from optimization

    # Route details
    route_sequence = Column(JSONB, nullable=True)  # Ordered array of stops with timing: [{"stop_number": 1, "location_id": "...", "estimated_arrival": "...", "actual_arrival": "..."}]
    notes = Column(Text, nullable=True)

    # Status
    status = Column(SQLEnum(DeliveryRouteStatus), nullable=False, default=DeliveryRouteStatus.planned, index=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    # Deleting a route also deletes its shipment rows (delete-orphan cascade).
    shipments = relationship("Shipment", back_populates="route", cascade="all, delete-orphan")

    # Indexes — cover the common lookups: per-tenant by date, by status,
    # and the combined date/tenant/status planning query.
    __table_args__ = (
        Index('ix_delivery_routes_tenant_date', 'tenant_id', 'route_date'),
        Index('ix_delivery_routes_status', 'status'),
        Index('ix_delivery_routes_date_tenant_status', 'route_date', 'tenant_id', 'status'),
    )
|
||||
|
||||
|
||||
class Shipment(Base):
    """Individual deliveries to child tenants.

    Links a parent (producing) tenant to a child (receiving) tenant, optionally
    tied to an internal purchase order and an assigned delivery route, with
    live tracking and proof-of-delivery data.
    """
    __tablename__ = "shipments"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Links to hierarchy and procurement
    # NOTE: tenant/purchase-order ids are cross-service references (no FK);
    # only delivery_route_id is a real foreign key within this schema.
    parent_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Source tenant (central production)
    child_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Destination tenant (retail outlet)
    purchase_order_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Associated internal purchase order
    delivery_route_id = Column(UUID(as_uuid=True), ForeignKey('delivery_routes.id', ondelete='SET NULL'), nullable=True, index=True)  # Assigned route

    # Shipment details
    shipment_number = Column(String(50), nullable=False, unique=True, index=True)
    shipment_date = Column(DateTime(timezone=True), nullable=False, index=True)

    # Tracking information (latest known GPS fix)
    current_location_lat = Column(Float, nullable=True)
    current_location_lng = Column(Float, nullable=True)
    last_tracked_at = Column(DateTime(timezone=True), nullable=True)
    status = Column(SQLEnum(ShipmentStatus), nullable=False, default=ShipmentStatus.pending, index=True)
    actual_delivery_time = Column(DateTime(timezone=True), nullable=True)

    # Proof of delivery
    signature = Column(Text, nullable=True)  # Digital signature base64 encoded
    photo_url = Column(String(500), nullable=True)  # URL to delivery confirmation photo
    received_by_name = Column(String(200), nullable=True)
    delivery_notes = Column(Text, nullable=True)

    # Weight/volume tracking
    total_weight_kg = Column(Float, nullable=True)
    total_volume_m3 = Column(Float, nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Relationships
    route = relationship("DeliveryRoute", back_populates="shipments")

    # Indexes for tenant/status filtering, hierarchy lookups, and date queries.
    __table_args__ = (
        Index('ix_shipments_tenant_status', 'tenant_id', 'status'),
        Index('ix_shipments_parent_child', 'parent_tenant_id', 'child_tenant_id'),
        Index('ix_shipments_date_tenant', 'shipment_date', 'tenant_id'),
    )
|
||||
|
||||
|
||||
class DeliverySchedule(Base):
    """Recurring delivery patterns.

    Describes when deliveries should run between a parent tenant and a set
    of child tenants, and whether purchase orders should be auto-generated
    ahead of each scheduled delivery.
    """
    __tablename__ = "delivery_schedules"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Tenant that owns this schedule record.
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Schedule identification
    name = Column(String(200), nullable=False)

    # Delivery pattern
    delivery_days = Column(String(200), nullable=False)  # Format: "Mon,Wed,Fri" or "Mon-Fri"
    delivery_time = Column(String(20), nullable=False)  # Format: "HH:MM" or "HH:MM-HH:MM"
    frequency = Column(SQLEnum(DeliveryScheduleFrequency), nullable=False, default=DeliveryScheduleFrequency.weekly)

    # Auto-generation settings
    auto_generate_orders = Column(Boolean, nullable=False, default=False)
    lead_time_days = Column(Integer, nullable=False, default=1)  # How many days in advance to generate

    # Target tenants for this schedule
    target_parent_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    target_child_tenant_ids = Column(JSONB, nullable=False)  # List of child tenant IDs involved in this route

    # Configuration
    is_active = Column(Boolean, nullable=False, default=True)
    notes = Column(Text, nullable=True)

    # Audit fields (timestamps are set server-side; *_by hold user UUIDs)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=False)
    updated_by = Column(UUID(as_uuid=True), nullable=False)

    # Indexes
    __table_args__ = (
        Index('ix_delivery_schedules_tenant_active', 'tenant_id', 'is_active'),
        Index('ix_delivery_schedules_parent_tenant', 'target_parent_tenant_id'),
    )
|
||||
@@ -0,0 +1,312 @@
|
||||
"""
|
||||
Delivery Route Repository
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.models.distribution import DeliveryRoute, DeliveryRouteStatus
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class DeliveryRouteRepository:
    """Data access layer for DeliveryRoute records.

    Read methods return plain dicts rather than ORM instances so callers
    are decoupled from SQLAlchemy.
    """

    # Audit-trail actor recorded when no real user id is supplied.
    _SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")

    def __init__(self, db_session: AsyncSession):
        self.db_session = db_session

    @staticmethod
    def _route_to_dict(route: DeliveryRoute) -> Dict[str, Any]:
        """Convert a DeliveryRoute ORM instance to the dict shape shared by all read APIs."""
        return {
            'id': str(route.id),
            'tenant_id': str(route.tenant_id),
            'route_number': route.route_number,
            'route_date': route.route_date,
            'vehicle_id': route.vehicle_id,
            'driver_id': route.driver_id,
            'total_distance_km': route.total_distance_km,
            'estimated_duration_minutes': route.estimated_duration_minutes,
            'route_sequence': route.route_sequence,
            # Enum statuses serialize to their value; plain strings pass through.
            'status': route.status.value if hasattr(route.status, 'value') else route.status,
            'created_at': route.created_at,
            'updated_at': route.updated_at
        }

    @staticmethod
    def _vrp_metrics_to_dict(route: DeliveryRoute) -> Dict[str, Any]:
        """Extract the VRP optimization metric fields from a route."""
        return {
            'vrp_optimization_savings': route.vrp_optimization_savings,
            'vrp_algorithm_version': route.vrp_algorithm_version,
            'vrp_optimization_timestamp': route.vrp_optimization_timestamp,
            'vrp_constraints_satisfied': route.vrp_constraints_satisfied,
            'vrp_objective_value': route.vrp_objective_value
        }

    async def create_route(self, route_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new delivery route.

        Required keys: tenant_id, route_number, route_date. Optional keys
        default to None; status defaults to 'planned' and the audit columns
        default to the system user.

        Returns:
            The created route as a dict.
        """
        route = DeliveryRoute(
            id=uuid.uuid4(),
            tenant_id=route_data['tenant_id'],
            route_number=route_data['route_number'],
            route_date=route_data['route_date'],
            vehicle_id=route_data.get('vehicle_id'),
            driver_id=route_data.get('driver_id'),
            total_distance_km=route_data.get('total_distance_km'),
            estimated_duration_minutes=route_data.get('estimated_duration_minutes'),
            route_sequence=route_data.get('route_sequence'),
            status=route_data.get('status', 'planned'),
            created_by=route_data.get('created_by', self._SYSTEM_USER_ID),
            updated_by=route_data.get('updated_by', self._SYSTEM_USER_ID)
        )

        self.db_session.add(route)
        await self.db_session.commit()
        # Reload server-generated columns (created_at / updated_at).
        await self.db_session.refresh(route)

        return self._route_to_dict(route)

    async def get_routes_by_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
        """Get all delivery routes for a specific date and tenant."""
        # Inclusive bounds cover the whole calendar day; the previous
        # exclusive upper bound silently dropped rows at 23:59:59.999999.
        day_start = datetime.combine(target_date, datetime.min.time())
        day_end = datetime.combine(target_date, datetime.max.time())
        stmt = select(DeliveryRoute).where(
            (DeliveryRoute.tenant_id == tenant_id) &
            (DeliveryRoute.route_date >= day_start) &
            (DeliveryRoute.route_date <= day_end)
        )

        result = await self.db_session.execute(stmt)
        return [self._route_to_dict(route) for route in result.scalars().all()]

    async def get_routes_by_date_range(self, tenant_id: str, start_date: date, end_date: date) -> List[Dict[str, Any]]:
        """Get all delivery routes for a date range (inclusive) and tenant."""
        range_start = datetime.combine(start_date, datetime.min.time())
        range_end = datetime.combine(end_date, datetime.max.time())
        stmt = select(DeliveryRoute).where(
            (DeliveryRoute.tenant_id == tenant_id) &
            (DeliveryRoute.route_date >= range_start) &
            (DeliveryRoute.route_date <= range_end)
        )

        result = await self.db_session.execute(stmt)
        return [self._route_to_dict(route) for route in result.scalars().all()]

    async def get_route_by_id(self, route_id: str) -> Optional[Dict[str, Any]]:
        """Get a specific delivery route by ID, or None if not found."""
        stmt = select(DeliveryRoute).where(DeliveryRoute.id == route_id)
        result = await self.db_session.execute(stmt)
        route = result.scalar_one_or_none()

        return self._route_to_dict(route) if route else None

    async def update_route_status(self, route_id: str, status: str, user_id: str) -> Optional[Dict[str, Any]]:
        """Update a route's status.

        user_id may be the literal string 'system' for automated updates;
        it is mapped to the system audit UUID.

        Returns:
            The updated route as a dict, or None if the route does not exist.
        """
        stmt = select(DeliveryRoute).where(DeliveryRoute.id == route_id)
        result = await self.db_session.execute(stmt)
        route = result.scalar_one_or_none()

        if not route:
            return None

        route.updated_by = self._SYSTEM_USER_ID if user_id == 'system' else user_id
        route.status = status
        await self.db_session.commit()
        await self.db_session.refresh(route)

        return self._route_to_dict(route)

    async def get_all_routes_for_tenant(self, tenant_id: str) -> List[Dict[str, Any]]:
        """Get all delivery routes for a tenant."""
        stmt = select(DeliveryRoute).where(DeliveryRoute.tenant_id == tenant_id)

        result = await self.db_session.execute(stmt)
        return [self._route_to_dict(route) for route in result.scalars().all()]

    async def delete_demo_routes_for_tenant(self, tenant_id: str) -> int:
        """Delete all demo routes for a tenant.

        Used for demo session cleanup.

        Args:
            tenant_id: The tenant ID to delete routes for

        Returns:
            Number of routes deleted
        """
        from sqlalchemy import delete

        # Demo routes are identified by the DEMO- prefix in route_number.
        stmt = delete(DeliveryRoute).where(
            (DeliveryRoute.tenant_id == uuid.UUID(tenant_id)) &
            (DeliveryRoute.route_number.like('DEMO-%'))
        )

        result = await self.db_session.execute(stmt)
        await self.db_session.commit()

        return result.rowcount

    async def update_route_vrp_metrics(self, route_id: str, vrp_metrics: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Update VRP optimization metrics for a route.

        Keys missing from vrp_metrics reset the corresponding column to
        None (matches the original .get() semantics).

        Returns:
            The route id plus the stored metric fields, or None if the
            route does not exist.
        """
        stmt = select(DeliveryRoute).where(DeliveryRoute.id == route_id)
        result = await self.db_session.execute(stmt)
        route = result.scalar_one_or_none()

        if not route:
            return None

        route.vrp_optimization_savings = vrp_metrics.get('vrp_optimization_savings')
        route.vrp_algorithm_version = vrp_metrics.get('vrp_algorithm_version')
        route.vrp_optimization_timestamp = vrp_metrics.get('vrp_optimization_timestamp')
        route.vrp_constraints_satisfied = vrp_metrics.get('vrp_constraints_satisfied')
        route.vrp_objective_value = vrp_metrics.get('vrp_objective_value')

        await self.db_session.commit()
        await self.db_session.refresh(route)

        return {'id': str(route.id), **self._vrp_metrics_to_dict(route)}

    async def get_routes_by_tenant(self, tenant_id: str, limit: int = None, offset: int = None, order_by: str = None) -> List[Dict[str, Any]]:
        """Get all routes for a tenant with optional pagination and ordering.

        order_by is a loose SQL-ish string; only the columns
        'vrp_optimization_timestamp' and 'route_date' are recognized, with
        descending order when the string contains 'DESC'. Unrecognized
        strings leave the query unordered, as before.
        """
        stmt = select(DeliveryRoute).where(DeliveryRoute.tenant_id == tenant_id)

        if order_by:
            column = None
            if 'vrp_optimization_timestamp' in order_by:
                column = DeliveryRoute.vrp_optimization_timestamp
            elif 'route_date' in order_by:
                column = DeliveryRoute.route_date
            if column is not None:
                stmt = stmt.order_by(column.desc() if 'DESC' in order_by else column.asc())

        if limit is not None:
            stmt = stmt.limit(limit)
        if offset is not None:
            stmt = stmt.offset(offset)

        result = await self.db_session.execute(stmt)
        # Base fields first, then the VRP metric fields (same key order as before).
        return [
            {**self._route_to_dict(route), **self._vrp_metrics_to_dict(route)}
            for route in result.scalars().all()
        ]
|
||||
@@ -0,0 +1,74 @@
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from sqlalchemy import select, update, delete
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
import structlog
|
||||
|
||||
from app.models.distribution import DeliverySchedule
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
class DeliveryScheduleRepository:
    """Asynchronous CRUD access for DeliverySchedule rows.

    Mutating operations commit on success and roll back + log on failure.
    """

    def __init__(self, session: AsyncSession):
        self.session = session

    async def create_schedule(self, schedule_data: Dict[str, Any]) -> DeliverySchedule:
        """Create a new delivery schedule"""
        try:
            schedule = DeliverySchedule(**schedule_data)
            self.session.add(schedule)
            await self.session.commit()
            await self.session.refresh(schedule)
        except IntegrityError as exc:
            # Constraint violations surface as ValueError to the caller.
            await self.session.rollback()
            logger.error("Error creating delivery schedule", error=str(exc))
            raise ValueError(f"Failed to create delivery schedule: {exc}")
        except Exception as exc:
            await self.session.rollback()
            logger.error("Unexpected error creating delivery schedule", error=str(exc))
            raise
        else:
            return schedule

    async def get_schedule_by_id(self, schedule_id: UUID) -> Optional[DeliverySchedule]:
        """Get a delivery schedule by ID"""
        stmt = select(DeliverySchedule).where(DeliverySchedule.id == schedule_id)
        rows = await self.session.execute(stmt)
        return rows.scalar_one_or_none()

    async def get_schedules_by_tenant(self, tenant_id: UUID) -> List[DeliverySchedule]:
        """Get all delivery schedules for a tenant"""
        stmt = select(DeliverySchedule).where(DeliverySchedule.tenant_id == tenant_id)
        rows = await self.session.execute(stmt)
        return rows.scalars().all()

    async def update_schedule(self, schedule_id: UUID, update_data: Dict[str, Any]) -> Optional[DeliverySchedule]:
        """Update a delivery schedule"""
        try:
            # RETURNING hands back the updated row in the same round-trip.
            stmt = (
                update(DeliverySchedule)
                .where(DeliverySchedule.id == schedule_id)
                .values(**update_data)
                .returning(DeliverySchedule)
            )
            rows = await self.session.execute(stmt)
            await self.session.commit()
            return rows.scalar_one_or_none()
        except Exception as exc:
            await self.session.rollback()
            logger.error("Error updating delivery schedule", error=str(exc), schedule_id=schedule_id)
            raise

    async def delete_schedule(self, schedule_id: UUID) -> bool:
        """Delete a delivery schedule"""
        try:
            outcome = await self.session.execute(
                delete(DeliverySchedule).where(DeliverySchedule.id == schedule_id)
            )
            await self.session.commit()
            # True when a row was actually removed.
            return outcome.rowcount > 0
        except Exception as exc:
            await self.session.rollback()
            logger.error("Error deleting delivery schedule", error=str(exc), schedule_id=schedule_id)
            raise
|
||||
345
services/distribution/app/repositories/shipment_repository.py
Normal file
345
services/distribution/app/repositories/shipment_repository.py
Normal file
@@ -0,0 +1,345 @@
|
||||
"""
|
||||
Shipment Repository
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.models.distribution import Shipment, ShipmentStatus
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class ShipmentRepository:
    """Data access layer for Shipment records.

    Read methods return plain dicts rather than ORM instances so callers
    are decoupled from SQLAlchemy.
    """

    # Audit-trail actor recorded when no real user id is supplied.
    _SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")

    # Tracking / proof-of-delivery metadata fields copied verbatim.
    _METADATA_PLAIN_FIELDS = (
        'current_location_lat', 'current_location_lng', 'signature',
        'photo_url', 'received_by_name', 'delivery_notes'
    )
    # Metadata fields that may arrive as ISO-8601 strings.
    _METADATA_DATETIME_FIELDS = ('last_tracked_at', 'actual_delivery_time')

    def __init__(self, db_session: AsyncSession):
        self.db_session = db_session

    @staticmethod
    def _shipment_to_dict(shipment: Shipment) -> Dict[str, Any]:
        """Convert a Shipment ORM instance to the dict shape shared by all read APIs."""
        return {
            'id': str(shipment.id),
            'tenant_id': str(shipment.tenant_id),
            'parent_tenant_id': str(shipment.parent_tenant_id),
            'child_tenant_id': str(shipment.child_tenant_id),
            'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
            'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
            'shipment_number': shipment.shipment_number,
            'shipment_date': shipment.shipment_date,
            'current_location_lat': shipment.current_location_lat,
            'current_location_lng': shipment.current_location_lng,
            'last_tracked_at': shipment.last_tracked_at,
            # Enum statuses serialize to their value; plain strings pass through.
            'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
            'actual_delivery_time': shipment.actual_delivery_time,
            'signature': shipment.signature,
            'photo_url': shipment.photo_url,
            'received_by_name': shipment.received_by_name,
            'delivery_notes': shipment.delivery_notes,
            'total_weight_kg': shipment.total_weight_kg,
            'total_volume_m3': shipment.total_volume_m3,
            'created_at': shipment.created_at,
            'updated_at': shipment.updated_at
        }

    def _apply_tracking_metadata(self, shipment: Shipment, metadata: Dict[str, Any]) -> None:
        """Copy tracking / proof-of-delivery fields from metadata onto the shipment.

        Only keys present in metadata are touched; datetime-valued fields
        accept both ISO-8601 strings and datetime objects.
        """
        for field in self._METADATA_PLAIN_FIELDS:
            if field in metadata:
                setattr(shipment, field, metadata[field])
        for field in self._METADATA_DATETIME_FIELDS:
            if field in metadata:
                value = metadata[field]
                # datetime is already imported at module level; the old code
                # re-imported it locally per branch.
                setattr(shipment, field, datetime.fromisoformat(value) if isinstance(value, str) else value)

    async def create_shipment(self, shipment_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new shipment.

        Required keys: tenant_id, parent_tenant_id, child_tenant_id,
        shipment_number, shipment_date. Optional keys default to None;
        status defaults to 'pending' and the audit columns default to the
        system user.

        Returns:
            The created shipment as a dict.
        """
        shipment = Shipment(
            id=uuid.uuid4(),
            tenant_id=shipment_data['tenant_id'],
            parent_tenant_id=shipment_data['parent_tenant_id'],
            child_tenant_id=shipment_data['child_tenant_id'],
            purchase_order_id=shipment_data.get('purchase_order_id'),
            delivery_route_id=shipment_data.get('delivery_route_id'),
            shipment_number=shipment_data['shipment_number'],
            shipment_date=shipment_data['shipment_date'],
            status=shipment_data.get('status', 'pending'),
            total_weight_kg=shipment_data.get('total_weight_kg'),
            total_volume_m3=shipment_data.get('total_volume_m3'),
            created_by=shipment_data.get('created_by', self._SYSTEM_USER_ID),
            updated_by=shipment_data.get('updated_by', self._SYSTEM_USER_ID)
        )

        self.db_session.add(shipment)
        await self.db_session.commit()
        # Reload server-generated columns (created_at / updated_at).
        await self.db_session.refresh(shipment)

        return self._shipment_to_dict(shipment)

    async def get_shipments_by_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
        """Get all shipments for a specific date and tenant."""
        # Inclusive bounds cover the whole calendar day; the previous
        # exclusive upper bound silently dropped rows at 23:59:59.999999.
        day_start = datetime.combine(target_date, datetime.min.time())
        day_end = datetime.combine(target_date, datetime.max.time())
        stmt = select(Shipment).where(
            (Shipment.tenant_id == tenant_id) &
            (Shipment.shipment_date >= day_start) &
            (Shipment.shipment_date <= day_end)
        )

        result = await self.db_session.execute(stmt)
        return [self._shipment_to_dict(s) for s in result.scalars().all()]

    async def get_shipments_by_date_range(self, tenant_id: str, start_date: date, end_date: date) -> List[Dict[str, Any]]:
        """Get all shipments for a date range (inclusive) and tenant."""
        range_start = datetime.combine(start_date, datetime.min.time())
        range_end = datetime.combine(end_date, datetime.max.time())
        stmt = select(Shipment).where(
            (Shipment.tenant_id == tenant_id) &
            (Shipment.shipment_date >= range_start) &
            (Shipment.shipment_date <= range_end)
        )

        result = await self.db_session.execute(stmt)
        return [self._shipment_to_dict(s) for s in result.scalars().all()]

    async def get_shipment_by_id(self, shipment_id: str) -> Optional[Dict[str, Any]]:
        """Get a specific shipment by ID, or None if not found."""
        stmt = select(Shipment).where(Shipment.id == shipment_id)
        result = await self.db_session.execute(stmt)
        shipment = result.scalar_one_or_none()

        return self._shipment_to_dict(shipment) if shipment else None

    async def update_shipment_status(self, shipment_id: str, status: str, user_id: str, metadata: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
        """Update a shipment's status and, optionally, tracking metadata.

        user_id may be the literal string 'system' for automated updates;
        it is mapped to the system audit UUID.

        Returns:
            The updated shipment as a dict, or None if it does not exist.
        """
        stmt = select(Shipment).where(Shipment.id == shipment_id)
        result = await self.db_session.execute(stmt)
        shipment = result.scalar_one_or_none()

        if not shipment:
            return None

        shipment.updated_by = self._SYSTEM_USER_ID if user_id == 'system' else user_id
        shipment.status = status

        if metadata:
            self._apply_tracking_metadata(shipment, metadata)

        await self.db_session.commit()
        await self.db_session.refresh(shipment)

        return self._shipment_to_dict(shipment)

    async def assign_shipments_to_route(self, route_id: str, shipment_ids: List[str], user_id: str) -> Dict[str, Any]:
        """Assign multiple shipments to a route in one batch.

        Shipment ids with no matching row are silently skipped.

        Returns:
            A summary dict: the route id, abbreviated dicts of the updated
            shipments, and their count.
        """
        stmt = select(Shipment).where(Shipment.id.in_(shipment_ids))
        result = await self.db_session.execute(stmt)
        shipments = result.scalars().all()

        actual_user_id = self._SYSTEM_USER_ID if user_id == 'system' else user_id

        updated_shipments = []
        for shipment in shipments:
            shipment.delivery_route_id = route_id
            shipment.updated_by = actual_user_id
            updated_shipments.append({
                'id': str(shipment.id),
                'shipment_number': shipment.shipment_number,
                'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
                'delivery_route_id': str(shipment.delivery_route_id)
            })

        # Single commit for the whole batch. The old code also refresh()ed
        # each shipment before committing, which only added one DB
        # round-trip per row without changing the returned data.
        await self.db_session.commit()

        return {
            'route_id': route_id,
            'updated_shipments': updated_shipments,
            'count': len(updated_shipments)
        }

    async def get_all_shipments_for_tenant(self, tenant_id: str) -> List[Dict[str, Any]]:
        """Get all shipments for a tenant."""
        stmt = select(Shipment).where(Shipment.tenant_id == tenant_id)

        result = await self.db_session.execute(stmt)
        return [self._shipment_to_dict(s) for s in result.scalars().all()]

    async def delete_demo_shipments_for_tenant(self, tenant_id: str) -> int:
        """Delete all demo shipments for a tenant.

        Used for demo session cleanup.

        Args:
            tenant_id: The tenant ID to delete shipments for

        Returns:
            Number of shipments deleted
        """
        from sqlalchemy import delete

        # Demo shipments are identified by the DEMOSHP- prefix.
        stmt = delete(Shipment).where(
            (Shipment.tenant_id == uuid.UUID(tenant_id)) &
            (Shipment.shipment_number.like('DEMOSHP-%'))
        )

        result = await self.db_session.execute(stmt)
        await self.db_session.commit()

        return result.rowcount
|
||||
324
services/distribution/app/services/distribution_service.py
Normal file
324
services/distribution/app/services/distribution_service.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Distribution Service for Enterprise Tier
|
||||
Manages delivery routes and shipment tracking for parent-child tenant networks
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime, date, timedelta
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
from shared.utils.demo_dates import BASE_REFERENCE_DATE
|
||||
|
||||
from app.models.distribution import DeliveryRoute, Shipment, DeliverySchedule, DeliveryRouteStatus, ShipmentStatus
|
||||
from app.services.routing_optimizer import RoutingOptimizer
|
||||
from shared.clients.tenant_client import TenantServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from shared.clients.procurement_client import ProcurementServiceClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DistributionService:
    """
    Core business logic for distribution management.

    Orchestrates daily delivery planning for enterprise parent/child tenant
    networks: pulls approved internal purchase orders from the procurement
    service, resolves depot and outlet coordinates via the tenant service,
    runs the VRP routing optimizer, and persists the resulting routes and
    shipments through the injected repositories.
    """

    def __init__(
        self,
        route_repository,
        shipment_repository,
        schedule_repository,
        procurement_client: ProcurementServiceClient,
        tenant_client: TenantServiceClient,
        inventory_client: InventoryServiceClient,
        routing_optimizer: RoutingOptimizer
    ):
        # Repositories handle persistence; clients handle cross-service calls.
        # inventory_client is stored but not used by any method visible here.
        self.route_repository = route_repository
        self.shipment_repository = shipment_repository
        self.schedule_repository = schedule_repository
        self.procurement_client = procurement_client
        self.tenant_client = tenant_client
        self.inventory_client = inventory_client
        self.routing_optimizer = routing_optimizer

    async def generate_daily_distribution_plan(
        self,
        parent_tenant_id: str,
        target_date: date,
        vehicle_capacity_kg: float = 1000.0
    ) -> Dict[str, Any]:
        """
        Generate daily distribution plan for internal transfers between parent and children.

        Args:
            parent_tenant_id: Parent (hub) tenant whose approved internal POs drive the plan.
            target_date: Delivery date the plan is generated for.
            vehicle_capacity_kg: Maximum payload per vehicle, forwarded to the optimizer.

        Returns:
            Dict with the created routes/shipments plus a ``status`` of
            ``"success"``, ``"no_deliveries_needed"`` or ``"no_valid_deliveries"``.

        Raises:
            ValueError: If the parent tenant has no ``central_production`` location.
        """
        logger.info(f"Generating distribution plan for parent tenant {parent_tenant_id} on {target_date}")

        try:
            # 1. Fetch all approved internal POs for target date from procurement service
            internal_pos = await self.procurement_client.get_approved_internal_purchase_orders(
                parent_tenant_id=parent_tenant_id,
                target_date=target_date
            )

            if not internal_pos:
                logger.info(f"No approved internal POs found for {parent_tenant_id} on {target_date}")
                return {
                    "parent_tenant_id": parent_tenant_id,
                    "target_date": target_date.isoformat(),
                    "routes": [],
                    "shipments": [],
                    "status": "no_deliveries_needed"
                }

            # 2. Group by child tenant and aggregate weights/volumes
            deliveries_by_child = {}
            for po in internal_pos:
                child_tenant_id = po.get('destination_tenant_id')
                if child_tenant_id not in deliveries_by_child:
                    # NOTE(review): only the first PO's id per child is kept as
                    # 'po_id'; subsequent POs for the same child are merged into
                    # the weight/items totals. Confirm this is intended.
                    deliveries_by_child[child_tenant_id] = {
                        'po_id': po.get('id'),
                        'weight_kg': 0,
                        'volume_m3': 0,
                        'items_count': 0
                    }

                # Calculate total weight and volume for this PO
                total_weight = 0
                total_volume = 0
                # NOTE(review): total_volume is initialized but never accumulated,
                # so 'volume_m3' always stays 0 — confirm whether volume estimation
                # is intentionally deferred.
                for item in po.get('items', []):
                    # In a real implementation, we'd have weight/volume per item
                    # For now, we'll estimate based on quantity
                    quantity = item.get('ordered_quantity', 0)
                    # Typical bakery item weight estimation (adjust as needed)
                    avg_item_weight_kg = 1.0  # Adjust based on actual products
                    total_weight += Decimal(str(quantity)) * Decimal(str(avg_item_weight_kg))

                deliveries_by_child[child_tenant_id]['weight_kg'] += float(total_weight)
                deliveries_by_child[child_tenant_id]['items_count'] += len(po.get('items', []))

            # 3. Fetch parent depot location and all child locations from tenant service
            # The tenant client may return either a dict payload or a bare list.
            parent_locations_response = await self.tenant_client.get_tenant_locations(parent_tenant_id)
            parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
            parent_depot = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)

            if not parent_depot:
                logger.error(f"No central production location found for parent tenant {parent_tenant_id}")
                raise ValueError(f"No central production location found for parent tenant {parent_tenant_id}")

            depot_location = (float(parent_depot['latitude']), float(parent_depot['longitude']))

            # Fetch all child tenant locations
            deliveries_data = []
            for child_tenant_id, delivery_info in deliveries_by_child.items():
                child_locations_response = await self.tenant_client.get_tenant_locations(child_tenant_id)
                child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
                child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)

                if not child_location:
                    # Skip children without a deliverable outlet rather than failing the plan.
                    logger.warning(f"No retail outlet location found for child tenant {child_tenant_id}")
                    continue

                deliveries_data.append({
                    'id': f"delivery_{child_tenant_id}",
                    'child_tenant_id': child_tenant_id,
                    'location': (float(child_location['latitude']), float(child_location['longitude'])),
                    'weight_kg': delivery_info['weight_kg'],
                    'volume_m3': delivery_info['volume_m3'],
                    'po_id': delivery_info['po_id'],
                    'items_count': delivery_info['items_count']
                })

            if not deliveries_data:
                logger.info(f"No valid delivery locations found for distribution plan")
                return {
                    "parent_tenant_id": parent_tenant_id,
                    "target_date": target_date.isoformat(),
                    "routes": [],
                    "shipments": [],
                    "status": "no_valid_deliveries"
                }

            # 4. Call routing_optimizer.optimize_daily_routes()
            optimization_result = await self.routing_optimizer.optimize_daily_routes(
                deliveries=deliveries_data,
                depot_location=depot_location,
                vehicle_capacity_kg=vehicle_capacity_kg
            )

            # 5. Create DeliveryRoute and Shipment records
            created_routes = []
            created_shipments = []

            for route_idx, route_data in enumerate(optimization_result['routes']):
                # Create DeliveryRoute record; route number encodes date + 1-based index.
                route = await self.route_repository.create_route({
                    'tenant_id': parent_tenant_id,
                    'route_number': f"R{target_date.strftime('%Y%m%d')}{route_idx + 1:02d}",
                    'route_date': datetime.combine(target_date, datetime.min.time()),
                    'vehicle_id': route_data.get('vehicle_id'),
                    'driver_id': route_data.get('driver_id'),
                    'total_distance_km': route_data.get('total_distance_km', 0),
                    'estimated_duration_minutes': route_data.get('estimated_duration_minutes', 0),
                    'route_sequence': route_data.get('route_sequence', []),
                    'status': 'planned'
                })

                created_routes.append(route)

                # Create Shipment records for each stop (excluding depot stops)
                for stop in route_data.get('route_sequence', []):
                    if stop.get('is_depot', False) == False and 'child_tenant_id' in stop:
                        shipment = await self.shipment_repository.create_shipment({
                            'tenant_id': parent_tenant_id,
                            'parent_tenant_id': parent_tenant_id,
                            'child_tenant_id': stop['child_tenant_id'],
                            'purchase_order_id': stop.get('po_id'),
                            'delivery_route_id': route['id'],
                            'shipment_number': f"S{target_date.strftime('%Y%m%d')}{len(created_shipments) + 1:03d}",
                            'shipment_date': datetime.combine(target_date, datetime.min.time()),
                            'status': 'pending',
                            'total_weight_kg': stop.get('weight_kg', 0),
                            'total_volume_m3': stop.get('volume_m3', 0)
                        })
                        created_shipments.append(shipment)

            logger.info(f"Distribution plan generated: {len(created_routes)} routes, {len(created_shipments)} shipments")

            # 6. Publish distribution.plan.created event to message queue
            await self._publish_distribution_plan_created_event(
                parent_tenant_id=parent_tenant_id,
                target_date=target_date,
                routes=created_routes,
                shipments=created_shipments
            )

            return {
                "parent_tenant_id": parent_tenant_id,
                "target_date": target_date.isoformat(),
                "routes": [route for route in created_routes],
                "shipments": [shipment for shipment in created_shipments],
                "optimization_metadata": optimization_result,
                "status": "success"
            }

        except Exception as e:
            logger.error(f"Error generating distribution plan: {e}", exc_info=True)
            raise

    async def _publish_distribution_plan_created_event(
        self,
        parent_tenant_id: str,
        target_date: date,
        routes: List[Dict[str, Any]],
        shipments: List[Dict[str, Any]]
    ):
        """
        Publish distribution plan created event to message queue.

        Currently a stub that only logs; the parameters are accepted so the
        call site is stable once a real publisher is wired in.
        """
        # In a real implementation, this would publish to RabbitMQ
        logger.info(f"Distribution plan created event published for parent {parent_tenant_id}")

    # Legacy setup_demo_enterprise_distribution method removed
    # Distribution now uses standard cloning pattern via /internal/demo/clone endpoint

    async def get_delivery_routes_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
        """
        Get all delivery routes for a specific date and tenant.
        """
        routes = await self.route_repository.get_routes_by_date(tenant_id, target_date)
        return routes

    async def get_shipments_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
        """
        Get all shipments for a specific date and tenant.
        """
        shipments = await self.shipment_repository.get_shipments_by_date(tenant_id, target_date)
        return shipments

    async def update_shipment_status(self, shipment_id: str, new_status: str, user_id: str, metadata: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Update shipment status with audit trail.

        Delegates to the shipment repository; *user_id* and *metadata* are
        forwarded for auditing purposes.
        """
        updated_shipment = await self.shipment_repository.update_shipment_status(
            shipment_id=shipment_id,
            new_status=new_status,
            user_id=user_id,
            metadata=metadata
        )
        return updated_shipment

    async def assign_shipments_to_route(self, route_id: str, shipment_ids: List[str], user_id: str) -> Dict[str, Any]:
        """
        Assign multiple shipments to a specific route.
        """
        result = await self.shipment_repository.assign_shipments_to_route(
            route_id=route_id,
            shipment_ids=shipment_ids,
            user_id=user_id
        )
        return result

    async def create_delivery_schedule(self, schedule_data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Create a delivery schedule for recurring deliveries between parent and child tenants.

        Args:
            schedule_data: Dictionary containing schedule information:
                - parent_tenant_id: UUID of parent tenant
                - child_tenant_id: UUID of child tenant
                - schedule_name: Human-readable name for the schedule
                - delivery_days: Comma-separated days (e.g., "Mon,Wed,Fri")
                - delivery_time: Time of day for delivery (HH:MM format)
                - auto_generate_orders: Boolean, whether to auto-generate orders
                - lead_time_days: Number of days lead time for orders
                - is_active: Boolean, whether schedule is active

        Returns:
            Created schedule record as returned by the repository.
            NOTE(review): the repository appears to return an ORM object
            (``.id`` attribute access below), not a plain dict — confirm
            the annotation against the repository implementation.
        """
        # Create schedule using repository
        try:
            # Ensure required fields are present by filling service-side defaults.
            # Mutates the caller's dict in place.
            if "delivery_days" not in schedule_data:
                schedule_data["delivery_days"] = "Mon,Wed,Fri"
            if "delivery_time" not in schedule_data:
                schedule_data["delivery_time"] = "09:00"
            if "auto_generate_orders" not in schedule_data:
                schedule_data["auto_generate_orders"] = True
            if "lead_time_days" not in schedule_data:
                schedule_data["lead_time_days"] = 1
            if "is_active" not in schedule_data:
                schedule_data["is_active"] = True

            created_schedule = await self.schedule_repository.create_schedule(schedule_data)

            logger.info(
                f"Created delivery schedule {created_schedule.id} for parent {schedule_data.get('parent_tenant_id')} "
                f"to child {schedule_data.get('child_tenant_id')}"
            )

            return created_schedule

        except Exception as e:
            logger.error(f"Error creating delivery schedule: {e}")
            raise

    # VRP Optimization Service Methods
    async def get_route_by_id(self, route_id: str) -> Optional[Dict[str, Any]]:
        """
        Get a specific delivery route by ID, or None if it does not exist.
        """
        return await self.route_repository.get_route_by_id(route_id)

    async def update_route_vrp_metrics(self, route_id: str, vrp_metrics: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """
        Update VRP optimization metrics for a route.
        """
        return await self.route_repository.update_route_vrp_metrics(route_id, vrp_metrics)

    async def get_routes_by_tenant(self, tenant_id: str, limit: Optional[int] = None, offset: Optional[int] = None, order_by: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Get all routes for a specific tenant with pagination and ordering.
        """
        return await self.route_repository.get_routes_by_tenant(tenant_id, limit, offset, order_by)
|
||||
457
services/distribution/app/services/routing_optimizer.py
Normal file
457
services/distribution/app/services/routing_optimizer.py
Normal file
@@ -0,0 +1,457 @@
|
||||
"""
|
||||
Routing optimizer for the distribution service using Google OR-Tools VRP
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional, Tuple
|
||||
from datetime import datetime, timedelta
|
||||
import time
|
||||
|
||||
# Google OR-Tools - Vehicle Routing Problem
|
||||
try:
|
||||
from ortools.constraint_solver import routing_enums_pb2
|
||||
from ortools.constraint_solver import pywrapcp
|
||||
HAS_ORTOOLS = True
|
||||
except ImportError:
|
||||
print("Warning: OR-Tools not installed. Using fallback routing algorithm.")
|
||||
HAS_ORTOOLS = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RoutingOptimizer:
    """
    Vehicle Routing Problem optimizer using Google OR-Tools.

    When OR-Tools is unavailable at import time, or the solver fails or
    raises, a nearest-first sequential heuristic is used instead so callers
    always receive a plan. Distances are great-circle (haversine) estimates,
    not road distances.
    """

    def __init__(self):
        # Snapshot of the module-level availability flag set at import time.
        self.has_ortools = HAS_ORTOOLS

    async def optimize_daily_routes(
        self,
        deliveries: List[Dict[str, Any]],
        depot_location: Tuple[float, float],
        vehicle_capacity_kg: Optional[float] = 1000.0,
        time_limit_seconds: float = 30.0
    ) -> Dict[str, Any]:
        """
        Optimize daily delivery routes using VRP.

        Args:
            deliveries: List of delivery dictionaries with keys:
                - id: str - delivery ID
                - location: Tuple[float, float] - (lat, lng)
                - weight_kg: float - weight of delivery
                - time_window: Optional[Tuple[str, str]] - delivery time window ("HH:MM" strings)
            depot_location: Tuple[float, float] - depot location (lat, lng)
            vehicle_capacity_kg: Maximum weight capacity per vehicle
            time_limit_seconds: Time limit for optimization (timeout)

        Returns:
            Dict with optimized route sequences and metadata:
            'routes', 'total_distance_km', 'optimization_time_seconds',
            'algorithm_used', 'status'.
        """
        if not self.has_ortools:
            logger.warning("OR-Tools not available, using fallback sequential routing")
            return self._fallback_sequential_routing(deliveries, depot_location)

        start_time = time.time()

        try:
            # Prepare data for VRP
            locations = [depot_location]  # Depot is first location (index 0)
            demands = [0]  # Depot has no demand
            time_windows = [(0, 24*60)]  # Depot available all day (in minutes from midnight)

            # Maps VRP node index (1-based; 0 is the depot) -> delivery ID.
            delivery_mapping = {}
            for i, delivery in enumerate(deliveries, 1):
                locations.append(delivery['location'])
                # Ensure demands are integers for OR-Tools compatibility
                weight_kg = delivery.get('weight_kg', 0)
                demands.append(int(weight_kg) if isinstance(weight_kg, (int, float)) else 0)

                # Convert time windows to minutes from midnight
                time_window = delivery.get('time_window', None)
                if time_window:
                    start_time_str, end_time_str = time_window
                    start_minutes = self._time_to_minutes(start_time_str)
                    end_minutes = self._time_to_minutes(end_time_str)
                    time_windows.append((int(start_minutes), int(end_minutes)))
                else:
                    time_windows.append((0, 24*60))  # Default to all day if no time window

                delivery_mapping[i] = delivery['id']

            # Check if we have no deliveries (only depot), return early with empty route
            if len(locations) <= 1:  # Only depot, no deliveries
                logger.info("No deliveries to optimize, returning empty route")
                return {
                    'routes': [],
                    'total_distance_km': 0,
                    'optimization_time_seconds': time.time() - start_time,
                    'algorithm_used': 'ortools_vrp',
                    'status': 'success'
                }

            # Calculate total demand first before checking it
            total_demand = sum(demands)

            # Check if total demand is 0 but we have deliveries - handle this case too
            # (a zero-demand model would otherwise be pointless to solve).
            if total_demand == 0 and len(locations) > 1:
                logger.info("Total demand is 0 but deliveries exist, returning simple route")
                # Create simple route with all deliveries but no capacity constraints,
                # in input order.
                simple_route = {
                    'route_number': 1,
                    'route_sequence': [delivery_mapping[i] for i in range(1, len(locations))],
                    'stops': [{
                        'stop_number': i,
                        'delivery_id': delivery_mapping.get(i, f"delivery_{i}"),
                        'sequence': i - 1
                    } for i in range(1, len(locations))],
                    'total_weight_kg': 0
                }
                return {
                    'routes': [simple_route],
                    'total_distance_km': 0,
                    'optimization_time_seconds': time.time() - start_time,
                    'algorithm_used': 'ortools_vrp_zero_demand',
                    'status': 'success'
                }

            # Calculate distance matrix using haversine formula (entries in meters)
            distance_matrix = self._calculate_distance_matrix(locations)

            # Create VRP model
            # Calculate required vehicles (total_demand already calculated above)
            # Ensure at least 1 vehicle, and enough to cover demand plus buffer
            min_vehicles = max(1, int(total_demand / vehicle_capacity_kg) + 1)
            # Add a buffer vehicle just in case
            num_vehicles = int(min_vehicles + 1)

            logger.info(f"VRP Optimization: Demand={total_demand}kg, Capacity={vehicle_capacity_kg}kg, Vehicles={num_vehicles}")

            # Create VRP model
            manager = pywrapcp.RoutingIndexManager(
                len(distance_matrix),  # number of locations
                num_vehicles,  # number of vehicles
                [0] * num_vehicles,  # depot index for starts
                [0] * num_vehicles  # depot index for ends
            )

            routing = pywrapcp.RoutingModel(manager)

            def distance_callback(from_index, to_index):
                """Returns the distance between the two nodes."""
                from_node = manager.IndexToNode(from_index)
                to_node = manager.IndexToNode(to_index)
                return distance_matrix[from_node][to_node]

            transit_callback_index = routing.RegisterTransitCallback(distance_callback)
            routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)

            # Add capacity constraint
            def demand_callback(index):
                """Returns the demand of the node."""
                node = manager.IndexToNode(index)
                return int(demands[node])  # Ensure demands are integers

            demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
            routing.AddDimensionWithVehicleCapacity(
                demand_callback_index,
                0,  # null capacity slack
                [int(vehicle_capacity_kg)] * num_vehicles,  # vehicle maximum capacities (as integers)
                True,  # start cumul to zero
                'Capacity'
            )

            # Add time window constraint
            def time_callback(from_index, to_index):
                """Returns the travel time between the two nodes."""
                from_node = manager.IndexToNode(from_index)
                to_node = manager.IndexToNode(to_index)
                # Calculate travel time based on distance (meters) and assumed speed (km/h)
                distance_m = distance_matrix[from_node][to_node]
                distance_km = distance_m / 1000.0  # Convert meters to km
                # Assume 30 km/h average speed for city deliveries
                travel_time_minutes = (distance_km / 30.0) * 60.0
                return int(travel_time_minutes)

            time_callback_index = routing.RegisterTransitCallback(time_callback)
            routing.AddDimension(
                time_callback_index,
                60 * 24,  # Allow waiting time (24 hours in minutes)
                60 * 24,  # Maximum time per vehicle (24 hours in minutes)
                False,  # Don't force start cumul to zero
                'Time'
            )
            time_dimension = routing.GetDimensionOrDie('Time')

            # Add time window constraints for each location
            for location_idx in range(len(locations)):
                index = manager.NodeToIndex(location_idx)
                if index != -1:  # Valid index
                    min_time, max_time = time_windows[location_idx]
                    time_dimension.CumulVar(index).SetRange(int(min_time), int(max_time))

            # Setting first solution heuristic
            search_parameters = pywrapcp.DefaultRoutingSearchParameters()
            search_parameters.first_solution_strategy = (
                routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC
            )
            # NOTE(review): a float is passed to Duration.FromSeconds; some
            # protobuf versions only accept an int here — verify, or cast.
            search_parameters.time_limit.FromSeconds(time_limit_seconds)

            # Solve the problem
            solution = routing.SolveWithParameters(search_parameters)

            # Check if solution was found
            if solution:
                optimized_routes = self._extract_routes(routing, manager, solution, delivery_mapping)

                # Calculate total distance and duration
                # NOTE(review): _extract_routes never populates 'distance_to_next',
                # so these totals come out as 0; total_duration is never updated
                # or returned.
                total_distance = 0
                total_duration = 0
                for route in optimized_routes:
                    route_distance = 0
                    for stop in route['stops']:
                        route_distance += stop.get('distance_to_next', 0)
                    route['total_distance_km'] = route_distance
                    total_distance += route_distance

                logger.info(f"VRP optimization completed in {time.time() - start_time:.2f}s")

                return {
                    'routes': optimized_routes,
                    'total_distance_km': total_distance,
                    'optimization_time_seconds': time.time() - start_time,
                    'algorithm_used': 'ortools_vrp',
                    'status': 'success'
                }
            else:
                logger.warning("OR-Tools failed to find solution, using fallback routing")
                return self._fallback_sequential_routing(deliveries, depot_location)

        except Exception as e:
            logger.error(f"Error in VRP optimization: {e}")
            # Fallback to simple sequential routing
            return self._fallback_sequential_routing(deliveries, depot_location)

    def _calculate_distance_matrix(self, locations: List[Tuple[float, float]]) -> List[List[int]]:
        """
        Calculate distance matrix using haversine formula (in meters).

        Returns an NxN matrix of integer meter distances; the diagonal is 0.
        OR-Tools requires integer arc costs, hence the int() truncation.
        """
        import math

        def haversine_distance(lat1, lon1, lat2, lon2):
            """Calculate distance between two lat/lon points in meters"""
            R = 6371000  # Earth's radius in meters

            lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])

            dlat = lat2 - lat1
            dlon = lon2 - lon1

            a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
            c = 2 * math.asin(math.sqrt(a))

            return R * c  # Distance in meters

        n = len(locations)
        matrix = [[0] * n for _ in range(n)]

        for i in range(n):
            for j in range(n):
                if i != j:
                    lat1, lon1 = locations[i]
                    lat2, lon2 = locations[j]
                    dist_m = haversine_distance(lat1, lon1, lat2, lon2)
                    matrix[i][j] = int(dist_m)

        return matrix

    def _extract_routes(self, routing, manager, solution, delivery_mapping) -> List[Dict[str, Any]]:
        """
        Extract routes from OR-Tools solution.

        Unused vehicles (those that go straight from start to end) are
        skipped. Each returned route has 'stops' (depot-start, deliveries,
        depot-end) and a 'route_sequence' of delivery IDs only.
        """
        routes = []

        for vehicle_id in range(manager.GetNumberOfVehicles()):
            index = routing.Start(vehicle_id)

            # Skip if vehicle is not used (Start -> End directly)
            if routing.IsEnd(solution.Value(routing.NextVar(index))):
                continue

            current_route = {
                'route_number': vehicle_id + 1,
                'stops': [],
                'total_weight_kg': 0
            }

            # Initialize route sequence to store the delivery IDs in visit order
            route_sequence = []

            # Add depot as first stop
            node_index = manager.IndexToNode(index)
            delivery_id = delivery_mapping.get(node_index, f"depot_{node_index}")

            current_route['stops'].append({
                'stop_number': 1,
                'delivery_id': delivery_id,
                'location': 'depot',
                'sequence': 0
            })

            stop_number = 1

            # Walk the solution chain for this vehicle until back at depot.
            while not routing.IsEnd(index):
                index = solution.Value(routing.NextVar(index))
                node_index = manager.IndexToNode(index)

                if node_index != 0:  # Not depot
                    stop_number += 1
                    delivery_id = delivery_mapping.get(node_index, f"delivery_{node_index}")
                    current_route['stops'].append({
                        'stop_number': stop_number,
                        'delivery_id': delivery_id,
                        'location_index': node_index,
                        'sequence': stop_number
                    })

                    # Add delivery ID to route sequence (excluding depot stops)
                    route_sequence.append(delivery_id)
                else:  # Back to depot
                    stop_number += 1
                    current_route['stops'].append({
                        'stop_number': stop_number,
                        'delivery_id': f"depot_end_{vehicle_id + 1}",
                        'location': 'depot',
                        'sequence': stop_number
                    })
                    break

            # Add the route_sequence to the current route
            current_route['route_sequence'] = route_sequence
            routes.append(current_route)

        return routes

    def _time_to_minutes(self, time_str: str) -> int:
        """
        Convert HH:MM string to minutes from midnight.

        Accepts either "HH:MM" or a bare numeric string already in minutes.
        """
        if ":" in time_str:
            hour, minute = map(int, time_str.split(":"))
            return hour * 60 + minute
        else:
            # If it's already in minutes, return as is
            return int(time_str)

    def _fallback_sequential_routing(self, deliveries: List[Dict[str, Any]], depot_location: Tuple[float, float]) -> Dict[str, Any]:
        """
        Fallback routing algorithm that sequences deliveries sequentially.

        Produces a single route visiting deliveries in order of increasing
        straight-line distance from the depot (nearest first). Ignores
        vehicle capacity and time windows — best-effort only.
        """
        import math

        def haversine_distance(lat1, lon1, lat2, lon2):
            """Calculate distance between two lat/lon points in km"""
            R = 6371  # Earth's radius in km

            lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])

            dlat = lat2 - lat1
            dlon = lon2 - lon1

            a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
            c = 2 * math.asin(math.sqrt(a))

            return R * c  # Distance in km

        # Calculate distances from depot to each delivery and between deliveries
        deliveries_with_distance = []
        for delivery in deliveries:
            lat, lon = delivery['location']
            depot_lat, depot_lon = depot_location
            dist = haversine_distance(depot_lat, depot_lon, lat, lon)
            deliveries_with_distance.append({
                **delivery,
                'distance_from_depot': dist
            })

        # Sort deliveries by distance from depot (nearest first)
        deliveries_with_distance.sort(key=lambda x: x['distance_from_depot'])

        # Create simple route
        route_stops = []
        total_distance = 0

        # Start from depot
        route_stops.append({
            'stop_number': 1,
            'delivery_id': 'depot_start',
            'location': depot_location,
            'sequence': 0,
            'is_depot': True
        })

        # Add deliveries
        for i, delivery in enumerate(deliveries_with_distance, 1):
            route_stops.append({
                'stop_number': i + 1,
                'delivery_id': delivery['id'],
                'location': delivery['location'],
                'weight_kg': delivery.get('weight_kg', 0),
                'sequence': i,
                'is_depot': False
            })

        # Return to depot
        route_stops.append({
            'stop_number': len(deliveries_with_distance) + 2,
            'delivery_id': 'depot_end',
            'location': depot_location,
            'sequence': len(deliveries_with_distance) + 1,
            'is_depot': True
        })

        # Calculate total distance leg by leg; depot-to-depot legs (only
        # possible when there are no deliveries) are skipped entirely.
        for i in range(len(route_stops) - 1):
            current_stop = route_stops[i]
            next_stop = route_stops[i + 1]

            if not current_stop['is_depot'] or not next_stop['is_depot']:
                if not current_stop['is_depot'] and not next_stop['is_depot']:
                    # Between two deliveries
                    curr_lat, curr_lon = current_stop['location']
                    next_lat, next_lon = next_stop['location']
                    dist = haversine_distance(curr_lat, curr_lon, next_lat, next_lon)
                elif current_stop['is_depot'] and not next_stop['is_depot']:
                    # From depot to delivery
                    depot_lat, depot_lon = current_stop['location']
                    del_lat, del_lon = next_stop['location']
                    dist = haversine_distance(depot_lat, depot_lon, del_lat, del_lon)
                elif not current_stop['is_depot'] and next_stop['is_depot']:
                    # From delivery to depot
                    del_lat, del_lon = current_stop['location']
                    depot_lat, depot_lon = next_stop['location']
                    dist = haversine_distance(del_lat, del_lon, depot_lat, depot_lon)
                else:
                    dist = 0  # depot to depot

                total_distance += dist
                route_stops[i]['distance_to_next'] = dist

        # Create route sequence from delivery IDs in the order they appear
        route_sequence = [stop['delivery_id'] for stop in route_stops if not stop.get('is_depot', False)]

        return {
            'routes': [{
                'route_number': 1,
                'stops': route_stops,
                'route_sequence': route_sequence,
                'total_distance_km': total_distance,
                'total_weight_kg': sum(d.get('weight_kg', 0) for d in deliveries),
            }],
            'total_distance_km': total_distance,
            'optimization_time_seconds': 0,
            'algorithm_used': 'fallback_sequential',
            'status': 'success'
        }
|
||||
357
services/distribution/app/services/vrp_optimization_service.py
Normal file
357
services/distribution/app/services/vrp_optimization_service.py
Normal file
@@ -0,0 +1,357 @@
|
||||
"""
|
||||
VRP Optimization Service
|
||||
Business logic for VRP optimization and metrics management
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.repositories.delivery_route_repository import DeliveryRouteRepository
|
||||
from app.services.routing_optimizer import RoutingOptimizer
|
||||
from app.core.database import get_db
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class VRPOptimizationService:
    """
    Service for VRP (Vehicle Routing Problem) optimization operations.

    Coordinates the RoutingOptimizer with route persistence: optimizes
    individual routes, records the resulting VRP metrics on the route via the
    distribution service, and aggregates/reports those metrics per tenant.
    """

    def __init__(self, distribution_service: "DistributionService", database_manager: Any):
        """
        Initialize VRP optimization service.

        Args:
            distribution_service: Distribution service instance, used for
                route lookups (``get_route_by_id``) and for persisting VRP
                metrics (``update_route_vrp_metrics``).
            database_manager: Database manager for session management.
        """
        self.distribution_service = distribution_service
        self.database_manager = database_manager
        self.routing_optimizer = RoutingOptimizer()
        # BUG FIX: several methods below dereferenced ``self.route_repository``
        # but the attribute was never assigned, so they raised AttributeError
        # at runtime.  Resolve it from the distribution service when it
        # exposes one; otherwise it stays None and the failure is explicit.
        # NOTE(review): confirm DistributionService exposes a
        # ``route_repository`` attribute (see app/api/dependencies.py wiring).
        self.route_repository = getattr(distribution_service, "route_repository", None)

    async def optimize_route(
        self,
        tenant_id: str,
        route_id: str,
        optimization_params: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Optimize a specific delivery route using VRP.

        Args:
            tenant_id: Tenant ID.
            route_id: Route ID to optimize.
            optimization_params: Optimization parameters; recognized keys are
                ``depot_location`` (lat/lon tuple, default ``(0.0, 0.0)``),
                ``vehicle_capacity_kg`` (default ``1000.0``) and
                ``time_limit_seconds`` (default ``30.0``).

        Returns:
            Dict with ``success``, ``route_id``, ``optimization_metrics`` and
            ``optimized_route``.

        Raises:
            ValueError: If the route does not exist.
            Exception: Re-raised from the optimizer after logging.
        """
        try:
            # Get the current route using the distribution service
            route = await self.distribution_service.get_route_by_id(route_id)
            if not route:
                raise ValueError(f"Route {route_id} not found")

            # Extract deliveries from the stored route sequence
            deliveries = self._extract_deliveries_from_route(route)

            # Perform VRP optimization with caller-supplied or default params
            depot_location = optimization_params.get('depot_location', (0.0, 0.0))
            vehicle_capacity = optimization_params.get('vehicle_capacity_kg', 1000.0)
            time_limit = optimization_params.get('time_limit_seconds', 30.0)

            optimization_result = await self.routing_optimizer.optimize_daily_routes(
                deliveries=deliveries,
                depot_location=depot_location,
                vehicle_capacity_kg=vehicle_capacity,
                time_limit_seconds=time_limit
            )

            # Build the VRP metrics payload persisted onto the route.
            # NOTE: datetime.utcnow() is deprecated in Python 3.12+; kept for
            # behavioral compatibility with existing stored timestamps —
            # consider datetime.now(timezone.utc) in a follow-up.
            vrp_metrics = {
                'vrp_optimization_savings': {
                    'distance_saved_km': optimization_result.get('distance_savings_km', 0.0),
                    'time_saved_minutes': optimization_result.get('time_savings_minutes', 0.0),
                    'cost_saved': optimization_result.get('cost_savings', 0.0)
                },
                'vrp_algorithm_version': 'or-tools-v1.0',
                'vrp_optimization_timestamp': datetime.utcnow(),
                'vrp_constraints_satisfied': optimization_result.get('constraints_satisfied', True),
                'vrp_objective_value': optimization_result.get('objective_value', 0.0)
            }

            # Persist the metrics through the distribution service
            await self.distribution_service.update_route_vrp_metrics(route_id, vrp_metrics)

            return {
                'success': True,
                'route_id': route_id,
                'optimization_metrics': vrp_metrics,
                'optimized_route': optimization_result.get('optimized_route', [])
            }

        except Exception as e:
            logger.error("vrp_optimization_failed", error=str(e), route_id=route_id)
            raise

    def _extract_deliveries_from_route(self, route: Any) -> List[Dict[str, Any]]:
        """
        Extract delivery dictionaries from a route's stored sequence.

        Args:
            route: Delivery route object; only ``route.route_sequence`` is
                read (an iterable of dicts with ``id``, ``lat``, ``lng``,
                ``weight_kg`` and optional ``time_window`` keys).

        Returns:
            List of delivery dicts shaped for the routing optimizer, each
            with ``id``, ``location`` (lat, lng) tuple, ``weight_kg`` and
            ``time_window``. Missing fields default to '' / 0.0 / None.
        """
        deliveries = []
        route_sequence = route.route_sequence or []

        for stop in route_sequence:
            deliveries.append({
                'id': stop.get('id', ''),
                'location': (stop.get('lat', 0.0), stop.get('lng', 0.0)),
                'weight_kg': stop.get('weight_kg', 0.0),
                'time_window': stop.get('time_window')
            })

        return deliveries

    async def get_route_optimization_metrics(
        self,
        tenant_id: str,
        route_id: str
    ) -> Dict[str, Any]:
        """
        Get VRP optimization metrics for a specific route.

        Args:
            tenant_id: Tenant ID.
            route_id: Route ID.

        Returns:
            Dict of the route's stored VRP metric fields.

        Raises:
            ValueError: If the route does not exist.
        """
        # BUG FIX: previously called self.route_repository.get_route_by_id,
        # but route_repository was never initialized; use the distribution
        # service lookup for consistency with optimize_route().
        route = await self.distribution_service.get_route_by_id(route_id)
        if not route:
            raise ValueError(f"Route {route_id} not found")

        return {
            'vrp_optimization_savings': route.vrp_optimization_savings,
            'vrp_algorithm_version': route.vrp_algorithm_version,
            'vrp_optimization_timestamp': route.vrp_optimization_timestamp,
            'vrp_constraints_satisfied': route.vrp_constraints_satisfied,
            'vrp_objective_value': route.vrp_objective_value
        }

    async def get_network_optimization_summary(
        self,
        tenant_id: str
    ) -> Dict[str, Any]:
        """
        Get VRP optimization summary across all routes for a tenant.

        Args:
            tenant_id: Tenant ID.

        Returns:
            Dict with totals, optimization rate and per-route average
            savings (distance, time, cost). Routes are counted as optimized
            when they carry a ``vrp_optimization_timestamp``.
        """
        routes = await self.route_repository.get_routes_by_tenant(tenant_id)

        total_optimized = 0
        total_distance_saved = 0.0
        total_time_saved = 0.0
        total_cost_saved = 0.0

        for route in routes:
            if route.vrp_optimization_timestamp:
                total_optimized += 1
                savings = route.vrp_optimization_savings or {}
                total_distance_saved += savings.get('distance_saved_km', 0.0)
                total_time_saved += savings.get('time_saved_minutes', 0.0)
                total_cost_saved += savings.get('cost_saved', 0.0)

        return {
            'total_routes': len(routes),
            'total_optimized_routes': total_optimized,
            # Guard against division by zero for tenants with no routes
            'optimization_rate': total_optimized / len(routes) if routes else 0.0,
            'total_distance_saved_km': total_distance_saved,
            'total_time_saved_minutes': total_time_saved,
            'total_cost_saved': total_cost_saved,
            'average_savings_per_route': {
                'distance_km': total_distance_saved / total_optimized if total_optimized > 0 else 0.0,
                'time_minutes': total_time_saved / total_optimized if total_optimized > 0 else 0.0,
                'cost': total_cost_saved / total_optimized if total_optimized > 0 else 0.0
            }
        }

    async def batch_optimize_routes(
        self,
        tenant_id: str,
        route_ids: List[str],
        optimization_params: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Batch optimize multiple routes, collecting per-route outcomes.

        Failures on individual routes are captured in the result list
        rather than aborting the batch.

        Args:
            tenant_id: Tenant ID.
            route_ids: List of route IDs to optimize.
            optimization_params: Optimization parameters forwarded to
                :meth:`optimize_route`.

        Returns:
            Dict with ``total_routes``, ``successful_optimizations``,
            ``failed_optimizations`` and per-route ``results``.
        """
        results = []

        for route_id in route_ids:
            try:
                result = await self.optimize_route(tenant_id, route_id, optimization_params)
                results.append({
                    'route_id': route_id,
                    'success': True,
                    'metrics': result['optimization_metrics']
                })
            except Exception as e:
                # Record the failure and continue with the remaining routes
                results.append({
                    'route_id': route_id,
                    'success': False,
                    'error': str(e)
                })

        return {
            'total_routes': len(route_ids),
            'successful_optimizations': sum(1 for r in results if r['success']),
            'failed_optimizations': sum(1 for r in results if not r['success']),
            'results': results
        }

    async def validate_optimization_constraints(
        self,
        tenant_id: str,
        route_id: str
    ) -> Dict[str, Any]:
        """
        Validate VRP optimization constraints for a route.

        Args:
            tenant_id: Tenant ID.
            route_id: Route ID.

        Returns:
            Dict describing whether the route has been optimized and whether
            its stored constraints were satisfied.

        Raises:
            ValueError: If the route does not exist.
        """
        # BUG FIX: previously called self.route_repository.get_route_by_id,
        # but route_repository was never initialized; use the distribution
        # service lookup for consistency with optimize_route().
        route = await self.distribution_service.get_route_by_id(route_id)
        if not route:
            raise ValueError(f"Route {route_id} not found")

        # A route with no optimization timestamp has never been optimized
        if not route.vrp_optimization_timestamp:
            return {
                'route_id': route_id,
                'is_optimized': False,
                'constraints_valid': False,
                'message': 'Route has not been optimized yet'
            }

        # Treat a missing/None flag as "not satisfied"
        constraints_valid = route.vrp_constraints_satisfied or False

        return {
            'route_id': route_id,
            'is_optimized': True,
            'constraints_valid': constraints_valid,
            'vrp_algorithm_version': route.vrp_algorithm_version,
            'optimization_timestamp': route.vrp_optimization_timestamp
        }

    async def get_optimization_history(
        self,
        tenant_id: str,
        limit: int = 50,
        offset: int = 0
    ) -> Dict[str, Any]:
        """
        Get VRP optimization history for a tenant.

        Args:
            tenant_id: Tenant ID.
            limit: Maximum number of route records to fetch.
            offset: Pagination offset.

        Returns:
            Dict with ``total_records`` and a ``history`` list; only routes
            that carry a ``vrp_optimization_timestamp`` are included, so
            ``total_records`` may be smaller than ``limit``.
        """
        routes = await self.route_repository.get_routes_by_tenant(
            tenant_id,
            limit=limit,
            offset=offset,
            order_by='vrp_optimization_timestamp DESC'
        )

        history = []
        for route in routes:
            if route.vrp_optimization_timestamp:
                history.append({
                    'route_id': str(route.id),
                    'route_number': route.route_number,
                    'optimization_timestamp': route.vrp_optimization_timestamp,
                    'algorithm_version': route.vrp_algorithm_version,
                    'constraints_satisfied': route.vrp_constraints_satisfied,
                    'objective_value': route.vrp_objective_value,
                    'savings': route.vrp_optimization_savings
                })

        return {
            'total_records': len(history),
            'history': history
        }

    async def simulate_optimization(
        self,
        tenant_id: str,
        route_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Simulate VRP optimization without saving results.

        Unlike :meth:`optimize_route`, errors are returned in the response
        rather than re-raised, and nothing is persisted.

        Args:
            tenant_id: Tenant ID.
            route_data: Route data for simulation; recognized keys are
                ``deliveries``, ``depot_location``, ``vehicle_capacity_kg``
                and ``time_limit_seconds``.

        Returns:
            Dict with ``success`` and either ``simulation_results`` plus
            ``estimated_savings``, or ``error``.
        """
        try:
            deliveries = route_data.get('deliveries', [])
            depot_location = route_data.get('depot_location', (0.0, 0.0))
            vehicle_capacity = route_data.get('vehicle_capacity_kg', 1000.0)
            time_limit = route_data.get('time_limit_seconds', 30.0)

            simulation_result = await self.routing_optimizer.optimize_daily_routes(
                deliveries=deliveries,
                depot_location=depot_location,
                vehicle_capacity_kg=vehicle_capacity,
                time_limit_seconds=time_limit
            )

            return {
                'success': True,
                'simulation_results': simulation_result,
                'estimated_savings': {
                    'distance_km': simulation_result.get('distance_savings_km', 0.0),
                    'time_minutes': simulation_result.get('time_savings_minutes', 0.0),
                    'cost': simulation_result.get('cost_savings', 0.0)
                }
            }

        except Exception as e:
            logger.error("vrp_simulation_failed", error=str(e))
            return {
                'success': False,
                'error': str(e)
            }
|
||||
Reference in New Issue
Block a user