New enterprise feature

Urtzi Alfaro
2025-11-30 09:12:40 +01:00
parent f9d0eec6ec
commit 972db02f6d
176 changed files with 19741 additions and 1361 deletions

View File

@@ -0,0 +1,81 @@
"""
Dependency Injection for Distribution Service
"""
from typing import AsyncGenerator
from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.core.config import settings
from app.repositories.delivery_route_repository import DeliveryRouteRepository
from app.repositories.shipment_repository import ShipmentRepository
from app.repositories.delivery_schedule_repository import DeliveryScheduleRepository
from app.services.distribution_service import DistributionService
from app.services.routing_optimizer import RoutingOptimizer
from shared.clients.tenant_client import TenantServiceClient
from shared.clients.inventory_client import InventoryServiceClient
from shared.clients.procurement_client import ProcurementServiceClient
async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
"""Get database session dependency"""
async for session in get_db():
yield session
async def get_route_repository(db_session: AsyncSession = Depends(get_db_session)) -> DeliveryRouteRepository:
"""Get delivery route repository dependency"""
return DeliveryRouteRepository(db_session)
async def get_shipment_repository(db_session: AsyncSession = Depends(get_db_session)) -> ShipmentRepository:
"""Get shipment repository dependency"""
return ShipmentRepository(db_session)
async def get_delivery_schedule_repository(db_session: AsyncSession = Depends(get_db_session)) -> DeliveryScheduleRepository:
"""Get delivery schedule repository dependency"""
return DeliveryScheduleRepository(db_session)
def get_tenant_client() -> TenantServiceClient:
"""Get tenant service client dependency"""
return TenantServiceClient(settings)
def get_inventory_client() -> InventoryServiceClient:
"""Get inventory service client dependency"""
return InventoryServiceClient(settings)
def get_procurement_client() -> ProcurementServiceClient:
"""Get procurement service client dependency"""
return ProcurementServiceClient(settings)
def get_routing_optimizer() -> RoutingOptimizer:
"""Get routing optimizer service dependency"""
return RoutingOptimizer()
def get_distribution_service(
route_repository: DeliveryRouteRepository = Depends(get_route_repository),
shipment_repository: ShipmentRepository = Depends(get_shipment_repository),
schedule_repository: DeliveryScheduleRepository = Depends(get_delivery_schedule_repository),
tenant_client: TenantServiceClient = Depends(get_tenant_client),
inventory_client: InventoryServiceClient = Depends(get_inventory_client),
procurement_client: ProcurementServiceClient = Depends(get_procurement_client),
routing_optimizer: RoutingOptimizer = Depends(get_routing_optimizer)
) -> DistributionService:
"""Get distribution service dependency with all required clients"""
return DistributionService(
route_repository=route_repository,
shipment_repository=shipment_repository,
schedule_repository=schedule_repository,
tenant_client=tenant_client,
inventory_client=inventory_client,
procurement_client=procurement_client,
routing_optimizer=routing_optimizer
)
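
As a usage sketch (the route path and handler below are illustrative, not part of this commit), an endpoint only needs to declare the top-level dependency; FastAPI then resolves the whole chain (session, repositories, clients, optimizer) per request:

from datetime import date
from fastapi import APIRouter, Depends

from app.api.dependencies import get_distribution_service
from app.services.distribution_service import DistributionService

example_router = APIRouter()

@example_router.get("/tenants/{tenant_id}/distribution/routes/today")
async def list_todays_routes(
    tenant_id: str,
    service: DistributionService = Depends(get_distribution_service),
):
    # The session and every repository/client above are constructed for this request.
    routes = await service.get_delivery_routes_for_date(tenant_id, date.today())
    return {"routes": routes}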

View File

@@ -0,0 +1,452 @@
"""
Internal Demo API for Distribution Service
Handles internal demo setup for enterprise tier
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any, List, Optional
import structlog
from datetime import datetime
import uuid
from app.services.distribution_service import DistributionService
from app.api.dependencies import get_distribution_service
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
async def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
required_key = settings.INTERNAL_API_KEY
# Fail closed: reject when the key is unset as well as when it does not match
if not required_key or x_internal_api_key != required_key:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
@router.post("/internal/demo/setup")
async def setup_demo_distribution(
setup_request: dict, # Contains parent_tenant_id, child_tenant_ids, session_id
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Internal endpoint to setup distribution for enterprise demo
Args:
setup_request: Contains parent_tenant_id, child_tenant_ids, session_id
"""
try:
parent_tenant_id = setup_request.get('parent_tenant_id')
child_tenant_ids = setup_request.get('child_tenant_ids', [])
session_id = setup_request.get('session_id')
if not all([parent_tenant_id, child_tenant_ids, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: parent_tenant_id, child_tenant_ids, session_id"
)
logger.info("Setting up demo distribution",
parent=parent_tenant_id,
children=child_tenant_ids,
session_id=session_id)
# Get locations for parent and children to set up delivery routes
parent_locations_response = await distribution_service.tenant_client.get_tenant_locations(parent_tenant_id)
# Check if parent_locations_response is None (which happens when the API call fails)
if not parent_locations_response:
logger.warning(f"No locations found for parent tenant {parent_tenant_id}")
raise HTTPException(
status_code=404,
detail=f"No locations found for parent tenant {parent_tenant_id}. "
f"Ensure the tenant exists and has locations configured."
)
# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
# Look for central production or warehouse location as fallback
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)
# If no specific central location found, use first available location
if not parent_location and parent_locations:
parent_location = parent_locations[0]
logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")
# BUG-013 FIX: Use HTTPException instead of ValueError
if not parent_location:
raise HTTPException(
status_code=404,
detail=f"No location found for parent tenant {parent_tenant_id} to use as distribution center. "
f"Ensure the parent tenant has at least one location configured."
)
# Create delivery schedules for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)
# Check if child_locations_response is None (which happens when the API call fails)
if not child_locations_response:
logger.warning(f"No locations found for child tenant {child_id}")
continue # Skip this child tenant and continue with the next one
# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if not child_location:
logger.warning(f"No location found for child tenant {child_id}")
continue
# Create delivery schedule
schedule_data = {
'tenant_id': child_id, # The child tenant that will receive deliveries
'target_parent_tenant_id': parent_tenant_id, # The parent tenant that supplies
'target_child_tenant_ids': [child_id], # Array of child tenant IDs in this schedule
'name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
'delivery_days': "Mon,Wed,Fri", # Three deliveries per week
'delivery_time': "09:00", # Morning delivery
'auto_generate_orders': True,
'lead_time_days': 1,
'is_active': True,
'created_by': parent_tenant_id, # BUG FIX: Add required created_by field
'updated_by': parent_tenant_id # BUG FIX: Add required updated_by field
}
# Create the delivery schedule record
schedule = await distribution_service.create_delivery_schedule(schedule_data)
logger.info(f"Created delivery schedule for {parent_tenant_id} to {child_id}")
except Exception as e:
logger.error(f"Error creating delivery schedule for child {child_id}: {e}", exc_info=True)
continue # Continue with the next child
# BUG-012 FIX: Use demo reference date instead of actual today
from datetime import date
from shared.utils.demo_dates import BASE_REFERENCE_DATE
# Get demo reference date from session metadata if available
session_metadata = setup_request.get('session_metadata', {})
session_created_at = session_metadata.get('session_created_at')
if session_created_at:
# Use the BASE_REFERENCE_DATE for consistent demo data dating
# All demo data is anchored to this date (November 25, 2025)
demo_today = BASE_REFERENCE_DATE
logger.info(f"Using demo reference date: {demo_today}")
else:
# Fallback to today if no session metadata (shouldn't happen in production)
demo_today = date.today()
logger.warning(f"No session_created_at in metadata, using today: {demo_today}")
delivery_data = []
# Prepare delivery information for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)
# Check if child_locations_response is None (which happens when the API call fails)
if not child_locations_response:
logger.warning(f"No locations found for child delivery {child_id}")
continue # Skip this child tenant and continue with the next one
# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if child_location:
# Ensure we have valid coordinates
latitude = child_location.get('latitude')
longitude = child_location.get('longitude')
if latitude is not None and longitude is not None:
try:
lat = float(latitude)
lng = float(longitude)
delivery_data.append({
'id': f"demo_delivery_{child_id}",
'child_tenant_id': child_id,
'location': (lat, lng),
'weight_kg': 150.0, # Fixed weight for demo
'po_id': f"demo_po_{child_id}", # Would be actual PO ID in real implementation
'items_count': 20
})
except (ValueError, TypeError):
logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"No location found for child delivery {child_id}, skipping")
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
# Optimize routes using VRP - ensure we have valid coordinates
parent_latitude = parent_location.get('latitude')
parent_longitude = parent_location.get('longitude')
# BUG-013 FIX: Use HTTPException for coordinate validation errors
if parent_latitude is None or parent_longitude is None:
logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
raise HTTPException(
status_code=400,
detail=f"Parent location {parent_tenant_id} missing coordinates. "
f"Latitude and longitude must be provided for distribution planning."
)
try:
depot_location = (float(parent_latitude), float(parent_longitude))
except (ValueError, TypeError) as e:
logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
raise HTTPException(
status_code=400,
detail=f"Parent location {parent_tenant_id} has invalid coordinates: {e}"
)
optimization_result = await distribution_service.routing_optimizer.optimize_daily_routes(
deliveries=delivery_data,
depot_location=depot_location,
vehicle_capacity_kg=1000.0 # Standard vehicle capacity
)
# BUG-012 FIX: Create the delivery route using demo reference date
routes = optimization_result.get('routes', [])
route_sequence = routes[0].get('route_sequence', []) if routes else []
# Use session_id suffix to ensure unique route numbers for concurrent demo sessions
session_suffix = session_id.split('_')[-1][:8] if session_id else '001'
route = await distribution_service.route_repository.create_route({
'tenant_id': uuid.UUID(parent_tenant_id),
'route_number': f"DEMO-{demo_today.strftime('%Y%m%d')}-{session_suffix}",
'route_date': datetime.combine(demo_today, datetime.min.time()),
'total_distance_km': optimization_result.get('total_distance_km', 0),
'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
'route_sequence': route_sequence,
'status': 'planned'
})
# BUG-012 FIX: Create shipment records using demo reference date
# Use session_id suffix to ensure unique shipment numbers
shipments = []
for idx, delivery in enumerate(delivery_data):
shipment = await distribution_service.shipment_repository.create_shipment({
'tenant_id': uuid.UUID(parent_tenant_id),
'parent_tenant_id': uuid.UUID(parent_tenant_id),
'child_tenant_id': uuid.UUID(delivery['child_tenant_id']),
'shipment_number': f"DEMOSHP-{demo_today.strftime('%Y%m%d')}-{session_suffix}-{idx+1:03d}",
'shipment_date': datetime.combine(demo_today, datetime.min.time()),
'status': 'pending',
'total_weight_kg': delivery['weight_kg']
})
shipments.append(shipment)
logger.info(f"Demo distribution setup completed: 1 route, {len(shipments)} shipments")
return {
"status": "completed",
"route_id": str(route['id']),
"shipment_count": len(shipments),
"total_distance_km": optimization_result.get('total_distance_km', 0),
"session_id": session_id
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to setup demo distribution: {str(e)}")
@router.post("/internal/demo/cleanup")
async def cleanup_demo_distribution(
cleanup_request: dict, # Contains parent_tenant_id, child_tenant_ids, session_id
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Internal endpoint to cleanup distribution data for enterprise demo
Args:
cleanup_request: Contains parent_tenant_id, child_tenant_ids, session_id
"""
try:
parent_tenant_id = cleanup_request.get('parent_tenant_id')
child_tenant_ids = cleanup_request.get('child_tenant_ids', [])
session_id = cleanup_request.get('session_id')
if not all([parent_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: parent_tenant_id, session_id"
)
logger.info("Cleaning up demo distribution",
parent=parent_tenant_id,
session_id=session_id)
# Delete all demo routes and shipments for this parent tenant
deleted_routes_count = await distribution_service.route_repository.delete_demo_routes_for_tenant(
tenant_id=parent_tenant_id
)
deleted_shipments_count = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
tenant_id=parent_tenant_id
)
logger.info(f"Demo distribution cleanup completed: {deleted_routes_count} routes, {deleted_shipments_count} shipments deleted")
return {
"status": "completed",
"routes_deleted": deleted_routes_count,
"shipments_deleted": deleted_shipments_count,
"session_id": session_id
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error cleaning up demo distribution: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to cleanup demo distribution: {str(e)}")
@router.get("/internal/health")
async def internal_health_check(
_: bool = Depends(verify_internal_api_key)
):
"""
Internal health check endpoint
"""
return {
"service": "distribution-service",
"endpoint": "internal-demo",
"status": "healthy",
"timestamp": datetime.utcnow().isoformat()
}
@router.post("/internal/demo/clone")
async def clone_demo_data(
clone_request: dict,
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone/Setup distribution data for a virtual demo tenant
Args:
clone_request: Contains base_tenant_id, virtual_tenant_id, session_id, demo_account_type
"""
try:
virtual_tenant_id = clone_request.get('virtual_tenant_id')
session_id = clone_request.get('session_id')
if not all([virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: virtual_tenant_id, session_id"
)
logger.info("Cloning distribution data",
virtual_tenant_id=virtual_tenant_id,
session_id=session_id)
# 1. Fetch child tenants for the new virtual parent
child_tenants = await distribution_service.tenant_client.get_child_tenants(virtual_tenant_id)
if not child_tenants:
logger.warning(f"No child tenants found for virtual parent {virtual_tenant_id}, skipping distribution setup")
return {
"status": "skipped",
"reason": "no_child_tenants",
"virtual_tenant_id": virtual_tenant_id
}
child_tenant_ids = [child['id'] for child in child_tenants]
# 2. Call existing setup logic
result = await distribution_service.setup_demo_enterprise_distribution(
parent_tenant_id=virtual_tenant_id,
child_tenant_ids=child_tenant_ids,
session_id=session_id
)
return {
"service": "distribution",
"status": "completed",
"records_cloned": result.get('shipment_count', 0) + 1, # shipments + 1 route
"details": result
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error cloning distribution data: {e}", exc_info=True)
# Don't fail the entire cloning process if distribution fails
return {
"service": "distribution",
"status": "failed",
"error": str(e)
}
@router.delete("/internal/demo/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all distribution data for a virtual demo tenant"""
try:
logger.info("Deleting distribution data", virtual_tenant_id=virtual_tenant_id)
# Reuse existing cleanup logic
deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(
tenant_id=virtual_tenant_id
)
deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
tenant_id=virtual_tenant_id
)
return {
"service": "distribution",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"routes": deleted_routes,
"shipments": deleted_shipments
}
}
except Exception as e:
logger.error(f"Error deleting distribution data: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
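
A caller-side sketch of the setup endpoint above, e.g. from an orchestration service; the base URL and example IDs are assumptions, and the X-Internal-API-Key header must match settings.INTERNAL_API_KEY:

import httpx

async def trigger_demo_distribution_setup(api_key: str) -> dict:
    # Payload shape mirrors what setup_demo_distribution reads from setup_request.
    payload = {
        "parent_tenant_id": "11111111-1111-1111-1111-111111111111",
        "child_tenant_ids": ["22222222-2222-2222-2222-222222222222"],
        "session_id": "demo_session_abc123",
        "session_metadata": {"session_created_at": "2025-11-25T09:00:00Z"},
    }
    async with httpx.AsyncClient(base_url="http://distribution-service:8000") as client:
        response = await client.post(
            "/internal/demo/setup",
            json=payload,
            headers={"X-Internal-API-Key": api_key},
        )
        response.raise_for_status()
        return response.json()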

View File

@@ -0,0 +1,225 @@
"""
API Routes for Distribution Service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Header
from typing import List, Optional, Dict, Any
from datetime import date, timedelta
import structlog
import os
from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
from app.core.config import settings
logger = structlog.get_logger()
async def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
required_key = settings.INTERNAL_API_KEY
# Fail closed: reject when the key is unset as well as when it does not match
if not required_key or x_internal_api_key != required_key:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
router = APIRouter()
@router.post("/tenants/{tenant_id}/distribution/plans/generate")
async def generate_daily_distribution_plan(
tenant_id: str,
target_date: date = Query(..., description="Date for which to generate distribution plan"),
vehicle_capacity_kg: float = Query(1000.0, description="Vehicle capacity in kg"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Generate daily distribution plan for internal transfers
**Enterprise Tier Feature**: Distribution and routing require Enterprise subscription.
"""
try:
# Validate subscription tier for distribution features
from shared.subscription.plans import PlanFeatures
from shared.clients import get_tenant_client
tenant_client = get_tenant_client(config=settings, service_name="distribution-service")
subscription = await tenant_client.get_tenant_subscription(tenant_id)
if not subscription:
raise HTTPException(
status_code=403,
detail="No active subscription found. Distribution routing requires Enterprise tier."
)
# Check if tier has distribution feature (enterprise only)
tier = subscription.get("plan", "starter")
if not PlanFeatures.has_feature(tier, "distribution_management"):
raise HTTPException(
status_code=403,
detail=f"Distribution routing requires Enterprise tier. Current tier: {tier}"
)
result = await distribution_service.generate_daily_distribution_plan(
parent_tenant_id=tenant_id,
target_date=target_date,
vehicle_capacity_kg=vehicle_capacity_kg
)
return result
except HTTPException:
raise
except Exception as e:
logger.error("Error generating distribution plan", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to generate distribution plan: {str(e)}")
@router.get("/tenants/{tenant_id}/distribution/routes")
async def get_delivery_routes(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for route filtering"),
date_to: Optional[date] = Query(None, description="End date for route filtering"),
status: Optional[str] = Query(None, description="Filter by route status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get delivery routes with optional filtering
"""
try:
# If no date range specified, default to today; fill whichever bound is missing
if not date_from and not date_to:
date_from = date.today()
date_to = date.today()
elif not date_to:
date_to = date_from
elif not date_from:
date_from = date_to
routes = []
current_date = date_from
while current_date <= date_to:
daily_routes = await distribution_service.get_delivery_routes_for_date(tenant_id, current_date)
routes.extend(daily_routes)
current_date = current_date + timedelta(days=1)
if status:
routes = [r for r in routes if r.get('status') == status]
return {"routes": routes}
except Exception as e:
logger.error("Error getting delivery routes", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to get delivery routes: {str(e)}")
@router.get("/tenants/{tenant_id}/distribution/shipments")
async def get_shipments(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
status: Optional[str] = Query(None, description="Filter by shipment status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get shipments with optional filtering
"""
try:
# If no date range specified, default to today; fill whichever bound is missing
if not date_from and not date_to:
date_from = date.today()
date_to = date.today()
elif not date_to:
date_to = date_from
elif not date_from:
date_from = date_to
shipments = []
current_date = date_from
while current_date <= date_to:
daily_shipments = await distribution_service.get_shipments_for_date(tenant_id, current_date)
shipments.extend(daily_shipments)
current_date = current_date + timedelta(days=1)
if status:
shipments = [s for s in shipments if s.get('status') == status]
return {"shipments": shipments}
except Exception as e:
logger.error("Error getting shipments", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")
@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
async def update_shipment_status(
tenant_id: str,
shipment_id: str,
status_update: dict, # Should be a Pydantic model in production
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Update shipment status
"""
try:
new_status = status_update.get('status')
if not new_status:
raise HTTPException(status_code=400, detail="Status is required")
user_id = "temp_user" # Would come from auth context
result = await distribution_service.update_shipment_status(
shipment_id=shipment_id,
new_status=new_status,
user_id=user_id,
metadata=status_update.get('metadata')
)
return result
except HTTPException:
raise
except Exception as e:
logger.error("Error updating shipment status", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")
@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
async def upload_delivery_proof(
tenant_id: str,
shipment_id: str,
delivery_proof: dict, # Should be a Pydantic model in production
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Upload delivery proof (signature, photo, etc.)
"""
try:
# Implementation would handle signature/photo upload
# This is a placeholder until proper models are created
raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
except HTTPException:
raise
except Exception as e:
logger.error("Error uploading delivery proof", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")
@router.get("/tenants/{tenant_id}/distribution/routes/{route_id}")
async def get_route_detail(
tenant_id: str,
route_id: str,
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get delivery route details
"""
try:
# Implementation would fetch detailed route information
# For now, return a simple response
routes = await distribution_service.get_delivery_routes_for_date(tenant_id, date.today())
route = next((r for r in routes if r.get('id') == route_id), None)
if not route:
raise HTTPException(status_code=404, detail="Route not found")
return route
except HTTPException:
raise
except Exception as e:
logger.error("Error getting route detail", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to get route detail: {str(e)}")
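
The Enterprise-tier check inlined in generate_daily_distribution_plan could be factored into a reusable dependency. A hedged sketch follows (the function name is hypothetical), assuming PlanFeatures.has_feature and get_tenant_client behave as used above; endpoints would then declare Depends(require_distribution_feature):

from fastapi import Depends, HTTPException

from app.core.config import settings
from shared.subscription.plans import PlanFeatures
from shared.clients import get_tenant_client

async def require_distribution_feature(tenant_id: str) -> str:
    # tenant_id resolves from the path parameter of the endpoint using this dependency.
    tenant_client = get_tenant_client(config=settings, service_name="distribution-service")
    subscription = await tenant_client.get_tenant_subscription(tenant_id)
    if not subscription:
        raise HTTPException(
            status_code=403,
            detail="No active subscription found. Distribution routing requires Enterprise tier."
        )
    tier = subscription.get("plan", "starter")
    if not PlanFeatures.has_feature(tier, "distribution_management"):
        raise HTTPException(
            status_code=403,
            detail=f"Distribution routing requires Enterprise tier. Current tier: {tier}"
        )
    return tenant_id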

View File

@@ -0,0 +1,112 @@
"""
Shipment API endpoints for distribution service
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from typing import List, Optional
from datetime import date, timedelta
from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
router = APIRouter()
@router.get("/tenants/{tenant_id}/distribution/shipments")
async def get_shipments(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
status: Optional[str] = Query(None, description="Filter by shipment status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
List shipments with optional filtering
"""
try:
# If no date range specified, default to today; fill whichever bound is missing
if not date_from and not date_to:
date_from = date.today()
date_to = date.today()
elif not date_to:
date_to = date_from
elif not date_from:
date_from = date_to
shipments = []
current_date = date_from
while current_date <= date_to:
daily_shipments = await distribution_service.get_shipments_for_date(tenant_id, current_date)
shipments.extend(daily_shipments)
current_date = current_date + timedelta(days=1)
if status:
shipments = [s for s in shipments if s.get('status') == status]
return {"shipments": shipments}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")
@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
async def update_shipment_status(
tenant_id: str,
shipment_id: str,
status_update: dict, # Should be a proper Pydantic model
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Update shipment status
"""
try:
new_status = status_update.get('status')
if not new_status:
raise HTTPException(status_code=400, detail="Status is required")
user_id = "temp_user_id" # Would come from auth context
result = await distribution_service.update_shipment_status(
shipment_id=shipment_id,
new_status=new_status,
user_id=user_id,
metadata=status_update.get('metadata')
)
return result
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")
@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
async def upload_delivery_proof(
tenant_id: str,
shipment_id: str,
delivery_proof: dict, # Should be a proper Pydantic model
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Upload delivery proof (signature, photo, etc.)
"""
try:
# Implementation would handle signature/photo upload
# This is a placeholder until proper models are created
raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")
@router.get("/tenants/{tenant_id}/distribution/shipments/{shipment_id}")
async def get_shipment_detail(
tenant_id: str,
shipment_id: str,
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get detailed information about a specific shipment
"""
try:
# Implementation would fetch detailed shipment information
# This is a placeholder until repositories are created
raise HTTPException(status_code=501, detail="Shipment detail endpoint not yet implemented")
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to get shipment details: {str(e)}")
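
A sketch of the Pydantic request models the placeholder comments above call for; the field names mirror the dict keys used by the handlers and the Shipment columns, everything else is an assumption:

from typing import Optional, Dict, Any
from pydantic import BaseModel, Field

class ShipmentStatusUpdate(BaseModel):
    status: str = Field(..., description="New shipment status, e.g. 'in_transit' or 'delivered'")
    metadata: Optional[Dict[str, Any]] = None

class DeliveryProofUpload(BaseModel):
    signature: Optional[str] = Field(None, description="Base64-encoded signature")
    photo_url: Optional[str] = None
    received_by_name: Optional[str] = None
    delivery_notes: Optional[str] = None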

View File

@@ -0,0 +1,86 @@
"""
Production event consumer for the distribution service
Listens for production completion events and triggers distribution planning
"""
import logging
from typing import Dict, Any, Optional
import json
from app.services.distribution_service import DistributionService
logger = logging.getLogger(__name__)
class ProductionEventConsumer:
"""
Consumer for production events that may trigger distribution planning
"""
def __init__(self, distribution_service: DistributionService):
self.distribution_service = distribution_service
async def handle_production_batch_completed(self, event_data: Dict[str, Any]):
"""
Handle production batch completion event
This might trigger distribution planning if it's for internal transfers
"""
try:
logger.info(f"Handling production batch completion: {event_data}")
tenant_id = event_data.get('tenant_id')
batch_id = event_data.get('batch_id')
product_type = event_data.get('product_type')
completion_date = event_data.get('completion_date')
if not tenant_id:
logger.error("Missing tenant_id in production event")
return
# Check if this batch is for internal transfers (has destination tenant info)
# In a real implementation, this would check if the production batch
# is associated with an internal purchase order
# For now, we'll just log the event
logger.info(f"Production batch {batch_id} completed for tenant {tenant_id}")
# In a real implementation, this might trigger immediate distribution planning
# if the batch was for internal transfer orders
# await self._trigger_distribution_if_needed(tenant_id, batch_id)
except Exception as e:
logger.error(f"Error handling production batch completion event: {e}", exc_info=True)
raise
async def handle_internal_transfer_approved(self, event_data: Dict[str, Any]):
"""
Handle internal transfer approval event
This should trigger immediate distribution planning for the approved transfer
"""
try:
logger.info(f"Handling internal transfer approval: {event_data}")
tenant_id = event_data.get('tenant_id') # The parent tenant
transfer_id = event_data.get('transfer_id')
destination_tenant_id = event_data.get('destination_tenant_id')
scheduled_date = event_data.get('scheduled_date')
if not all([tenant_id, transfer_id, destination_tenant_id, scheduled_date]):
logger.error("Missing required fields in internal transfer event")
return
# In a real implementation, this might schedule distribution planning
# for the specific transfer on the scheduled date
logger.info(f"Internal transfer {transfer_id} approved from {tenant_id} to {destination_tenant_id}")
except Exception as e:
logger.error(f"Error handling internal transfer approval: {e}", exc_info=True)
raise
async def _trigger_distribution_if_needed(self, tenant_id: str, batch_id: str):
"""
Internal method to check if distribution planning is needed for this batch
"""
# Implementation would check if the batch is for internal transfers
# and trigger distribution planning if so
pass
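
If this consumer is wired to a message broker, a thin dispatcher could map routing keys to the handlers above. The routing keys and raw-message shape below are assumptions; the sketch is meant to live alongside this module and reuses its json import and logger:

EVENT_HANDLERS = {
    "production.batch.completed": ProductionEventConsumer.handle_production_batch_completed,
    "procurement.internal_transfer.approved": ProductionEventConsumer.handle_internal_transfer_approved,
}

async def dispatch_event(consumer: ProductionEventConsumer, routing_key: str, raw_body: bytes) -> None:
    # Look up the handler for this routing key; ignore events this service does not consume.
    handler = EVENT_HANDLERS.get(routing_key)
    if handler is None:
        logger.debug("Ignoring event with unhandled routing key: %s", routing_key)
        return
    event_data = json.loads(raw_body)
    await handler(consumer, event_data)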

View File

@@ -0,0 +1,43 @@
"""
Distribution Service Configuration
"""
from shared.config.base import BaseServiceSettings
from pydantic import Field
from typing import Optional
import os
class Settings(BaseServiceSettings):
"""
Distribution Service specific settings
"""
# Service Identity
APP_NAME: str = "Distribution Service"
SERVICE_NAME: str = "distribution-service"
DESCRIPTION: str = "Distribution and logistics service for enterprise tier bakery management"
VERSION: str = "1.0.0"
# Database Configuration
# Use environment variables with fallbacks for development
DB_HOST: str = os.getenv("DISTRIBUTION_DB_HOST", os.getenv("DB_HOST", "localhost"))
DB_PORT: int = int(os.getenv("DISTRIBUTION_DB_PORT", os.getenv("DB_PORT", "5432")))
DB_USER: str = os.getenv("DISTRIBUTION_DB_USER", os.getenv("DB_USER", "postgres"))
DB_PASSWORD: str = os.getenv("DISTRIBUTION_DB_PASSWORD", os.getenv("DB_PASSWORD", "postgres"))
DB_NAME: str = os.getenv("DISTRIBUTION_DB_NAME", os.getenv("DB_NAME", "distribution_db"))
@property
def DATABASE_URL(self) -> str:
"""Build database URL from components"""
# Try service-specific environment variable first
env_url = os.getenv("DISTRIBUTION_DATABASE_URL") or os.getenv("DATABASE_URL")
if env_url:
return env_url
# Build from components
return f"postgresql+asyncpg://{self.DB_USER}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}"
# Create settings instance
settings = Settings()
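
Because the DATABASE_URL property re-reads the environment on each access, a full URL override wins over the individual DB_* components; a minimal illustration, assuming the property is evaluated as written above (the URL value is an example):

import os

os.environ["DISTRIBUTION_DATABASE_URL"] = (
    "postgresql+asyncpg://demo:demo@localhost:5432/distribution_db"
)

from app.core.config import settings

# The property checks DISTRIBUTION_DATABASE_URL / DATABASE_URL before falling back
# to the DB_* components, so the override above is what gets printed here.
print(settings.DATABASE_URL)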

View File

@@ -0,0 +1,17 @@
"""
Distribution Service Database Configuration
"""
from shared.database import DatabaseManager, create_database_manager
from .config import settings
import os
# Create database manager instance
database_manager = create_database_manager(settings.DATABASE_URL, service_name="distribution")
# Convenience function to get database sessions
async def get_db():
"""Get database session generator"""
async with database_manager.get_session() as session:
yield session
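
Outside FastAPI (for example in a one-off maintenance script), the same manager can hand out sessions directly; a minimal sketch assuming get_session() is the async context manager used in get_db above:

import asyncio
from sqlalchemy import text

from app.core.database import database_manager

async def check_connection() -> None:
    # Open a session, run a trivial query, and report reachability.
    async with database_manager.get_session() as session:
        result = await session.execute(text("SELECT 1"))
        print("distribution_db reachable:", result.scalar() == 1)

if __name__ == "__main__":
    asyncio.run(check_connection())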

View File

@@ -0,0 +1,125 @@
"""
Distribution Service Main Application
"""
from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.api.routes import router as distribution_router
from app.api.shipments import router as shipments_router
from app.api.internal_demo import router as internal_demo_router
from shared.service_base import StandardFastAPIService
class DistributionService(StandardFastAPIService):
"""Distribution Service with standardized setup"""
async def on_startup(self, app):
"""Custom startup logic including migration verification"""
await self.verify_migrations()
await super().on_startup(app)
async def verify_migrations(self):
"""Verify database schema matches the latest migrations."""
try:
async with self.database_manager.get_session() as session:
# Check if alembic_version table exists
result = await session.execute(text("""
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'alembic_version'
)
"""))
table_exists = result.scalar()
if table_exists:
# If table exists, check the version
result = await session.execute(text("SELECT version_num FROM alembic_version"))
version = result.scalar()
self.logger.info(f"Migration verification successful: {version}")
else:
# If table doesn't exist, migrations might not have run yet
# This is OK - the migration job should create it
self.logger.warning("alembic_version table does not exist yet - migrations may not have run")
except Exception as e:
self.logger.warning(f"Migration verification failed (this may be expected during initial setup): {e}")
def __init__(self):
# Define expected database tables for health checks
distribution_expected_tables = [
'delivery_routes', 'shipments', 'route_assignments', 'delivery_points',
'vehicle_assignments', 'delivery_schedules', 'shipment_tracking', 'audit_logs'
]
# Define custom metrics for distribution service
distribution_custom_metrics = {
"routes_generated_total": {
"type": "counter",
"description": "Total delivery routes generated"
},
"shipments_processed_total": {
"type": "counter",
"description": "Total shipments processed"
},
"route_optimization_time_seconds": {
"type": "histogram",
"description": "Time to optimize delivery routes"
},
"shipment_processing_time_seconds": {
"type": "histogram",
"description": "Time to process shipment request"
},
"delivery_completion_rate": {
"type": "counter",
"description": "Delivery completion rate by status",
"labels": ["status"]
}
}
super().__init__(
service_name="distribution-service",
app_name="Distribution Service",
description="Distribution and logistics service for enterprise tier bakery management",
version="1.0.0",
log_level=settings.LOG_LEVEL,
api_prefix="", # Empty because RouteBuilder already includes /api/v1
database_manager=database_manager,
expected_tables=distribution_expected_tables,
custom_metrics=distribution_custom_metrics
)
async def on_shutdown(self, app: FastAPI):
"""Custom shutdown logic for distribution service"""
self.logger.info("Distribution Service shutdown complete")
def get_service_features(self):
"""Return distribution-specific features"""
return [
"delivery_route_optimization",
"shipment_tracking",
"vehicle_assignment",
"distribution_planning",
"delivery_point_management"
]
# Create service instance
service = DistributionService()
# Create FastAPI app with standardized setup
app = service.create_app(
docs_url="/docs",
redoc_url="/redoc"
)
# Setup standard endpoints
service.setup_standard_endpoints()
# Include routers with specific configurations
# Note: Routes now use RouteBuilder which includes full paths, so no prefix needed
service.add_router(distribution_router, tags=["distribution"])
service.add_router(shipments_router, tags=["shipments"])
service.add_router(internal_demo_router, tags=["internal-demo"])
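
For local development the module can be started directly with uvicorn (the port and reload flag are assumptions; the deployed entrypoint may differ):

if __name__ == "__main__":
    import uvicorn

    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)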

View File

@@ -0,0 +1,4 @@
# Distribution Service Models
from app.models.distribution import * # noqa: F401, F403
__all__ = ["DeliveryRoute", "Shipment", "DeliverySchedule", "DeliveryRouteStatus", "ShipmentStatus", "DeliveryScheduleFrequency"]

View File

@@ -0,0 +1,173 @@
"""
Distribution models for the bakery management platform
"""
import uuid
import enum
from datetime import datetime, timezone
from decimal import Decimal
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from shared.database.base import Base
class DeliveryRouteStatus(enum.Enum):
"""Status of delivery routes"""
planned = "planned"
in_progress = "in_progress"
completed = "completed"
cancelled = "cancelled"
class ShipmentStatus(enum.Enum):
"""Status of individual shipments"""
pending = "pending"
packed = "packed"
in_transit = "in_transit"
delivered = "delivered"
failed = "failed"
class DeliveryScheduleFrequency(enum.Enum):
"""Frequency of recurring delivery schedules"""
daily = "daily"
weekly = "weekly"
biweekly = "biweekly"
monthly = "monthly"
class DeliveryRoute(Base):
"""Optimized multi-stop routes for distribution"""
__tablename__ = "delivery_routes"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# Route identification
route_number = Column(String(50), nullable=False, unique=True, index=True)
route_date = Column(DateTime(timezone=True), nullable=False, index=True) # Date when route is executed
# Vehicle and driver assignment
vehicle_id = Column(String(100), nullable=True) # Reference to fleet management
driver_id = Column(UUID(as_uuid=True), nullable=True, index=True) # Reference to driver
# Optimization metadata
total_distance_km = Column(Float, nullable=True)
estimated_duration_minutes = Column(Integer, nullable=True)
# Route details
route_sequence = Column(JSONB, nullable=True) # Ordered array of stops with timing: [{"stop_number": 1, "location_id": "...", "estimated_arrival": "...", "actual_arrival": "..."}]
notes = Column(Text, nullable=True)
# Status
status = Column(SQLEnum(DeliveryRouteStatus), nullable=False, default=DeliveryRouteStatus.planned, index=True)
# Audit fields
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
created_by = Column(UUID(as_uuid=True), nullable=False)
updated_by = Column(UUID(as_uuid=True), nullable=False)
# Relationships
shipments = relationship("Shipment", back_populates="route", cascade="all, delete-orphan")
# Indexes
__table_args__ = (
Index('ix_delivery_routes_tenant_date', 'tenant_id', 'route_date'),
Index('ix_delivery_routes_status', 'status'),
Index('ix_delivery_routes_date_tenant_status', 'route_date', 'tenant_id', 'status'),
)
class Shipment(Base):
"""Individual deliveries to child tenants"""
__tablename__ = "shipments"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# Links to hierarchy and procurement
parent_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Source tenant (central production)
child_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Destination tenant (retail outlet)
purchase_order_id = Column(UUID(as_uuid=True), nullable=True, index=True) # Associated internal purchase order
delivery_route_id = Column(UUID(as_uuid=True), ForeignKey('delivery_routes.id', ondelete='SET NULL'), nullable=True, index=True) # Assigned route
# Shipment details
shipment_number = Column(String(50), nullable=False, unique=True, index=True)
shipment_date = Column(DateTime(timezone=True), nullable=False, index=True)
# Tracking information
current_location_lat = Column(Float, nullable=True)
current_location_lng = Column(Float, nullable=True)
last_tracked_at = Column(DateTime(timezone=True), nullable=True)
status = Column(SQLEnum(ShipmentStatus), nullable=False, default=ShipmentStatus.pending, index=True)
actual_delivery_time = Column(DateTime(timezone=True), nullable=True)
# Proof of delivery
signature = Column(Text, nullable=True) # Digital signature base64 encoded
photo_url = Column(String(500), nullable=True) # URL to delivery confirmation photo
received_by_name = Column(String(200), nullable=True)
delivery_notes = Column(Text, nullable=True)
# Weight/volume tracking
total_weight_kg = Column(Float, nullable=True)
total_volume_m3 = Column(Float, nullable=True)
# Audit fields
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
created_by = Column(UUID(as_uuid=True), nullable=False)
updated_by = Column(UUID(as_uuid=True), nullable=False)
# Relationships
route = relationship("DeliveryRoute", back_populates="shipments")
# Indexes
__table_args__ = (
Index('ix_shipments_tenant_status', 'tenant_id', 'status'),
Index('ix_shipments_parent_child', 'parent_tenant_id', 'child_tenant_id'),
Index('ix_shipments_date_tenant', 'shipment_date', 'tenant_id'),
)
class DeliverySchedule(Base):
"""Recurring delivery patterns"""
__tablename__ = "delivery_schedules"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# Schedule identification
name = Column(String(200), nullable=False)
# Delivery pattern
delivery_days = Column(String(200), nullable=False) # Format: "Mon,Wed,Fri" or "Mon-Fri"
delivery_time = Column(String(20), nullable=False) # Format: "HH:MM" or "HH:MM-HH:MM"
frequency = Column(SQLEnum(DeliveryScheduleFrequency), nullable=False, default=DeliveryScheduleFrequency.weekly)
# Auto-generation settings
auto_generate_orders = Column(Boolean, nullable=False, default=False)
lead_time_days = Column(Integer, nullable=False, default=1) # How many days in advance to generate
# Target tenants for this schedule
target_parent_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
target_child_tenant_ids = Column(JSONB, nullable=False) # List of child tenant IDs involved in this route
# Configuration
is_active = Column(Boolean, nullable=False, default=True)
notes = Column(Text, nullable=True)
# Audit fields
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
created_by = Column(UUID(as_uuid=True), nullable=False)
updated_by = Column(UUID(as_uuid=True), nullable=False)
# Indexes
__table_args__ = (
Index('ix_delivery_schedules_tenant_active', 'tenant_id', 'is_active'),
Index('ix_delivery_schedules_parent_tenant', 'target_parent_tenant_id'),
)
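
An example query sketch against the models above: per-status shipment counts for a tenant on a given day (the session handling and tenant_id value are left to the caller and are assumptions here):

import uuid
from datetime import date, datetime, timedelta

from sqlalchemy import select, func as sa_func

async def shipment_status_counts(session, tenant_id: uuid.UUID, day: date) -> dict:
    # Half-open day window [start, end) over the timestamptz shipment_date column.
    start = datetime.combine(day, datetime.min.time())
    end = start + timedelta(days=1)
    stmt = (
        select(Shipment.status, sa_func.count(Shipment.id))
        .where(
            Shipment.tenant_id == tenant_id,
            Shipment.shipment_date >= start,
            Shipment.shipment_date < end,
        )
        .group_by(Shipment.status)
    )
    result = await session.execute(stmt)
    return {status.value: count for status, count in result.all()}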

View File

@@ -0,0 +1,207 @@
"""
Delivery Route Repository
"""
from typing import List, Dict, Any, Optional
from datetime import date, datetime, timedelta
import uuid
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.models.distribution import DeliveryRoute, DeliveryRouteStatus
from shared.database.base import Base
class DeliveryRouteRepository:
def __init__(self, db_session: AsyncSession):
self.db_session = db_session
async def create_route(self, route_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Create a new delivery route
"""
# Define system user ID to use when user_id is not provided
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")
route = DeliveryRoute(
id=uuid.uuid4(),
tenant_id=route_data['tenant_id'],
route_number=route_data['route_number'],
route_date=route_data['route_date'],
vehicle_id=route_data.get('vehicle_id'),
driver_id=route_data.get('driver_id'),
total_distance_km=route_data.get('total_distance_km'),
estimated_duration_minutes=route_data.get('estimated_duration_minutes'),
route_sequence=route_data.get('route_sequence'),
status=route_data.get('status', 'planned'),
created_by=route_data.get('created_by', SYSTEM_USER_ID),
updated_by=route_data.get('updated_by', SYSTEM_USER_ID)
)
self.db_session.add(route)
await self.db_session.commit()
await self.db_session.refresh(route)
# Convert SQLAlchemy object to dict for return
return {
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
async def get_routes_by_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
"""
Get all delivery routes for a specific date and tenant
"""
stmt = select(DeliveryRoute).where(
(DeliveryRoute.tenant_id == tenant_id) &
(DeliveryRoute.route_date >= datetime.combine(target_date, datetime.min.time())) &
(DeliveryRoute.route_date < datetime.combine(target_date + timedelta(days=1), datetime.min.time()))
)
result = await self.db_session.execute(stmt)
routes = result.scalars().all()
return [
{
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
for route in routes
]
async def get_routes_by_date_range(self, tenant_id: str, start_date: date, end_date: date) -> List[Dict[str, Any]]:
"""
Get all delivery routes for a specific date range and tenant
"""
stmt = select(DeliveryRoute).where(
(DeliveryRoute.tenant_id == tenant_id) &
(DeliveryRoute.route_date >= datetime.combine(start_date, datetime.min.time())) &
(DeliveryRoute.route_date < datetime.combine(end_date + timedelta(days=1), datetime.min.time()))
)
result = await self.db_session.execute(stmt)
routes = result.scalars().all()
return [
{
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
for route in routes
]
async def get_route_by_id(self, route_id: str) -> Optional[Dict[str, Any]]:
"""
Get a specific delivery route by ID
"""
stmt = select(DeliveryRoute).where(DeliveryRoute.id == route_id)
result = await self.db_session.execute(stmt)
route = result.scalar_one_or_none()
if route:
return {
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
return None
async def update_route_status(self, route_id: str, status: str, user_id: str) -> Optional[Dict[str, Any]]:
"""
Update route status
"""
stmt = select(DeliveryRoute).where(DeliveryRoute.id == route_id)
result = await self.db_session.execute(stmt)
route = result.scalar_one_or_none()
if not route:
return None
# Handle system user ID if passed as string
if user_id == 'system':
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")
route.updated_by = SYSTEM_USER_ID
else:
route.updated_by = user_id
route.status = status
await self.db_session.commit()
await self.db_session.refresh(route)
return {
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
async def delete_demo_routes_for_tenant(self, tenant_id: str) -> int:
"""
Delete all demo routes for a tenant
Used for demo session cleanup
Args:
tenant_id: The tenant ID to delete routes for
Returns:
Number of routes deleted
"""
from sqlalchemy import delete
# Delete routes with DEMO- prefix in route_number
stmt = delete(DeliveryRoute).where(
(DeliveryRoute.tenant_id == uuid.UUID(tenant_id)) &
(DeliveryRoute.route_number.like('DEMO-%'))
)
result = await self.db_session.execute(stmt)
await self.db_session.commit()
deleted_count = result.rowcount
return deleted_count
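
The route-to-dict mapping above is repeated in every read method; a refactor sketch (the helper name _route_to_dict is hypothetical) that would keep the field list in one place, with each method returning _route_to_dict(route) or a list comprehension over it:

def _route_to_dict(route: DeliveryRoute) -> Dict[str, Any]:
    # Single source of truth for the serialized route shape returned by this repository.
    return {
        'id': str(route.id),
        'tenant_id': str(route.tenant_id),
        'route_number': route.route_number,
        'route_date': route.route_date,
        'vehicle_id': route.vehicle_id,
        'driver_id': route.driver_id,
        'total_distance_km': route.total_distance_km,
        'estimated_duration_minutes': route.estimated_duration_minutes,
        'route_sequence': route.route_sequence,
        'status': route.status.value if hasattr(route.status, 'value') else route.status,
        'created_at': route.created_at,
        'updated_at': route.updated_at,
    }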

View File

@@ -0,0 +1,74 @@
from typing import List, Optional, Dict, Any
from uuid import UUID
from sqlalchemy import select, update, delete
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.exc import IntegrityError
import structlog
from app.models.distribution import DeliverySchedule
logger = structlog.get_logger()
class DeliveryScheduleRepository:
def __init__(self, session: AsyncSession):
self.session = session
async def create_schedule(self, schedule_data: Dict[str, Any]) -> DeliverySchedule:
"""Create a new delivery schedule"""
try:
schedule = DeliverySchedule(**schedule_data)
self.session.add(schedule)
await self.session.commit()
await self.session.refresh(schedule)
return schedule
except IntegrityError as e:
await self.session.rollback()
logger.error("Error creating delivery schedule", error=str(e))
raise ValueError(f"Failed to create delivery schedule: {e}")
except Exception as e:
await self.session.rollback()
logger.error("Unexpected error creating delivery schedule", error=str(e))
raise
async def get_schedule_by_id(self, schedule_id: UUID) -> Optional[DeliverySchedule]:
"""Get a delivery schedule by ID"""
result = await self.session.execute(
select(DeliverySchedule).where(DeliverySchedule.id == schedule_id)
)
return result.scalar_one_or_none()
async def get_schedules_by_tenant(self, tenant_id: UUID) -> List[DeliverySchedule]:
"""Get all delivery schedules for a tenant"""
result = await self.session.execute(
select(DeliverySchedule).where(DeliverySchedule.tenant_id == tenant_id)
)
return result.scalars().all()
async def update_schedule(self, schedule_id: UUID, update_data: Dict[str, Any]) -> Optional[DeliverySchedule]:
"""Update a delivery schedule"""
try:
stmt = (
update(DeliverySchedule)
.where(DeliverySchedule.id == schedule_id)
.values(**update_data)
.returning(DeliverySchedule)
)
result = await self.session.execute(stmt)
await self.session.commit()
return result.scalar_one_or_none()
except Exception as e:
await self.session.rollback()
logger.error("Error updating delivery schedule", error=str(e), schedule_id=schedule_id)
raise
async def delete_schedule(self, schedule_id: UUID) -> bool:
"""Delete a delivery schedule"""
try:
stmt = delete(DeliverySchedule).where(DeliverySchedule.id == schedule_id)
result = await self.session.execute(stmt)
await self.session.commit()
return result.rowcount > 0
except Exception as e:
await self.session.rollback()
logger.error("Error deleting delivery schedule", error=str(e), schedule_id=schedule_id)
raise
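
A usage sketch for the repository above (all values are illustrative): creating a weekly schedule through an existing AsyncSession; the keys match the DeliverySchedule columns, and frequency falls back to its weekly default:

import uuid

async def create_demo_schedule(session) -> DeliverySchedule:
    repo = DeliveryScheduleRepository(session)
    return await repo.create_schedule({
        "tenant_id": uuid.uuid4(),
        "name": "Weekday morning deliveries",
        "delivery_days": "Mon,Wed,Fri",
        "delivery_time": "09:00",
        "auto_generate_orders": True,
        "lead_time_days": 1,
        "target_parent_tenant_id": uuid.uuid4(),
        "target_child_tenant_ids": [],
        "is_active": True,
        "created_by": uuid.uuid4(),
        "updated_by": uuid.uuid4(),
    })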

View File

@@ -0,0 +1,309 @@
"""
Shipment Repository
"""
from typing import List, Dict, Any, Optional
from datetime import date, datetime, timedelta
import uuid
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.models.distribution import Shipment, ShipmentStatus
from shared.database.base import Base
class ShipmentRepository:
def __init__(self, db_session: AsyncSession):
self.db_session = db_session
async def create_shipment(self, shipment_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Create a new shipment
"""
# Define system user ID to use when user_id is not provided
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")
shipment = Shipment(
id=uuid.uuid4(),
tenant_id=shipment_data['tenant_id'],
parent_tenant_id=shipment_data['parent_tenant_id'],
child_tenant_id=shipment_data['child_tenant_id'],
purchase_order_id=shipment_data.get('purchase_order_id'),
delivery_route_id=shipment_data.get('delivery_route_id'),
shipment_number=shipment_data['shipment_number'],
shipment_date=shipment_data['shipment_date'],
status=shipment_data.get('status', 'pending'),
total_weight_kg=shipment_data.get('total_weight_kg'),
total_volume_m3=shipment_data.get('total_volume_m3'),
created_by=shipment_data.get('created_by', SYSTEM_USER_ID),
updated_by=shipment_data.get('updated_by', SYSTEM_USER_ID)
)
self.db_session.add(shipment)
await self.db_session.commit()
await self.db_session.refresh(shipment)
# Convert SQLAlchemy object to dict for return
return {
'id': str(shipment.id),
'tenant_id': str(shipment.tenant_id),
'parent_tenant_id': str(shipment.parent_tenant_id),
'child_tenant_id': str(shipment.child_tenant_id),
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
'shipment_number': shipment.shipment_number,
'shipment_date': shipment.shipment_date,
'current_location_lat': shipment.current_location_lat,
'current_location_lng': shipment.current_location_lng,
'last_tracked_at': shipment.last_tracked_at,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'actual_delivery_time': shipment.actual_delivery_time,
'signature': shipment.signature,
'photo_url': shipment.photo_url,
'received_by_name': shipment.received_by_name,
'delivery_notes': shipment.delivery_notes,
'total_weight_kg': shipment.total_weight_kg,
'total_volume_m3': shipment.total_volume_m3,
'created_at': shipment.created_at,
'updated_at': shipment.updated_at
}
async def get_shipments_by_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
"""
Get all shipments for a specific date and tenant
"""
stmt = select(Shipment).where(
(Shipment.tenant_id == tenant_id) &
(Shipment.shipment_date >= datetime.combine(target_date, datetime.min.time())) &
            (Shipment.shipment_date < datetime.combine(target_date, datetime.max.time()))
)
result = await self.db_session.execute(stmt)
shipments = result.scalars().all()
return [
{
'id': str(shipment.id),
'tenant_id': str(shipment.tenant_id),
'parent_tenant_id': str(shipment.parent_tenant_id),
'child_tenant_id': str(shipment.child_tenant_id),
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
'shipment_number': shipment.shipment_number,
'shipment_date': shipment.shipment_date,
'current_location_lat': shipment.current_location_lat,
'current_location_lng': shipment.current_location_lng,
'last_tracked_at': shipment.last_tracked_at,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'actual_delivery_time': shipment.actual_delivery_time,
'signature': shipment.signature,
'photo_url': shipment.photo_url,
'received_by_name': shipment.received_by_name,
'delivery_notes': shipment.delivery_notes,
'total_weight_kg': shipment.total_weight_kg,
'total_volume_m3': shipment.total_volume_m3,
'created_at': shipment.created_at,
'updated_at': shipment.updated_at
}
for shipment in shipments
]
async def get_shipments_by_date_range(self, tenant_id: str, start_date: date, end_date: date) -> List[Dict[str, Any]]:
"""
Get all shipments for a specific date range and tenant
"""
stmt = select(Shipment).where(
(Shipment.tenant_id == tenant_id) &
(Shipment.shipment_date >= datetime.combine(start_date, datetime.min.time())) &
            (Shipment.shipment_date <= datetime.combine(end_date, datetime.max.time()))
)
result = await self.db_session.execute(stmt)
shipments = result.scalars().all()
return [
{
'id': str(shipment.id),
'tenant_id': str(shipment.tenant_id),
'parent_tenant_id': str(shipment.parent_tenant_id),
'child_tenant_id': str(shipment.child_tenant_id),
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
'shipment_number': shipment.shipment_number,
'shipment_date': shipment.shipment_date,
'current_location_lat': shipment.current_location_lat,
'current_location_lng': shipment.current_location_lng,
'last_tracked_at': shipment.last_tracked_at,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'actual_delivery_time': shipment.actual_delivery_time,
'signature': shipment.signature,
'photo_url': shipment.photo_url,
'received_by_name': shipment.received_by_name,
'delivery_notes': shipment.delivery_notes,
'total_weight_kg': shipment.total_weight_kg,
'total_volume_m3': shipment.total_volume_m3,
'created_at': shipment.created_at,
'updated_at': shipment.updated_at
}
for shipment in shipments
]
async def get_shipment_by_id(self, shipment_id: str) -> Optional[Dict[str, Any]]:
"""
Get a specific shipment by ID
"""
stmt = select(Shipment).where(Shipment.id == shipment_id)
result = await self.db_session.execute(stmt)
shipment = result.scalar_one_or_none()
if shipment:
return {
'id': str(shipment.id),
'tenant_id': str(shipment.tenant_id),
'parent_tenant_id': str(shipment.parent_tenant_id),
'child_tenant_id': str(shipment.child_tenant_id),
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
'shipment_number': shipment.shipment_number,
'shipment_date': shipment.shipment_date,
'current_location_lat': shipment.current_location_lat,
'current_location_lng': shipment.current_location_lng,
'last_tracked_at': shipment.last_tracked_at,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'actual_delivery_time': shipment.actual_delivery_time,
'signature': shipment.signature,
'photo_url': shipment.photo_url,
'received_by_name': shipment.received_by_name,
'delivery_notes': shipment.delivery_notes,
'total_weight_kg': shipment.total_weight_kg,
'total_volume_m3': shipment.total_volume_m3,
'created_at': shipment.created_at,
'updated_at': shipment.updated_at
}
return None
async def update_shipment_status(self, shipment_id: str, status: str, user_id: str, metadata: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
"""
Update shipment status
"""
stmt = select(Shipment).where(Shipment.id == shipment_id)
result = await self.db_session.execute(stmt)
shipment = result.scalar_one_or_none()
if not shipment:
return None
# Handle system user ID if passed as string
if user_id == 'system':
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")
shipment.updated_by = SYSTEM_USER_ID
else:
shipment.updated_by = user_id
shipment.status = status
# Update tracking information if provided in metadata
if metadata:
if 'current_location_lat' in metadata:
shipment.current_location_lat = metadata['current_location_lat']
if 'current_location_lng' in metadata:
shipment.current_location_lng = metadata['current_location_lng']
            if 'last_tracked_at' in metadata:
                shipment.last_tracked_at = datetime.fromisoformat(metadata['last_tracked_at']) if isinstance(metadata['last_tracked_at'], str) else metadata['last_tracked_at']
if 'signature' in metadata:
shipment.signature = metadata['signature']
if 'photo_url' in metadata:
shipment.photo_url = metadata['photo_url']
if 'received_by_name' in metadata:
shipment.received_by_name = metadata['received_by_name']
if 'delivery_notes' in metadata:
shipment.delivery_notes = metadata['delivery_notes']
            if 'actual_delivery_time' in metadata:
                shipment.actual_delivery_time = datetime.fromisoformat(metadata['actual_delivery_time']) if isinstance(metadata['actual_delivery_time'], str) else metadata['actual_delivery_time']
await self.db_session.commit()
await self.db_session.refresh(shipment)
return {
'id': str(shipment.id),
'tenant_id': str(shipment.tenant_id),
'parent_tenant_id': str(shipment.parent_tenant_id),
'child_tenant_id': str(shipment.child_tenant_id),
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
'shipment_number': shipment.shipment_number,
'shipment_date': shipment.shipment_date,
'current_location_lat': shipment.current_location_lat,
'current_location_lng': shipment.current_location_lng,
'last_tracked_at': shipment.last_tracked_at,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'actual_delivery_time': shipment.actual_delivery_time,
'signature': shipment.signature,
'photo_url': shipment.photo_url,
'received_by_name': shipment.received_by_name,
'delivery_notes': shipment.delivery_notes,
'total_weight_kg': shipment.total_weight_kg,
'total_volume_m3': shipment.total_volume_m3,
'created_at': shipment.created_at,
'updated_at': shipment.updated_at
}
async def assign_shipments_to_route(self, route_id: str, shipment_ids: List[str], user_id: str) -> Dict[str, Any]:
"""
Assign multiple shipments to a specific route
"""
stmt = select(Shipment).where(Shipment.id.in_(shipment_ids))
result = await self.db_session.execute(stmt)
shipments = result.scalars().all()
# Handle system user ID if passed as string
actual_user_id = user_id
if user_id == 'system':
actual_user_id = uuid.UUID("50000000-0000-0000-0000-000000000004")
updated_shipments = []
for shipment in shipments:
shipment.delivery_route_id = route_id
shipment.updated_by = actual_user_id
await self.db_session.refresh(shipment)
updated_shipments.append({
'id': str(shipment.id),
'shipment_number': shipment.shipment_number,
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
'delivery_route_id': str(shipment.delivery_route_id)
})
await self.db_session.commit()
return {
'route_id': route_id,
'updated_shipments': updated_shipments,
'count': len(updated_shipments)
}
async def delete_demo_shipments_for_tenant(self, tenant_id: str) -> int:
"""
Delete all demo shipments for a tenant
Used for demo session cleanup
Args:
tenant_id: The tenant ID to delete shipments for
Returns:
Number of shipments deleted
"""
from sqlalchemy import delete
# Delete shipments with DEMOSHP- prefix in shipment_number
stmt = delete(Shipment).where(
(Shipment.tenant_id == uuid.UUID(tenant_id)) &
(Shipment.shipment_number.like('DEMOSHP-%'))
)
result = await self.db_session.execute(stmt)
await self.db_session.commit()
deleted_count = result.rowcount
return deleted_count
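The shipment-to-dict conversion above is repeated verbatim in every read path; a possible refactor (not applied in this commit) is to centralise it in one helper built from the same Shipment columns:

# Possible refactor sketch: one serialisation helper instead of the repeated inline dicts.
def _shipment_to_dict(shipment: Shipment) -> Dict[str, Any]:
    """Serialise a Shipment row into the plain dict shape this repository returns."""
    return {
        'id': str(shipment.id),
        'tenant_id': str(shipment.tenant_id),
        'parent_tenant_id': str(shipment.parent_tenant_id),
        'child_tenant_id': str(shipment.child_tenant_id),
        'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
        'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
        'shipment_number': shipment.shipment_number,
        'shipment_date': shipment.shipment_date,
        'current_location_lat': shipment.current_location_lat,
        'current_location_lng': shipment.current_location_lng,
        'last_tracked_at': shipment.last_tracked_at,
        'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
        'actual_delivery_time': shipment.actual_delivery_time,
        'signature': shipment.signature,
        'photo_url': shipment.photo_url,
        'received_by_name': shipment.received_by_name,
        'delivery_notes': shipment.delivery_notes,
        'total_weight_kg': shipment.total_weight_kg,
        'total_volume_m3': shipment.total_volume_m3,
        'created_at': shipment.created_at,
        'updated_at': shipment.updated_at,
    }

Each read method would then reduce to a comprehension over this helper, e.g. return [_shipment_to_dict(s) for s in shipments].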

View File

@@ -0,0 +1,585 @@
"""
Distribution Service for Enterprise Tier
Manages delivery routes and shipment tracking for parent-child tenant networks
"""
import asyncio
import logging
from typing import List, Dict, Any, Optional
from datetime import datetime, date, timedelta
import uuid
from decimal import Decimal
from shared.utils.demo_dates import BASE_REFERENCE_DATE
from app.models.distribution import DeliveryRoute, Shipment, DeliverySchedule, DeliveryRouteStatus, ShipmentStatus
from app.services.routing_optimizer import RoutingOptimizer
from shared.clients.tenant_client import TenantServiceClient
from shared.clients.inventory_client import InventoryServiceClient
from shared.clients.procurement_client import ProcurementServiceClient
logger = logging.getLogger(__name__)
class DistributionService:
"""
Core business logic for distribution management
"""
def __init__(
self,
route_repository,
shipment_repository,
schedule_repository,
procurement_client: ProcurementServiceClient,
tenant_client: TenantServiceClient,
inventory_client: InventoryServiceClient,
routing_optimizer: RoutingOptimizer
):
self.route_repository = route_repository
self.shipment_repository = shipment_repository
self.schedule_repository = schedule_repository
self.procurement_client = procurement_client
self.tenant_client = tenant_client
self.inventory_client = inventory_client
self.routing_optimizer = routing_optimizer
async def generate_daily_distribution_plan(
self,
parent_tenant_id: str,
target_date: date,
vehicle_capacity_kg: float = 1000.0
) -> Dict[str, Any]:
"""
Generate daily distribution plan for internal transfers between parent and children
"""
logger.info(f"Generating distribution plan for parent tenant {parent_tenant_id} on {target_date}")
try:
# 1. Fetch all approved internal POs for target date from procurement service
internal_pos = await self.procurement_client.get_approved_internal_purchase_orders(
parent_tenant_id=parent_tenant_id,
target_date=target_date
)
if not internal_pos:
logger.info(f"No approved internal POs found for {parent_tenant_id} on {target_date}")
return {
"parent_tenant_id": parent_tenant_id,
"target_date": target_date.isoformat(),
"routes": [],
"shipments": [],
"status": "no_deliveries_needed"
}
# 2. Group by child tenant and aggregate weights/volumes
deliveries_by_child = {}
for po in internal_pos:
child_tenant_id = po.get('destination_tenant_id')
if child_tenant_id not in deliveries_by_child:
deliveries_by_child[child_tenant_id] = {
'po_id': po.get('id'),
'weight_kg': 0,
'volume_m3': 0,
'items_count': 0
}
# Calculate total weight and volume for this PO
total_weight = 0
total_volume = 0
for item in po.get('items', []):
# In a real implementation, we'd have weight/volume per item
# For now, we'll estimate based on quantity
quantity = item.get('ordered_quantity', 0)
# Typical bakery item weight estimation (adjust as needed)
avg_item_weight_kg = 1.0 # Adjust based on actual products
total_weight += Decimal(str(quantity)) * Decimal(str(avg_item_weight_kg))
deliveries_by_child[child_tenant_id]['weight_kg'] += float(total_weight)
deliveries_by_child[child_tenant_id]['items_count'] += len(po.get('items', []))
# 3. Fetch parent depot location and all child locations from tenant service
parent_locations_response = await self.tenant_client.get_tenant_locations(parent_tenant_id)
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
parent_depot = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
if not parent_depot:
logger.error(f"No central production location found for parent tenant {parent_tenant_id}")
raise ValueError(f"No central production location found for parent tenant {parent_tenant_id}")
depot_location = (float(parent_depot['latitude']), float(parent_depot['longitude']))
# Fetch all child tenant locations
deliveries_data = []
for child_tenant_id, delivery_info in deliveries_by_child.items():
child_locations_response = await self.tenant_client.get_tenant_locations(child_tenant_id)
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
logger.warning(f"No retail outlet location found for child tenant {child_tenant_id}")
continue
deliveries_data.append({
'id': f"delivery_{child_tenant_id}",
'child_tenant_id': child_tenant_id,
'location': (float(child_location['latitude']), float(child_location['longitude'])),
'weight_kg': delivery_info['weight_kg'],
'volume_m3': delivery_info['volume_m3'],
'po_id': delivery_info['po_id'],
'items_count': delivery_info['items_count']
})
if not deliveries_data:
logger.info(f"No valid delivery locations found for distribution plan")
return {
"parent_tenant_id": parent_tenant_id,
"target_date": target_date.isoformat(),
"routes": [],
"shipments": [],
"status": "no_valid_deliveries"
}
# 4. Call routing_optimizer.optimize_daily_routes()
optimization_result = await self.routing_optimizer.optimize_daily_routes(
deliveries=deliveries_data,
depot_location=depot_location,
vehicle_capacity_kg=vehicle_capacity_kg
)
# 5. Create DeliveryRoute and Shipment records
created_routes = []
created_shipments = []
for route_idx, route_data in enumerate(optimization_result['routes']):
# Create DeliveryRoute record
route = await self.route_repository.create_route({
'tenant_id': parent_tenant_id,
'route_number': f"R{target_date.strftime('%Y%m%d')}{route_idx + 1:02d}",
'route_date': datetime.combine(target_date, datetime.min.time()),
'vehicle_id': route_data.get('vehicle_id'),
'driver_id': route_data.get('driver_id'),
'total_distance_km': route_data.get('total_distance_km', 0),
'estimated_duration_minutes': route_data.get('estimated_duration_minutes', 0),
'route_sequence': route_data.get('route_sequence', []),
'status': 'planned'
})
created_routes.append(route)
# Create Shipment records for each stop (excluding depot stops)
for stop in route_data.get('route_sequence', []):
                    if not stop.get('is_depot', False) and 'child_tenant_id' in stop:
shipment = await self.shipment_repository.create_shipment({
'tenant_id': parent_tenant_id,
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': stop['child_tenant_id'],
'purchase_order_id': stop.get('po_id'),
'delivery_route_id': route['id'],
'shipment_number': f"S{target_date.strftime('%Y%m%d')}{len(created_shipments) + 1:03d}",
'shipment_date': datetime.combine(target_date, datetime.min.time()),
'status': 'pending',
'total_weight_kg': stop.get('weight_kg', 0),
'total_volume_m3': stop.get('volume_m3', 0)
})
created_shipments.append(shipment)
logger.info(f"Distribution plan generated: {len(created_routes)} routes, {len(created_shipments)} shipments")
# 6. Publish distribution.plan.created event to message queue
await self._publish_distribution_plan_created_event(
parent_tenant_id=parent_tenant_id,
target_date=target_date,
routes=created_routes,
shipments=created_shipments
)
return {
"parent_tenant_id": parent_tenant_id,
"target_date": target_date.isoformat(),
"routes": [route for route in created_routes],
"shipments": [shipment for shipment in created_shipments],
"optimization_metadata": optimization_result,
"status": "success"
}
except Exception as e:
logger.error(f"Error generating distribution plan: {e}", exc_info=True)
raise
async def _publish_distribution_plan_created_event(
self,
parent_tenant_id: str,
target_date: date,
routes: List[Dict[str, Any]],
shipments: List[Dict[str, Any]]
):
"""
Publish distribution plan created event to message queue
"""
# In a real implementation, this would publish to RabbitMQ
logger.info(f"Distribution plan created event published for parent {parent_tenant_id}")
async def setup_demo_enterprise_distribution(
self,
parent_tenant_id: str,
child_tenant_ids: List[str],
session_id: str
) -> Dict[str, Any]:
"""
Setup distribution routes and schedules for enterprise demo
"""
try:
logger.info(f"Setting up demo distribution for parent {parent_tenant_id} with {len(child_tenant_ids)} children")
# Get locations for all tenants
parent_locations_response = await self.tenant_client.get_tenant_locations(parent_tenant_id)
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
# Look for central production or warehouse location as fallback
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)
# If no specific central location found, use first available location
if not parent_location and parent_locations:
parent_location = parent_locations[0]
logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")
if not parent_location:
raise ValueError(f"No location found for parent tenant {parent_tenant_id} to use as distribution center")
# Create delivery schedules for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if not child_location:
logger.warning(f"No location found for child tenant {child_id}")
continue
# Create delivery schedule
schedule_data = {
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': child_id,
'schedule_name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
'delivery_days': "Mon,Wed,Fri", # Tri-weekly delivery
'delivery_time': "09:00", # Morning delivery
'auto_generate_orders': True,
'lead_time_days': 1,
'is_active': True
}
# Create the delivery schedule record
await self.create_delivery_schedule(schedule_data)
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
continue
# Create sample delivery route for today
today = date.today()
delivery_data = []
# Prepare delivery information for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if child_location:
# Ensure we have valid coordinates
latitude = child_location.get('latitude')
longitude = child_location.get('longitude')
if latitude is not None and longitude is not None:
try:
lat = float(latitude)
lng = float(longitude)
delivery_data.append({
'id': f"demo_delivery_{child_id}",
'child_tenant_id': child_id,
'location': (lat, lng),
'weight_kg': 150.0, # Fixed weight for demo
'po_id': f"demo_po_{child_id}", # Would be actual PO ID in real implementation
'items_count': 20
})
except (ValueError, TypeError):
logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"No location found for child delivery {child_id}, skipping")
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
# Optimize routes using VRP - ensure we have valid coordinates
parent_latitude = parent_location.get('latitude')
parent_longitude = parent_location.get('longitude')
if parent_latitude is None or parent_longitude is None:
logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
raise ValueError(f"Parent location {parent_tenant_id} missing coordinates")
try:
depot_location = (float(parent_latitude), float(parent_longitude))
except (ValueError, TypeError) as e:
logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
raise ValueError(f"Parent location {parent_tenant_id} has invalid coordinates: {e}")
optimization_result = await self.routing_optimizer.optimize_daily_routes(
deliveries=delivery_data,
depot_location=depot_location,
vehicle_capacity_kg=1000.0 # Standard vehicle capacity
)
# Create the delivery route for today
# Use a random suffix to ensure unique route numbers
            import secrets
            unique_suffix = secrets.token_hex(4)  # token_hex(4) already yields 8 hex characters
route = await self.route_repository.create_route({
'tenant_id': parent_tenant_id,
'route_number': f"DEMO-{today.strftime('%Y%m%d')}-{unique_suffix}",
'route_date': datetime.combine(today, datetime.min.time()),
'total_distance_km': optimization_result.get('total_distance_km', 0),
'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
'route_sequence': optimization_result.get('routes', [])[0].get('route_sequence', []) if optimization_result.get('routes') else [],
'status': 'planned'
})
# Create shipment records for each delivery
shipments = []
for idx, delivery in enumerate(delivery_data):
shipment = await self.shipment_repository.create_shipment({
'tenant_id': parent_tenant_id,
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': delivery['child_tenant_id'],
'shipment_number': f"DEMOSHP-{today.strftime('%Y%m%d')}-{idx+1:03d}",
'shipment_date': datetime.combine(today, datetime.min.time()),
'status': 'pending',
'total_weight_kg': delivery['weight_kg']
})
shipments.append(shipment)
# BUG-012 FIX: Clone historical data from template
# Define template tenant IDs (matching seed script)
TEMPLATE_PARENT_ID = "c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8"
TEMPLATE_CHILD_IDS = [
"d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9", # Madrid Centro
"e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0", # Barcelona Gràcia
"f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1" # Valencia Ruzafa
]
# Create mapping from template child IDs to new session child IDs
# Assumption: child_tenant_ids are passed in same order (Madrid, Barcelona, Valencia)
child_id_map = {}
for idx, template_child_id in enumerate(TEMPLATE_CHILD_IDS):
if idx < len(child_tenant_ids):
child_id_map[template_child_id] = child_tenant_ids[idx]
# Calculate date range for history (last 30 days)
# Use demo reference date if available in session metadata, otherwise today
# Note: session_id is passed, but we need to fetch metadata or infer date
# For now, we'll use BASE_REFERENCE_DATE as the anchor, similar to the seed script
end_date = BASE_REFERENCE_DATE
start_date = end_date - timedelta(days=30)
logger.info(f"Cloning historical distribution data from {start_date} to {end_date}")
# Fetch historical routes from template parent
historical_routes = await self.route_repository.get_routes_by_date_range(
tenant_id=TEMPLATE_PARENT_ID,
start_date=start_date,
end_date=end_date
)
# Fetch historical shipments from template parent
historical_shipments = await self.shipment_repository.get_shipments_by_date_range(
tenant_id=TEMPLATE_PARENT_ID,
start_date=start_date,
end_date=end_date
)
logger.info(f"Found {len(historical_routes)} routes and {len(historical_shipments)} shipments to clone")
# Clone routes
route_id_map = {} # Old route ID -> New route ID
cloned_routes_count = 0
for route_data in historical_routes:
old_route_id = route_data['id']
# Update route sequence with new child IDs
new_sequence = []
for stop in route_data.get('route_sequence', []):
new_stop = stop.copy()
if 'tenant_id' in new_stop and new_stop['tenant_id'] in child_id_map:
new_stop['tenant_id'] = child_id_map[new_stop['tenant_id']]
new_sequence.append(new_stop)
# Create new route
new_route = await self.route_repository.create_route({
'tenant_id': parent_tenant_id,
'route_number': route_data['route_number'], # Keep same number for consistency
'route_date': route_data['route_date'],
'vehicle_id': route_data['vehicle_id'],
'driver_id': str(uuid.uuid4()), # New driver
'total_distance_km': route_data['total_distance_km'],
'estimated_duration_minutes': route_data['estimated_duration_minutes'],
'route_sequence': new_sequence,
'status': route_data['status']
})
route_id_map[old_route_id] = str(new_route['id'])
cloned_routes_count += 1
# Clone shipments
cloned_shipments_count = 0
for shipment_data in historical_shipments:
# Skip if child tenant not in our map (e.g. if we have fewer children than template)
if shipment_data['child_tenant_id'] not in child_id_map:
continue
# Map route ID
new_route_id = None
if shipment_data['delivery_route_id'] in route_id_map:
new_route_id = route_id_map[shipment_data['delivery_route_id']]
# Create new shipment
await self.shipment_repository.create_shipment({
'tenant_id': parent_tenant_id,
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': child_id_map[shipment_data['child_tenant_id']],
'shipment_number': shipment_data['shipment_number'],
'shipment_date': shipment_data['shipment_date'],
'status': shipment_data['status'],
'total_weight_kg': shipment_data['total_weight_kg'],
'total_volume_m3': shipment_data['total_volume_m3'],
'delivery_route_id': new_route_id
})
cloned_shipments_count += 1
logger.info(f"Demo distribution setup completed: {cloned_routes_count} routes, {cloned_shipments_count} shipments cloned")
return {
"status": "completed",
"route_id": None, # No single route ID to return
"shipment_count": cloned_shipments_count,
"routes_count": cloned_routes_count,
"total_distance_km": 0, # Not calculating total for history
"session_id": session_id
}
except Exception as e:
logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
raise
async def get_delivery_routes_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
"""
Get all delivery routes for a specific date and tenant
"""
routes = await self.route_repository.get_routes_by_date(tenant_id, target_date)
return routes
async def get_shipments_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
"""
Get all shipments for a specific date and tenant
"""
shipments = await self.shipment_repository.get_shipments_by_date(tenant_id, target_date)
return shipments
async def update_shipment_status(self, shipment_id: str, new_status: str, user_id: str, metadata: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
Update shipment status with audit trail
"""
        updated_shipment = await self.shipment_repository.update_shipment_status(
            shipment_id=shipment_id,
            status=new_status,
            user_id=user_id,
            metadata=metadata
        )
return updated_shipment
async def assign_shipments_to_route(self, route_id: str, shipment_ids: List[str], user_id: str) -> Dict[str, Any]:
"""
Assign multiple shipments to a specific route
"""
result = await self.shipment_repository.assign_shipments_to_route(
route_id=route_id,
shipment_ids=shipment_ids,
user_id=user_id
)
return result
    async def create_delivery_schedule(self, schedule_data: Dict[str, Any]) -> DeliverySchedule:
"""
Create a delivery schedule for recurring deliveries between parent and child tenants
Args:
schedule_data: Dictionary containing schedule information:
- parent_tenant_id: UUID of parent tenant
- child_tenant_id: UUID of child tenant
- schedule_name: Human-readable name for the schedule
- delivery_days: Comma-separated days (e.g., "Mon,Wed,Fri")
- delivery_time: Time of day for delivery (HH:MM format)
- auto_generate_orders: Boolean, whether to auto-generate orders
- lead_time_days: Number of days lead time for orders
- is_active: Boolean, whether schedule is active
        Returns:
            The created DeliverySchedule record
"""
# Create schedule using repository
try:
# Ensure required fields are present
if "delivery_days" not in schedule_data:
schedule_data["delivery_days"] = "Mon,Wed,Fri"
if "delivery_time" not in schedule_data:
schedule_data["delivery_time"] = "09:00"
if "auto_generate_orders" not in schedule_data:
schedule_data["auto_generate_orders"] = True
if "lead_time_days" not in schedule_data:
schedule_data["lead_time_days"] = 1
if "is_active" not in schedule_data:
schedule_data["is_active"] = True
created_schedule = await self.schedule_repository.create_schedule(schedule_data)
logger.info(
f"Created delivery schedule {created_schedule.id} for parent {schedule_data.get('parent_tenant_id')} "
f"to child {schedule_data.get('child_tenant_id')}"
)
return created_schedule
except Exception as e:
logger.error(f"Error creating delivery schedule: {e}")
raise
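A hedged caller sketch for the service above: it assumes a fully wired DistributionService instance and that 'in_transit' is a valid ShipmentStatus value (the enum's members are not shown in this commit); the plan statuses checked are the ones returned by generate_daily_distribution_plan.

# Illustrative only: generate tomorrow's internal-transfer plan and start tracking.
from datetime import date, timedelta


async def run_daily_plan(service: DistributionService, parent_tenant_id: str) -> None:
    target = date.today() + timedelta(days=1)
    plan = await service.generate_daily_distribution_plan(
        parent_tenant_id=parent_tenant_id,
        target_date=target,
        vehicle_capacity_kg=1000.0,
    )
    if plan["status"] != "success":
        # "no_deliveries_needed" or "no_valid_deliveries": nothing to dispatch
        return
    for shipment in plan["shipments"]:
        await service.update_shipment_status(
            shipment_id=shipment["id"],
            new_status="in_transit",  # assumed ShipmentStatus value
            user_id="system",
            metadata={"last_tracked_at": target.isoformat() + "T06:00:00"},
        )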

View File

@@ -0,0 +1,457 @@
"""
Routing optimizer for the distribution service using Google OR-Tools VRP
"""
import logging
from typing import List, Dict, Any, Optional, Tuple
from datetime import datetime, timedelta
import time
# Google OR-Tools - Vehicle Routing Problem
try:
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
HAS_ORTOOLS = True
except ImportError:
print("Warning: OR-Tools not installed. Using fallback routing algorithm.")
HAS_ORTOOLS = False
logger = logging.getLogger(__name__)
class RoutingOptimizer:
"""
Vehicle Routing Problem optimizer using Google OR-Tools
"""
def __init__(self):
self.has_ortools = HAS_ORTOOLS
async def optimize_daily_routes(
self,
deliveries: List[Dict[str, Any]],
depot_location: Tuple[float, float],
vehicle_capacity_kg: Optional[float] = 1000.0,
time_limit_seconds: float = 30.0
) -> Dict[str, Any]:
"""
Optimize daily delivery routes using VRP
Args:
deliveries: List of delivery dictionaries with keys:
- id: str - delivery ID
- location: Tuple[float, float] - (lat, lng)
- weight_kg: float - weight of delivery
- time_window: Optional[Tuple[str, str]] - delivery time window
depot_location: Tuple[float, float] - depot location (lat, lng)
vehicle_capacity_kg: Maximum weight capacity per vehicle
time_limit_seconds: Time limit for optimization (timeout)
Returns:
Dict with optimized route sequences and metadata
"""
if not self.has_ortools:
logger.warning("OR-Tools not available, using fallback sequential routing")
return self._fallback_sequential_routing(deliveries, depot_location)
start_time = time.time()
try:
# Prepare data for VRP
locations = [depot_location] # Depot is first location (index 0)
demands = [0] # Depot has no demand
time_windows = [(0, 24*60)] # Depot available all day (in minutes from midnight)
delivery_mapping = {}
for i, delivery in enumerate(deliveries, 1):
locations.append(delivery['location'])
# Ensure demands are integers for OR-Tools compatibility
weight_kg = delivery.get('weight_kg', 0)
demands.append(int(weight_kg) if isinstance(weight_kg, (int, float)) else 0)
# Convert time windows to minutes from midnight
time_window = delivery.get('time_window', None)
if time_window:
start_time_str, end_time_str = time_window
start_minutes = self._time_to_minutes(start_time_str)
end_minutes = self._time_to_minutes(end_time_str)
time_windows.append((int(start_minutes), int(end_minutes)))
else:
time_windows.append((0, 24*60)) # Default to all day if no time window
delivery_mapping[i] = delivery['id']
# Check if we have no deliveries (only depot), return early with empty route
if len(locations) <= 1: # Only depot, no deliveries
logger.info("No deliveries to optimize, returning empty route")
return {
'routes': [],
'total_distance_km': 0,
'optimization_time_seconds': time.time() - start_time,
'algorithm_used': 'ortools_vrp',
'status': 'success'
}
# Calculate total demand first before checking it
total_demand = sum(demands)
# Check if total demand is 0 but we have deliveries - handle this case too
if total_demand == 0 and len(locations) > 1:
logger.info("Total demand is 0 but deliveries exist, returning simple route")
# Create simple route with all deliveries but no capacity constraints
simple_route = {
'route_number': 1,
'route_sequence': [delivery_mapping[i] for i in range(1, len(locations))],
'stops': [{
'stop_number': i,
'delivery_id': delivery_mapping.get(i, f"delivery_{i}"),
'sequence': i - 1
} for i in range(1, len(locations))],
'total_weight_kg': 0
}
return {
'routes': [simple_route],
'total_distance_km': 0,
'optimization_time_seconds': time.time() - start_time,
'algorithm_used': 'ortools_vrp_zero_demand',
'status': 'success'
}
# Calculate distance matrix using haversine formula
distance_matrix = self._calculate_distance_matrix(locations)
# Create VRP model
# Calculate required vehicles (total_demand already calculated above)
# Ensure at least 1 vehicle, and enough to cover demand plus buffer
            min_vehicles = max(1, int(total_demand / vehicle_capacity_kg) + 1)
            # Add a buffer vehicle just in case
            num_vehicles = min_vehicles + 1
logger.info(f"VRP Optimization: Demand={total_demand}kg, Capacity={vehicle_capacity_kg}kg, Vehicles={num_vehicles}")
# Create VRP model
manager = pywrapcp.RoutingIndexManager(
len(distance_matrix), # number of locations
num_vehicles, # number of vehicles
[0] * num_vehicles, # depot index for starts
[0] * num_vehicles # depot index for ends
)
routing = pywrapcp.RoutingModel(manager)
def distance_callback(from_index, to_index):
"""Returns the distance between the two nodes."""
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return distance_matrix[from_node][to_node]
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
# Add capacity constraint
def demand_callback(index):
"""Returns the demand of the node."""
node = manager.IndexToNode(index)
return int(demands[node]) # Ensure demands are integers
demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
routing.AddDimensionWithVehicleCapacity(
demand_callback_index,
0, # null capacity slack
[int(vehicle_capacity_kg)] * num_vehicles, # vehicle maximum capacities (as integers)
True, # start cumul to zero
'Capacity'
)
# Add time window constraint
def time_callback(from_index, to_index):
"""Returns the travel time between the two nodes."""
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
# Calculate travel time based on distance (meters) and assumed speed (km/h)
distance_m = distance_matrix[from_node][to_node]
distance_km = distance_m / 1000.0 # Convert meters to km
# Assume 30 km/h average speed for city deliveries
travel_time_minutes = (distance_km / 30.0) * 60.0
return int(travel_time_minutes)
time_callback_index = routing.RegisterTransitCallback(time_callback)
routing.AddDimension(
time_callback_index,
60 * 24, # Allow waiting time (24 hours in minutes)
60 * 24, # Maximum time per vehicle (24 hours in minutes)
False, # Don't force start cumul to zero
'Time'
)
time_dimension = routing.GetDimensionOrDie('Time')
# Add time window constraints for each location
for location_idx in range(len(locations)):
index = manager.NodeToIndex(location_idx)
if index != -1: # Valid index
min_time, max_time = time_windows[location_idx]
time_dimension.CumulVar(index).SetRange(int(min_time), int(max_time))
# Setting first solution heuristic
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC
)
            search_parameters.time_limit.FromSeconds(int(time_limit_seconds))
# Solve the problem
solution = routing.SolveWithParameters(search_parameters)
# Check if solution was found
if solution:
                optimized_routes = self._extract_routes(routing, manager, solution, delivery_mapping, distance_matrix)
                # Calculate total route distance (km) from the per-leg distances set during extraction
                total_distance = 0
                for route in optimized_routes:
                    route_distance = sum(stop.get('distance_to_next', 0) for stop in route['stops'])
                    route['total_distance_km'] = route_distance
                    total_distance += route_distance
logger.info(f"VRP optimization completed in {time.time() - start_time:.2f}s")
return {
'routes': optimized_routes,
'total_distance_km': total_distance,
'optimization_time_seconds': time.time() - start_time,
'algorithm_used': 'ortools_vrp',
'status': 'success'
}
else:
logger.warning("OR-Tools failed to find solution, using fallback routing")
return self._fallback_sequential_routing(deliveries, depot_location)
except Exception as e:
logger.error(f"Error in VRP optimization: {e}")
# Fallback to simple sequential routing
return self._fallback_sequential_routing(deliveries, depot_location)
def _calculate_distance_matrix(self, locations: List[Tuple[float, float]]) -> List[List[int]]:
"""
Calculate distance matrix using haversine formula (in meters)
"""
import math
def haversine_distance(lat1, lon1, lat2, lon2):
"""Calculate distance between two lat/lon points in meters"""
R = 6371000 # Earth's radius in meters
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
dlat = lat2 - lat1
dlon = lon2 - lon1
a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
c = 2 * math.asin(math.sqrt(a))
return R * c # Distance in meters
n = len(locations)
matrix = [[0] * n for _ in range(n)]
for i in range(n):
for j in range(n):
if i != j:
lat1, lon1 = locations[i]
lat2, lon2 = locations[j]
dist_m = haversine_distance(lat1, lon1, lat2, lon2)
matrix[i][j] = int(dist_m)
return matrix
    def _extract_routes(self, routing, manager, solution, delivery_mapping, distance_matrix) -> List[Dict[str, Any]]:
        """
        Extract routes from the OR-Tools solution, recording per-leg distances in km
        """
        routes = []
        for vehicle_id in range(manager.GetNumberOfVehicles()):
            index = routing.Start(vehicle_id)
            # Skip if vehicle is not used (Start -> End directly)
            if routing.IsEnd(solution.Value(routing.NextVar(index))):
                continue
            current_route = {
                'route_number': vehicle_id + 1,
                'stops': [],
                'total_weight_kg': 0
            }
            # Route sequence stores the delivery IDs in visit order
            route_sequence = []
            # Add depot as first stop
            node_index = manager.IndexToNode(index)
            delivery_id = delivery_mapping.get(node_index, f"depot_{node_index}")
            current_route['stops'].append({
                'stop_number': 1,
                'delivery_id': delivery_id,
                'location': 'depot',
                'sequence': 0
            })
            stop_number = 1
            while not routing.IsEnd(index):
                previous_node = manager.IndexToNode(index)
                index = solution.Value(routing.NextVar(index))
                node_index = manager.IndexToNode(index)
                # Record the distance of the leg just travelled on the stop being left (matrix is in metres)
                current_route['stops'][-1]['distance_to_next'] = distance_matrix[previous_node][node_index] / 1000.0
                if node_index != 0:  # Not depot
                    stop_number += 1
                    delivery_id = delivery_mapping.get(node_index, f"delivery_{node_index}")
                    current_route['stops'].append({
                        'stop_number': stop_number,
                        'delivery_id': delivery_id,
                        'location_index': node_index,
                        'sequence': stop_number
                    })
                    # Add delivery ID to route sequence (excluding depot stops)
                    route_sequence.append(delivery_id)
                else:  # Back to depot
                    stop_number += 1
                    current_route['stops'].append({
                        'stop_number': stop_number,
                        'delivery_id': f"depot_end_{vehicle_id + 1}",
                        'location': 'depot',
                        'sequence': stop_number
                    })
                    break
            # Add the route_sequence to the current route
            current_route['route_sequence'] = route_sequence
            routes.append(current_route)
        return routes
def _time_to_minutes(self, time_str: str) -> int:
"""
Convert HH:MM string to minutes from midnight
"""
if ":" in time_str:
hour, minute = map(int, time_str.split(":"))
return hour * 60 + minute
else:
# If it's already in minutes, return as is
return int(time_str)
def _fallback_sequential_routing(self, deliveries: List[Dict[str, Any]], depot_location: Tuple[float, float]) -> Dict[str, Any]:
"""
        Fallback routing that visits deliveries in order of distance from the depot (nearest first)
"""
import math
def haversine_distance(lat1, lon1, lat2, lon2):
"""Calculate distance between two lat/lon points in km"""
R = 6371 # Earth's radius in km
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
dlat = lat2 - lat1
dlon = lon2 - lon1
a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
c = 2 * math.asin(math.sqrt(a))
return R * c # Distance in km
# Calculate distances from depot to each delivery and between deliveries
deliveries_with_distance = []
for delivery in deliveries:
lat, lon = delivery['location']
depot_lat, depot_lon = depot_location
dist = haversine_distance(depot_lat, depot_lon, lat, lon)
deliveries_with_distance.append({
**delivery,
'distance_from_depot': dist
})
# Sort deliveries by distance from depot (nearest first)
deliveries_with_distance.sort(key=lambda x: x['distance_from_depot'])
# Create simple route
route_stops = []
total_distance = 0
# Start from depot
route_stops.append({
'stop_number': 1,
'delivery_id': 'depot_start',
'location': depot_location,
'sequence': 0,
'is_depot': True
})
# Add deliveries
for i, delivery in enumerate(deliveries_with_distance, 1):
route_stops.append({
'stop_number': i + 1,
'delivery_id': delivery['id'],
'location': delivery['location'],
'weight_kg': delivery.get('weight_kg', 0),
'sequence': i,
'is_depot': False
})
# Return to depot
route_stops.append({
'stop_number': len(deliveries_with_distance) + 2,
'delivery_id': 'depot_end',
'location': depot_location,
'sequence': len(deliveries_with_distance) + 1,
'is_depot': True
})
        # Calculate per-leg distances between consecutive stops
        # (haversine handles any pair of points; a depot-to-depot leg is simply 0 km)
        for i in range(len(route_stops) - 1):
            curr_lat, curr_lon = route_stops[i]['location']
            next_lat, next_lon = route_stops[i + 1]['location']
            dist = haversine_distance(curr_lat, curr_lon, next_lat, next_lon)
            total_distance += dist
            route_stops[i]['distance_to_next'] = dist
# Create route sequence from delivery IDs in the order they appear
route_sequence = [stop['delivery_id'] for stop in route_stops if not stop.get('is_depot', False)]
return {
'routes': [{
'route_number': 1,
'stops': route_stops,
'route_sequence': route_sequence,
'total_distance_km': total_distance,
'total_weight_kg': sum(d.get('weight_kg', 0) for d in deliveries),
}],
'total_distance_km': total_distance,
'optimization_time_seconds': 0,
'algorithm_used': 'fallback_sequential',
'status': 'success'
}
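A standalone sketch exercising the optimizer directly; coordinates and weights are made up, and which algorithm runs depends on whether OR-Tools is installed (the algorithm_used field in the result reports it):

# Illustrative only: two fabricated stops around a depot.
import asyncio


async def _demo() -> None:
    optimizer = RoutingOptimizer()
    result = await optimizer.optimize_daily_routes(
        deliveries=[
            {"id": "delivery_a", "location": (40.4168, -3.7038), "weight_kg": 150.0},
            {"id": "delivery_b", "location": (40.4530, -3.6883), "weight_kg": 200.0,
             "time_window": ("08:00", "12:00")},
        ],
        depot_location=(40.3930, -3.7310),
        vehicle_capacity_kg=1000.0,
        time_limit_seconds=5.0,
    )
    print(result["algorithm_used"], f"{result['total_distance_km']:.1f} km")


if __name__ == "__main__":
    asyncio.run(_demo())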