New alert service
@@ -4,10 +4,12 @@ Handles internal demo setup for enterprise tier
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any, List
from typing import Dict, Any, List, Optional
import structlog
from datetime import datetime
import uuid
import json
import time

from app.services.distribution_service import DistributionService
from app.api.dependencies import get_distribution_service
@@ -26,318 +28,9 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
return True
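The body of verify_internal_api_key is collapsed by this hunk; a minimal sketch of the kind of check it performs, assuming the expected key lives in settings.INTERNAL_API_KEY (that name is not confirmed by this diff), could look like:

async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify internal API key for service-to-service communication"""
    # Hypothetical check: compare the header against a configured secret
    if not x_internal_api_key or x_internal_api_key != settings.INTERNAL_API_KEY:
        raise HTTPException(status_code=401, detail="Invalid internal API key")
    return True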


@router.post("/internal/demo/setup")
async def setup_demo_distribution(
setup_request: dict, # Contains parent_tenant_id, child_tenant_ids, session_id
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Internal endpoint to set up distribution for the enterprise demo

Args:
setup_request: Contains parent_tenant_id, child_tenant_ids, session_id
"""
try:
parent_tenant_id = setup_request.get('parent_tenant_id')
child_tenant_ids = setup_request.get('child_tenant_ids', [])
session_id = setup_request.get('session_id')

if not all([parent_tenant_id, child_tenant_ids, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: parent_tenant_id, child_tenant_ids, session_id"
)

logger.info("Setting up demo distribution",
parent=parent_tenant_id,
children=child_tenant_ids,
session_id=session_id)

# Get locations for parent and children to set up delivery routes
parent_locations_response = await distribution_service.tenant_client.get_tenant_locations(parent_tenant_id)

# Check if parent_locations_response is None (which happens when the API call fails)
if not parent_locations_response:
logger.warning(f"No locations found for parent tenant {parent_tenant_id}")
raise HTTPException(
status_code=404,
detail=f"No locations found for parent tenant {parent_tenant_id}. "
f"Ensure the tenant exists and has locations configured."
)

# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response

# Look for central production or warehouse location as fallback
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)

# If no specific central location found, use first available location
if not parent_location and parent_locations:
parent_location = parent_locations[0]
logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")

# BUG-013 FIX: Use HTTPException instead of ValueError
if not parent_location:
raise HTTPException(
status_code=404,
detail=f"No location found for parent tenant {parent_tenant_id} to use as distribution center. "
f"Ensure the parent tenant has at least one location configured."
)

# Create delivery schedules for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)

# Check if child_locations_response is None (which happens when the API call fails)
if not child_locations_response:
logger.warning(f"No locations found for child tenant {child_id}")
continue # Skip this child tenant and continue with the next one

# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response

# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)

# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")

if not child_location:
logger.warning(f"No location found for child tenant {child_id}")
continue

# Create delivery schedule
schedule_data = {
'tenant_id': child_id, # The child tenant that will receive deliveries
'target_parent_tenant_id': parent_tenant_id, # The parent tenant that supplies
'target_child_tenant_ids': [child_id], # Array of child tenant IDs in this schedule
'name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
'delivery_days': "Mon,Wed,Fri", # Three deliveries per week
'delivery_time': "09:00", # Morning delivery
'auto_generate_orders': True,
'lead_time_days': 1,
'is_active': True,
'created_by': parent_tenant_id, # BUG FIX: Add required created_by field
'updated_by': parent_tenant_id # BUG FIX: Add required updated_by field
}

# Create the delivery schedule record
schedule = await distribution_service.create_delivery_schedule(schedule_data)
logger.info(f"Created delivery schedule for {parent_tenant_id} to {child_id}")
except Exception as e:
logger.error(f"Error creating delivery schedule for child {child_id}: {e}", exc_info=True)
continue # Continue with the next child

# BUG-012 FIX: Use demo reference date instead of actual today
from datetime import date
from shared.utils.demo_dates import BASE_REFERENCE_DATE

# Get demo reference date from session metadata if available
session_metadata = setup_request.get('session_metadata', {})
session_created_at = session_metadata.get('session_created_at')

if session_created_at:
# Use the BASE_REFERENCE_DATE for consistent demo data dating
# All demo data is anchored to this date (November 25, 2025)
demo_today = BASE_REFERENCE_DATE
logger.info(f"Using demo reference date: {demo_today}")
else:
# Fallback to today if no session metadata (shouldn't happen in production)
demo_today = date.today()
logger.warning(f"No session_created_at in metadata, using today: {demo_today}")

delivery_data = []

# Prepare delivery information for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)

# Check if child_locations_response is None (which happens when the API call fails)
if not child_locations_response:
logger.warning(f"No locations found for child delivery {child_id}")
continue # Skip this child tenant and continue with the next one

# Extract the actual locations array from the response object
# The response format is {"locations": [...], "total": N}
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response

# Look for retail outlet or store location as first choice
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)

# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")

if child_location:
# Ensure we have valid coordinates
latitude = child_location.get('latitude')
longitude = child_location.get('longitude')

if latitude is not None and longitude is not None:
try:
lat = float(latitude)
lng = float(longitude)
delivery_data.append({
'id': f"demo_delivery_{child_id}",
'child_tenant_id': child_id,
'location': (lat, lng),
'weight_kg': 150.0, # Fixed weight for demo
'po_id': f"demo_po_{child_id}", # Would be actual PO ID in real implementation
'items_count': 20
})
except (ValueError, TypeError):
logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"No location found for child delivery {child_id}, skipping")
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)

# Optimize routes using VRP - ensure we have valid coordinates
parent_latitude = parent_location.get('latitude')
parent_longitude = parent_location.get('longitude')

# BUG-013 FIX: Use HTTPException for coordinate validation errors
if parent_latitude is None or parent_longitude is None:
logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
raise HTTPException(
status_code=400,
detail=f"Parent location {parent_tenant_id} missing coordinates. "
f"Latitude and longitude must be provided for distribution planning."
)

try:
depot_location = (float(parent_latitude), float(parent_longitude))
except (ValueError, TypeError) as e:
logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
raise HTTPException(
status_code=400,
detail=f"Parent location {parent_tenant_id} has invalid coordinates: {e}"
)

optimization_result = await distribution_service.routing_optimizer.optimize_daily_routes(
deliveries=delivery_data,
depot_location=depot_location,
vehicle_capacity_kg=1000.0 # Standard vehicle capacity
)
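# Note (illustrative, not part of this commit): the return shape of optimize_daily_routes is not
# shown in this diff. Judging from how the result is consumed below, it is assumed to look roughly like:
# {
#     "routes": [{"route_sequence": [{"child_tenant_id": "<uuid>", ...}, ...]}],
#     "total_distance_km": 42.5,
#     "estimated_duration_minutes": 95
# }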

# BUG-012 FIX: Create the delivery route using demo reference date
routes = optimization_result.get('routes', [])
route_sequence = routes[0].get('route_sequence', []) if routes else []

# Use session_id suffix to ensure unique route numbers for concurrent demo sessions
session_suffix = session_id.split('_')[-1][:8] if session_id else '001'
route = await distribution_service.route_repository.create_route({
'tenant_id': uuid.UUID(parent_tenant_id),
'route_number': f"DEMO-{demo_today.strftime('%Y%m%d')}-{session_suffix}",
'route_date': datetime.combine(demo_today, datetime.min.time()),
'total_distance_km': optimization_result.get('total_distance_km', 0),
'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
'route_sequence': route_sequence,
'status': 'planned'
})

# BUG-012 FIX: Create shipment records using demo reference date
# Use session_id suffix to ensure unique shipment numbers
shipments = []
for idx, delivery in enumerate(delivery_data):
shipment = await distribution_service.shipment_repository.create_shipment({
'tenant_id': uuid.UUID(parent_tenant_id),
'parent_tenant_id': uuid.UUID(parent_tenant_id),
'child_tenant_id': uuid.UUID(delivery['child_tenant_id']),
'shipment_number': f"DEMOSHP-{demo_today.strftime('%Y%m%d')}-{session_suffix}-{idx+1:03d}",
'shipment_date': datetime.combine(demo_today, datetime.min.time()),
'status': 'pending',
'total_weight_kg': delivery['weight_kg']
})
shipments.append(shipment)

logger.info(f"Demo distribution setup completed: 1 route, {len(shipments)} shipments")

return {
"status": "completed",
"route_id": str(route['id']),
"shipment_count": len(shipments),
"total_distance_km": optimization_result.get('total_distance_km', 0),
"session_id": session_id
}

except Exception as e:
logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to setup demo distribution: {str(e)}")
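For reference, a request body for POST /internal/demo/setup matching the docstring above might look like this (all values are placeholders):

# Illustrative setup_request payload (placeholder values)
example_setup_request = {
    "parent_tenant_id": "<parent-tenant-uuid>",
    "child_tenant_ids": ["<child-tenant-uuid-1>", "<child-tenant-uuid-2>"],
    "session_id": "demo_session_001",
    "session_metadata": {"session_created_at": "2025-11-25T09:00:00Z"}
}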


@router.post("/internal/demo/cleanup")
async def cleanup_demo_distribution(
cleanup_request: dict, # Contains parent_tenant_id, child_tenant_ids, session_id
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Internal endpoint to clean up distribution data for the enterprise demo

Args:
cleanup_request: Contains parent_tenant_id, child_tenant_ids, session_id
"""
try:
parent_tenant_id = cleanup_request.get('parent_tenant_id')
child_tenant_ids = cleanup_request.get('child_tenant_ids', [])
session_id = cleanup_request.get('session_id')

if not all([parent_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: parent_tenant_id, session_id"
)

logger.info("Cleaning up demo distribution",
parent=parent_tenant_id,
session_id=session_id)

# Delete all demo routes and shipments for this parent tenant
deleted_routes_count = await distribution_service.route_repository.delete_demo_routes_for_tenant(
tenant_id=parent_tenant_id
)

deleted_shipments_count = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
tenant_id=parent_tenant_id
)

logger.info(f"Demo distribution cleanup completed: {deleted_routes_count} routes, {deleted_shipments_count} shipments deleted")

return {
"status": "completed",
"routes_deleted": deleted_routes_count,
"shipments_deleted": deleted_shipments_count,
"session_id": session_id
}

except Exception as e:
logger.error(f"Error cleaning up demo distribution: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to cleanup demo distribution: {str(e)}")
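Similarly, a cleanup_request body only needs the parent tenant and session (placeholder values):

# Illustrative cleanup_request payload (placeholder values)
example_cleanup_request = {
    "parent_tenant_id": "<parent-tenant-uuid>",
    "session_id": "demo_session_001"
}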
# Legacy /internal/demo/setup and /internal/demo/cleanup endpoints removed
# Distribution now uses the standard /internal/demo/clone pattern like all other services
# Data is cloned from base template tenants via DataCloner


@router.get("/internal/health")
@@ -357,64 +50,301 @@ async def internal_health_check(

@router.post("/internal/demo/clone")
async def clone_demo_data(
clone_request: dict,
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
session_metadata: Optional[str] = None,
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone/Setup distribution data for a virtual demo tenant

Clone distribution data from base tenant to virtual tenant

This follows the standard cloning pattern used by other services:
1. Query base tenant data (routes, shipments, schedules)
2. Clone to virtual tenant with ID substitution and date adjustment
3. Return records cloned count

Args:
clone_request: Contains base_tenant_id, virtual_tenant_id, session_id, demo_account_type
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO timestamp when demo session was created (for date adjustment)
"""
try:
virtual_tenant_id = clone_request.get('virtual_tenant_id')
session_id = clone_request.get('session_id')

if not all([virtual_tenant_id, session_id]):
if not all([base_tenant_id, virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: virtual_tenant_id, session_id"
status_code=400,
detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
)

logger.info("Cloning distribution data",
logger.info("Cloning distribution data from base tenant",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
session_id=session_id)

# 1. Fetch child tenants for the new virtual parent
child_tenants = await distribution_service.tenant_client.get_child_tenants(virtual_tenant_id)

if not child_tenants:
logger.warning(f"No child tenants found for virtual parent {virtual_tenant_id}, skipping distribution setup")
return {
"status": "skipped",
"reason": "no_child_tenants",
"virtual_tenant_id": virtual_tenant_id
}

child_tenant_ids = [child['id'] for child in child_tenants]

# 2. Call existing setup logic
result = await distribution_service.setup_demo_enterprise_distribution(
parent_tenant_id=virtual_tenant_id,
child_tenant_ids=child_tenant_ids,
session_id=session_id
# Clean up any existing demo data for this virtual tenant to prevent conflicts
logger.info("Cleaning up existing demo data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(virtual_tenant_id)
deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(virtual_tenant_id)

if deleted_routes > 0 or deleted_shipments > 0:
logger.info("Cleaned up existing demo data",
virtual_tenant_id=virtual_tenant_id,
deleted_routes=deleted_routes,
deleted_shipments=deleted_shipments)

# Generate a single timestamp suffix for this cloning operation to ensure uniqueness
timestamp_suffix = str(int(time.time()))[-6:] # Last 6 digits of timestamp

# Parse session creation date for date adjustment
from datetime import date, datetime, timezone
from dateutil import parser as date_parser
from shared.utils.demo_dates import BASE_REFERENCE_DATE, adjust_date_for_demo

if session_created_at:
if isinstance(session_created_at, str):
session_dt = date_parser.parse(session_created_at)
else:
session_dt = session_created_at
else:
session_dt = datetime.now(timezone.utc)

# Parse session_metadata to extract child tenant mappings for enterprise demos
child_tenant_id_map = {}
if session_metadata:
try:
metadata_dict = json.loads(session_metadata)
child_configs = metadata_dict.get("child_configs", [])
child_tenant_ids = metadata_dict.get("child_tenant_ids", [])

# Build mapping: base_child_id -> virtual_child_id
for idx, child_config in enumerate(child_configs):
if idx < len(child_tenant_ids):
base_child_id = child_config.get("base_tenant_id")
virtual_child_id = child_tenant_ids[idx]
if base_child_id and virtual_child_id:
child_tenant_id_map[base_child_id] = virtual_child_id

logger.info(
"Built child tenant ID mapping for enterprise demo",
mapping_count=len(child_tenant_id_map),
session_id=session_id,
mappings=child_tenant_id_map
)
except Exception as e:
logger.warning("Failed to parse session_metadata", error=str(e), session_id=session_id)

# Clone delivery routes from base tenant
base_routes = await distribution_service.route_repository.get_all_routes_for_tenant(base_tenant_id)

routes_cloned = 0
route_id_map = {} # Map old route IDs to new route IDs

for base_route in base_routes:
# Adjust route_date relative to session creation
adjusted_route_date = adjust_date_for_demo(
base_route.get('route_date'),
session_dt,
BASE_REFERENCE_DATE
)

# Map child tenant IDs in route_sequence
route_sequence = base_route.get('route_sequence', [])
if child_tenant_id_map and route_sequence:
mapped_sequence = []
for stop in route_sequence:
if isinstance(stop, dict) and 'child_tenant_id' in stop:
base_child_id = str(stop['child_tenant_id'])
if base_child_id in child_tenant_id_map:
stop = {**stop, 'child_tenant_id': child_tenant_id_map[base_child_id]}
logger.debug(
"Mapped child_tenant_id in route_sequence",
base_child_id=base_child_id,
virtual_child_id=child_tenant_id_map[base_child_id],
session_id=session_id
)
mapped_sequence.append(stop)
route_sequence = mapped_sequence

# Generate unique route number for the virtual tenant to avoid duplicates
base_route_number = base_route.get('route_number')
if base_route_number and base_route_number.startswith('DEMO-'):
# For demo routes, append the virtual tenant ID to ensure uniqueness
# Use more characters from UUID and include a timestamp component to reduce collision risk
# Handle both string and UUID inputs for virtual_tenant_id
try:
tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
except (ValueError, TypeError):
# If it's already a UUID object, use it directly
tenant_uuid = virtual_tenant_id
# Use more characters to make it more unique
tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
# Use the single timestamp suffix generated at the start of the operation
route_number = f"{base_route_number}-{tenant_suffix}-{timestamp_suffix}"
else:
# For non-demo routes, use original route number
route_number = base_route_number

new_route = await distribution_service.route_repository.create_route({
'tenant_id': uuid.UUID(virtual_tenant_id),
'route_number': route_number,
'route_date': adjusted_route_date,
'vehicle_id': base_route.get('vehicle_id'),
'driver_id': base_route.get('driver_id'),
'total_distance_km': base_route.get('total_distance_km'),
'estimated_duration_minutes': base_route.get('estimated_duration_minutes'),
'route_sequence': route_sequence,
'status': base_route.get('status')
})
routes_cloned += 1

# Map old route ID to the new route ID returned by the repository
route_id_map[base_route.get('id')] = new_route['id']

# Clone shipments from base tenant
base_shipments = await distribution_service.shipment_repository.get_all_shipments_for_tenant(base_tenant_id)

shipments_cloned = 0
for base_shipment in base_shipments:
# Adjust shipment_date relative to session creation
adjusted_shipment_date = adjust_date_for_demo(
base_shipment.get('shipment_date'),
session_dt,
BASE_REFERENCE_DATE
)

# Map delivery_route_id to new route ID
old_route_id = base_shipment.get('delivery_route_id')
new_route_id = route_id_map.get(old_route_id) if old_route_id else None

# Generate unique shipment number for the virtual tenant to avoid duplicates
base_shipment_number = base_shipment.get('shipment_number')
if base_shipment_number and base_shipment_number.startswith('DEMO'):
# For demo shipments, append the virtual tenant ID to ensure uniqueness
# Use more characters from UUID and include a timestamp component to reduce collision risk
# Handle both string and UUID inputs for virtual_tenant_id
try:
tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
except (ValueError, TypeError):
# If it's already a UUID object, use it directly
tenant_uuid = virtual_tenant_id
# Use more characters to make it more unique
tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
# Use the single timestamp suffix generated at the start of the operation
shipment_number = f"{base_shipment_number}-{tenant_suffix}-{timestamp_suffix}"
else:
# For non-demo shipments, use original shipment number
shipment_number = base_shipment_number

# Map child_tenant_id to virtual child ID (THE KEY FIX)
base_child_id = base_shipment.get('child_tenant_id')
virtual_child_id = None
if base_child_id:
base_child_id_str = str(base_child_id)
if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
logger.debug(
"Mapped child tenant ID for shipment",
base_child_id=base_child_id_str,
virtual_child_id=str(virtual_child_id),
shipment_number=shipment_number,
session_id=session_id
)
else:
virtual_child_id = base_child_id # Fallback to original
else:
virtual_child_id = None

new_shipment = await distribution_service.shipment_repository.create_shipment({
'id': uuid.uuid4(),
'tenant_id': uuid.UUID(virtual_tenant_id),
'parent_tenant_id': uuid.UUID(virtual_tenant_id),
'child_tenant_id': virtual_child_id, # Mapped child tenant ID
'delivery_route_id': new_route_id,
'shipment_number': shipment_number,
'shipment_date': adjusted_shipment_date,
'status': base_shipment.get('status'),
'total_weight_kg': base_shipment.get('total_weight_kg'),
'total_volume_m3': base_shipment.get('total_volume_m3'),
'delivery_notes': base_shipment.get('delivery_notes')
})
shipments_cloned += 1

# Clone delivery schedules from base tenant
base_schedules = await distribution_service.schedule_repository.get_schedules_by_tenant(base_tenant_id)

schedules_cloned = 0
for base_schedule in base_schedules:
# Map child_tenant_id to virtual child ID
base_child_id = base_schedule.get('child_tenant_id')
virtual_child_id = None
if base_child_id:
base_child_id_str = str(base_child_id)
if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
logger.debug(
"Mapped child tenant ID for delivery schedule",
base_child_id=base_child_id_str,
virtual_child_id=str(virtual_child_id),
session_id=session_id
)
else:
virtual_child_id = base_child_id # Fallback to original
else:
virtual_child_id = None

new_schedule = await distribution_service.schedule_repository.create_schedule({
'id': uuid.uuid4(),
'parent_tenant_id': uuid.UUID(virtual_tenant_id),
'child_tenant_id': virtual_child_id, # Mapped child tenant ID
'schedule_name': base_schedule.get('schedule_name'),
'delivery_days': base_schedule.get('delivery_days'),
'delivery_time': base_schedule.get('delivery_time'),
'auto_generate_orders': base_schedule.get('auto_generate_orders'),
'lead_time_days': base_schedule.get('lead_time_days'),
'is_active': base_schedule.get('is_active')
})
schedules_cloned += 1

total_records = routes_cloned + shipments_cloned + schedules_cloned

logger.info(
"Distribution cloning completed successfully",
session_id=session_id,
routes_cloned=routes_cloned,
shipments_cloned=shipments_cloned,
schedules_cloned=schedules_cloned,
total_records=total_records,
child_mappings_applied=len(child_tenant_id_map),
is_enterprise=len(child_tenant_id_map) > 0
)


return {
"service": "distribution",
"status": "completed",
"records_cloned": result.get('shipment_count', 0) + 1, # shipments + 1 route
"details": result
"records_cloned": total_records,
"routes_cloned": routes_cloned,
"shipments_cloned": shipments_cloned,
"schedules_cloned": schedules_cloned
}

except Exception as e:
logger.error(f"Error cloning distribution data: {e}", exc_info=True)
# Don't fail the entire cloning process if distribution fails
# Don't fail the entire cloning process if distribution fails, but add more context
error_msg = f"Distribution cloning failed: {str(e)}"
logger.warning(f"Distribution cloning partially failed but continuing: {error_msg}")
return {
"service": "distribution",
"status": "failed",
"error": str(e)
"error": error_msg,
"records_cloned": 0,
"routes_cloned": 0,
"shipments_cloned": 0,
"schedules_cloned": 0
}
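adjust_date_for_demo and BASE_REFERENCE_DATE come from shared.utils.demo_dates and are not shown in this commit. Based on how they are called here (source date, session datetime, base reference date) and the November 25, 2025 anchor mentioned earlier, one plausible sketch of the date shifting is:

def adjust_date_for_demo(source_date, session_dt, base_reference_date):
    """Hypothetical sketch: shift a template date by the offset between the demo
    session's creation date and the fixed base reference date."""
    if source_date is None:
        return None
    offset = session_dt.date() - base_reference_date
    return source_date + offset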



@@ -9,11 +9,15 @@ import structlog
import os

from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
from shared.auth.tenant_access import verify_tenant_access_dep
from shared.routing.route_builder import RouteBuilder
from app.core.config import settings

logger = structlog.get_logger()

# Initialize route builder for distribution service
route_builder = RouteBuilder('distribution')
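RouteBuilder itself is not part of this diff. Judging from the decorators it replaces below (for example "/tenants/{tenant_id}/distribution/plans/generate" becoming route_builder.build_base_route("plans/generate")), build_base_route presumably expands a suffix into the tenant-scoped service path; a minimal sketch under that assumption:

class RouteBuilder:
    """Hypothetical sketch of shared.routing.route_builder.RouteBuilder."""

    def __init__(self, service_name: str):
        self.service_name = service_name

    def build_base_route(self, suffix: str) -> str:
        # e.g. "plans/generate" -> "/tenants/{tenant_id}/distribution/plans/generate"
        return f"/tenants/{{tenant_id}}/{self.service_name}/{suffix}"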


async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
"""Verify internal API key for service-to-service communication"""
@@ -27,13 +31,13 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
router = APIRouter()


@router.post("/tenants/{tenant_id}/distribution/plans/generate")
@router.post(route_builder.build_base_route("plans/generate"))
async def generate_daily_distribution_plan(
tenant_id: str,
target_date: date = Query(..., description="Date for which to generate distribution plan"),
vehicle_capacity_kg: float = Query(1000.0, description="Vehicle capacity in kg"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Generate daily distribution plan for internal transfers
@@ -75,14 +79,14 @@ async def generate_daily_distribution_plan(
raise HTTPException(status_code=500, detail=f"Failed to generate distribution plan: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/routes")
@router.get(route_builder.build_base_route("routes"))
async def get_delivery_routes(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for route filtering"),
date_to: Optional[date] = Query(None, description="End date for route filtering"),
status: Optional[str] = Query(None, description="Filter by route status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Get delivery routes with optional filtering
@@ -111,97 +115,16 @@ async def get_delivery_routes(
raise HTTPException(status_code=500, detail=f"Failed to get delivery routes: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/shipments")
async def get_shipments(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
status: Optional[str] = Query(None, description="Filter by shipment status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get shipments with optional filtering
"""
try:
# If no date range specified, default to today
if not date_from and not date_to:
date_from = date.today()
date_to = date.today()
elif not date_to:
date_to = date_from

shipments = []
current_date = date_from
while current_date <= date_to:
daily_shipments = await distribution_service.get_shipments_for_date(tenant_id, current_date)
shipments.extend(daily_shipments)
current_date = current_date + timedelta(days=1)

if status:
shipments = [s for s in shipments if s.get('status') == status]

return {"shipments": shipments}
except Exception as e:
logger.error("Error getting shipments", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")


@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
async def update_shipment_status(
tenant_id: str,
shipment_id: str,
status_update: dict, # Should be a Pydantic model in production
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Update shipment status
"""
try:
new_status = status_update.get('status')
if not new_status:
raise HTTPException(status_code=400, detail="Status is required")

user_id = "temp_user" # Would come from auth context
result = await distribution_service.update_shipment_status(
shipment_id=shipment_id,
new_status=new_status,
user_id=user_id,
metadata=status_update.get('metadata')
)
return result
except Exception as e:
logger.error("Error updating shipment status", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")


@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
async def upload_delivery_proof(
tenant_id: str,
shipment_id: str,
delivery_proof: dict, # Should be a Pydantic model in production
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Upload delivery proof (signature, photo, etc.)
"""
try:
# Implementation would handle signature/photo upload
# This is a placeholder until proper models are created
raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
except Exception as e:
logger.error("Error uploading delivery proof", error=str(e), exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/routes/{route_id}")
@router.get(route_builder.build_base_route("routes/{route_id}"))
async def get_route_detail(
tenant_id: str,
route_id: str,
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Get delivery route details

@@ -7,19 +7,23 @@ from typing import List, Optional
from datetime import date, timedelta

from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
from shared.auth.tenant_access import verify_tenant_access_dep
from shared.routing.route_builder import RouteBuilder

router = APIRouter()

# Initialize route builder for distribution service
route_builder = RouteBuilder('distribution')

@router.get("/tenants/{tenant_id}/distribution/shipments")

@router.get(route_builder.build_base_route("shipments"))
async def get_shipments(
tenant_id: str,
date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
status: Optional[str] = Query(None, description="Filter by shipment status"),
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
List shipments with optional filtering
@@ -47,13 +51,13 @@ async def get_shipments(
raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")


@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
@router.put(route_builder.build_base_route("shipments/{shipment_id}/status"))
async def update_shipment_status(
tenant_id: str,
shipment_id: str,
status_update: dict, # Should be a proper Pydantic model
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Update shipment status
@@ -75,38 +79,88 @@ async def update_shipment_status(
raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")


@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
@router.post(route_builder.build_base_route("shipments/{shipment_id}/delivery-proof"))
async def upload_delivery_proof(
tenant_id: str,
shipment_id: str,
delivery_proof: dict, # Should be a proper Pydantic model
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Upload delivery proof (signature, photo, etc.)

Expected delivery_proof fields:
- signature: Base64 encoded signature image or signature data
- photo_url: URL to uploaded delivery photo
- received_by_name: Name of person who received delivery
- delivery_notes: Optional notes about delivery
"""
try:
# Implementation would handle signature/photo upload
# This is a placeholder until proper models are created
raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
user_id = "temp_user_id" # Would come from auth context

# Prepare metadata for shipment update
metadata = {}
if 'signature' in delivery_proof:
metadata['signature'] = delivery_proof['signature']
if 'photo_url' in delivery_proof:
metadata['photo_url'] = delivery_proof['photo_url']
if 'received_by_name' in delivery_proof:
metadata['received_by_name'] = delivery_proof['received_by_name']
if 'delivery_notes' in delivery_proof:
metadata['delivery_notes'] = delivery_proof['delivery_notes']

# Update shipment with delivery proof
result = await distribution_service.update_shipment_status(
shipment_id=shipment_id,
new_status='delivered', # Automatically mark as delivered when proof uploaded
user_id=user_id,
metadata=metadata
)

if not result:
raise HTTPException(status_code=404, detail="Shipment not found")

return {
"message": "Delivery proof uploaded successfully",
"shipment_id": shipment_id,
"status": "delivered"
}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")
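For reference, a delivery_proof body matching the fields listed in the docstring could look like this (placeholder values):

# Illustrative delivery_proof payload (placeholder values)
example_delivery_proof = {
    "signature": "<base64-encoded signature data>",
    "photo_url": "https://example.com/uploads/delivery-photo.jpg",
    "received_by_name": "Jane Doe",
    "delivery_notes": "Left at loading dock"
}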


@router.get("/tenants/{tenant_id}/distribution/shipments/{shipment_id}")
@router.get(route_builder.build_base_route("shipments/{shipment_id}"))
async def get_shipment_detail(
tenant_id: str,
shipment_id: str,
distribution_service: object = Depends(get_distribution_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
verified_tenant: str = Depends(verify_tenant_access_dep)
):
"""
Get detailed information about a specific shipment
Get detailed information about a specific shipment including:
- Basic shipment info (number, date, status)
- Parent and child tenant details
- Delivery route assignment
- Purchase order reference
- Delivery proof (signature, photo, received by)
- Location tracking data
"""
try:
# Implementation would fetch detailed shipment information
# This is a placeholder until repositories are created
raise HTTPException(status_code=501, detail="Shipment detail endpoint not yet implemented")
# Access the shipment repository from the distribution service
shipment = await distribution_service.shipment_repository.get_shipment_by_id(shipment_id)

if not shipment:
raise HTTPException(status_code=404, detail="Shipment not found")

# Verify tenant access
if str(shipment.get('tenant_id')) != tenant_id:
raise HTTPException(status_code=403, detail="Access denied to this shipment")

return shipment
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to get shipment details: {str(e)}")
@@ -181,6 +181,33 @@ class DeliveryRouteRepository:
'updated_at': route.updated_at
}

async def get_all_routes_for_tenant(self, tenant_id: str) -> List[Dict[str, Any]]:
"""
Get all delivery routes for a tenant
"""
stmt = select(DeliveryRoute).where(DeliveryRoute.tenant_id == tenant_id)

result = await self.db_session.execute(stmt)
routes = result.scalars().all()

return [
{
'id': str(route.id),
'tenant_id': str(route.tenant_id),
'route_number': route.route_number,
'route_date': route.route_date,
'vehicle_id': route.vehicle_id,
'driver_id': route.driver_id,
'total_distance_km': route.total_distance_km,
'estimated_duration_minutes': route.estimated_duration_minutes,
'route_sequence': route.route_sequence,
'status': route.status.value if hasattr(route.status, 'value') else route.status,
'created_at': route.created_at,
'updated_at': route.updated_at
}
for route in routes
]
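get_routes_by_date_range is called by the service layer later in this commit but its body is not shown here; a sketch consistent with get_all_routes_for_tenant above, filtering on route_date, might look like this (assumed, not the actual implementation):

    async def get_routes_by_date_range(self, tenant_id: str, start_date, end_date) -> List[Dict[str, Any]]:
        """Hypothetical sketch: routes for a tenant whose route_date falls within [start_date, end_date]."""
        stmt = select(DeliveryRoute).where(
            DeliveryRoute.tenant_id == tenant_id,
            DeliveryRoute.route_date >= start_date,
            DeliveryRoute.route_date <= end_date
        )
        result = await self.db_session.execute(stmt)
        # _route_to_dict is a hypothetical helper mirroring the dict built in get_all_routes_for_tenant
        return [self._route_to_dict(route) for route in result.scalars().all()]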
|
||||
|
||||
async def delete_demo_routes_for_tenant(self, tenant_id: str) -> int:
|
||||
"""
|
||||
Delete all demo routes for a tenant
|
||||
|
||||
@@ -283,6 +283,42 @@ class ShipmentRepository:
|
||||
'count': len(updated_shipments)
|
||||
}
|
||||
|
||||
async def get_all_shipments_for_tenant(self, tenant_id: str) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get all shipments for a tenant
|
||||
"""
|
||||
stmt = select(Shipment).where(Shipment.tenant_id == tenant_id)
|
||||
|
||||
result = await self.db_session.execute(stmt)
|
||||
shipments = result.scalars().all()
|
||||
|
||||
return [
|
||||
{
|
||||
'id': str(shipment.id),
|
||||
'tenant_id': str(shipment.tenant_id),
|
||||
'parent_tenant_id': str(shipment.parent_tenant_id),
|
||||
'child_tenant_id': str(shipment.child_tenant_id),
|
||||
'purchase_order_id': str(shipment.purchase_order_id) if shipment.purchase_order_id else None,
|
||||
'delivery_route_id': str(shipment.delivery_route_id) if shipment.delivery_route_id else None,
|
||||
'shipment_number': shipment.shipment_number,
|
||||
'shipment_date': shipment.shipment_date,
|
||||
'current_location_lat': shipment.current_location_lat,
|
||||
'current_location_lng': shipment.current_location_lng,
|
||||
'last_tracked_at': shipment.last_tracked_at,
|
||||
'status': shipment.status.value if hasattr(shipment.status, 'value') else shipment.status,
|
||||
'actual_delivery_time': shipment.actual_delivery_time,
|
||||
'signature': shipment.signature,
|
||||
'photo_url': shipment.photo_url,
|
||||
'received_by_name': shipment.received_by_name,
|
||||
'delivery_notes': shipment.delivery_notes,
|
||||
'total_weight_kg': shipment.total_weight_kg,
|
||||
'total_volume_m3': shipment.total_volume_m3,
|
||||
'created_at': shipment.created_at,
|
||||
'updated_at': shipment.updated_at
|
||||
}
|
||||
for shipment in shipments
|
||||
]
|
||||
|
||||
async def delete_demo_shipments_for_tenant(self, tenant_id: str) -> int:
|
||||
"""
|
||||
Delete all demo shipments for a tenant
|
||||
|
||||
@@ -219,288 +219,8 @@ class DistributionService:
|
||||
# In a real implementation, this would publish to RabbitMQ
|
||||
logger.info(f"Distribution plan created event published for parent {parent_tenant_id}")
|
||||
|
||||
async def setup_demo_enterprise_distribution(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_ids: List[str],
|
||||
session_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Setup distribution routes and schedules for enterprise demo
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Setting up demo distribution for parent {parent_tenant_id} with {len(child_tenant_ids)} children")
|
||||
|
||||
# Get locations for all tenants
|
||||
parent_locations_response = await self.tenant_client.get_tenant_locations(parent_tenant_id)
|
||||
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
|
||||
|
||||
# Look for central production or warehouse location as fallback
|
||||
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
|
||||
if not parent_location:
|
||||
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
|
||||
if not parent_location:
|
||||
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
|
||||
if not parent_location:
|
||||
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)
|
||||
|
||||
# If no specific central location found, use first available location
|
||||
if not parent_location and parent_locations:
|
||||
parent_location = parent_locations[0]
|
||||
logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")
|
||||
|
||||
if not parent_location:
|
||||
raise ValueError(f"No location found for parent tenant {parent_tenant_id} to use as distribution center")
|
||||
|
||||
# Create delivery schedules for each child
|
||||
for child_id in child_tenant_ids:
|
||||
try:
|
||||
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
|
||||
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
|
||||
|
||||
# Look for retail outlet or store location as first choice
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
|
||||
if not child_location:
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
|
||||
if not child_location:
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
|
||||
|
||||
# If no specific retail location found, use first available location
|
||||
if not child_location and child_locations:
|
||||
child_location = child_locations[0]
|
||||
logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")
|
||||
|
||||
if not child_location:
|
||||
logger.warning(f"No location found for child tenant {child_id}")
|
||||
continue
|
||||
|
||||
# Create delivery schedule
|
||||
schedule_data = {
|
||||
'parent_tenant_id': parent_tenant_id,
|
||||
'child_tenant_id': child_id,
|
||||
'schedule_name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
|
||||
'delivery_days': "Mon,Wed,Fri", # Tri-weekly delivery
|
||||
'delivery_time': "09:00", # Morning delivery
|
||||
'auto_generate_orders': True,
|
||||
'lead_time_days': 1,
|
||||
'is_active': True
|
||||
}
|
||||
|
||||
# Create the delivery schedule record
|
||||
await self.create_delivery_schedule(schedule_data)
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
|
||||
continue
|
||||
|
||||
# Create sample delivery route for today
|
||||
today = date.today()
|
||||
delivery_data = []
|
||||
|
||||
# Prepare delivery information for each child
|
||||
for child_id in child_tenant_ids:
|
||||
try:
|
||||
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
|
||||
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
|
||||
|
||||
# Look for retail outlet or store location as first choice
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
|
||||
if not child_location:
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
|
||||
if not child_location:
|
||||
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
|
||||
|
||||
# If no specific retail location found, use first available location
|
||||
if not child_location and child_locations:
|
||||
child_location = child_locations[0]
|
||||
logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")
|
||||
|
||||
if child_location:
|
||||
# Ensure we have valid coordinates
|
||||
latitude = child_location.get('latitude')
|
||||
longitude = child_location.get('longitude')
|
||||
|
||||
if latitude is not None and longitude is not None:
|
||||
try:
|
||||
lat = float(latitude)
|
||||
lng = float(longitude)
|
||||
delivery_data.append({
|
||||
'id': f"demo_delivery_{child_id}",
|
||||
'child_tenant_id': child_id,
|
||||
'location': (lat, lng),
|
||||
'weight_kg': 150.0, # Fixed weight for demo
|
||||
'po_id': f"demo_po_{child_id}", # Would be actual PO ID in real implementation
|
||||
'items_count': 20
|
||||
})
|
||||
except (ValueError, TypeError):
|
||||
logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
|
||||
else:
|
||||
logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
|
||||
else:
|
||||
logger.warning(f"No location found for child delivery {child_id}, skipping")
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
|
||||
|
||||
# Optimize routes using VRP - ensure we have valid coordinates
|
||||
parent_latitude = parent_location.get('latitude')
|
||||
parent_longitude = parent_location.get('longitude')
|
||||
|
||||
if parent_latitude is None or parent_longitude is None:
|
||||
logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
|
||||
raise ValueError(f"Parent location {parent_tenant_id} missing coordinates")
|
||||
|
||||
try:
|
||||
depot_location = (float(parent_latitude), float(parent_longitude))
|
||||
except (ValueError, TypeError) as e:
|
||||
logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
|
||||
raise ValueError(f"Parent location {parent_tenant_id} has invalid coordinates: {e}")
|
||||
|
||||
optimization_result = await self.routing_optimizer.optimize_daily_routes(
|
||||
deliveries=delivery_data,
|
||||
depot_location=depot_location,
|
||||
vehicle_capacity_kg=1000.0 # Standard vehicle capacity
|
||||
)
|
||||
|
||||
# Create the delivery route for today
|
||||
# Use a random suffix to ensure unique route numbers
|
||||
import secrets
|
||||
unique_suffix = secrets.token_hex(4)[:8]
|
||||
route = await self.route_repository.create_route({
|
||||
'tenant_id': parent_tenant_id,
|
||||
'route_number': f"DEMO-{today.strftime('%Y%m%d')}-{unique_suffix}",
|
||||
'route_date': datetime.combine(today, datetime.min.time()),
|
||||
'total_distance_km': optimization_result.get('total_distance_km', 0),
|
||||
'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
|
||||
'route_sequence': optimization_result.get('routes', [])[0].get('route_sequence', []) if optimization_result.get('routes') else [],
|
||||
'status': 'planned'
|
||||
})
|
||||
|
||||
# Create shipment records for each delivery
|
||||
shipments = []
|
||||
for idx, delivery in enumerate(delivery_data):
|
||||
shipment = await self.shipment_repository.create_shipment({
|
||||
'tenant_id': parent_tenant_id,
|
||||
'parent_tenant_id': parent_tenant_id,
|
||||
'child_tenant_id': delivery['child_tenant_id'],
|
||||
'shipment_number': f"DEMOSHP-{today.strftime('%Y%m%d')}-{idx+1:03d}",
|
||||
'shipment_date': datetime.combine(today, datetime.min.time()),
|
||||
'status': 'pending',
|
||||
'total_weight_kg': delivery['weight_kg']
|
||||
})
|
||||
shipments.append(shipment)
|
||||
|
||||
# BUG-012 FIX: Clone historical data from template
|
||||
# Define template tenant IDs (matching seed script)
|
||||
TEMPLATE_PARENT_ID = "c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8"
|
||||
TEMPLATE_CHILD_IDS = [
|
||||
"d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9", # Madrid Centro
|
||||
"e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0", # Barcelona Gràcia
|
||||
"f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1" # Valencia Ruzafa
|
||||
]
|
||||
|
||||
            # Create mapping from template child IDs to new session child IDs
            # Assumption: child_tenant_ids are passed in same order (Madrid, Barcelona, Valencia)
            child_id_map = {}
            for idx, template_child_id in enumerate(TEMPLATE_CHILD_IDS):
                if idx < len(child_tenant_ids):
                    child_id_map[template_child_id] = child_tenant_ids[idx]
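            # Illustrative result, assuming three session children are supplied in the
            # Madrid/Barcelona/Valencia order noted above:
            #   child_id_map = {
            #       "d4e5f6a7-...": child_tenant_ids[0],  # Madrid Centro -> session Madrid
            #       "e5f6a7b8-...": child_tenant_ids[1],  # Barcelona Gràcia -> session Barcelona
            #       "f6a7b8c9-...": child_tenant_ids[2],  # Valencia Ruzafa -> session Valencia
            #   }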

            # Calculate date range for history (last 30 days)
            # Use demo reference date if available in session metadata, otherwise today
            # Note: session_id is passed, but we need to fetch metadata or infer date
            # For now, we'll use BASE_REFERENCE_DATE as the anchor, similar to the seed script
            end_date = BASE_REFERENCE_DATE
            start_date = end_date - timedelta(days=30)
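            # Note: the seed script further down populates BASE_REFERENCE_DATE -15..+15 days,
            # so this 30-day lookback only picks up the historical (past and current-day) slice
            # of that template data.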

            logger.info(f"Cloning historical distribution data from {start_date} to {end_date}")

            # Fetch historical routes from template parent
            historical_routes = await self.route_repository.get_routes_by_date_range(
                tenant_id=TEMPLATE_PARENT_ID,
                start_date=start_date,
                end_date=end_date
            )

            # Fetch historical shipments from template parent
            historical_shipments = await self.shipment_repository.get_shipments_by_date_range(
                tenant_id=TEMPLATE_PARENT_ID,
                start_date=start_date,
                end_date=end_date
            )

            logger.info(f"Found {len(historical_routes)} routes and {len(historical_shipments)} shipments to clone")

            # Clone routes
            route_id_map = {}  # Old route ID -> New route ID
            cloned_routes_count = 0

            for route_data in historical_routes:
                old_route_id = route_data['id']

                # Update route sequence with new child IDs
                new_sequence = []
                for stop in route_data.get('route_sequence', []):
                    new_stop = stop.copy()
                    if 'tenant_id' in new_stop and new_stop['tenant_id'] in child_id_map:
                        new_stop['tenant_id'] = child_id_map[new_stop['tenant_id']]
                    new_sequence.append(new_stop)

                # Create new route
                new_route = await self.route_repository.create_route({
                    'tenant_id': parent_tenant_id,
                    'route_number': route_data['route_number'],  # Keep same number for consistency
                    'route_date': route_data['route_date'],
                    'vehicle_id': route_data['vehicle_id'],
                    'driver_id': str(uuid.uuid4()),  # New driver
                    'total_distance_km': route_data['total_distance_km'],
                    'estimated_duration_minutes': route_data['estimated_duration_minutes'],
                    'route_sequence': new_sequence,
                    'status': route_data['status']
                })

                route_id_map[old_route_id] = str(new_route['id'])
                cloned_routes_count += 1

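            # route_id_map lets the cloned shipments below reference the newly created
            # routes instead of the template tenant's original route IDs.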
            # Clone shipments
            cloned_shipments_count = 0

            for shipment_data in historical_shipments:
                # Skip if child tenant not in our map (e.g. if we have fewer children than template)
                if shipment_data['child_tenant_id'] not in child_id_map:
                    continue

                # Map route ID
                new_route_id = None
                if shipment_data['delivery_route_id'] in route_id_map:
                    new_route_id = route_id_map[shipment_data['delivery_route_id']]

                # Create new shipment
                await self.shipment_repository.create_shipment({
                    'tenant_id': parent_tenant_id,
                    'parent_tenant_id': parent_tenant_id,
                    'child_tenant_id': child_id_map[shipment_data['child_tenant_id']],
                    'shipment_number': shipment_data['shipment_number'],
                    'shipment_date': shipment_data['shipment_date'],
                    'status': shipment_data['status'],
                    'total_weight_kg': shipment_data['total_weight_kg'],
                    'total_volume_m3': shipment_data['total_volume_m3'],
                    'delivery_route_id': new_route_id
                })
                cloned_shipments_count += 1

logger.info(f"Demo distribution setup completed: {cloned_routes_count} routes, {cloned_shipments_count} shipments cloned")
|
||||
|
||||
return {
|
||||
"status": "completed",
|
||||
"route_id": None, # No single route ID to return
|
||||
"shipment_count": cloned_shipments_count,
|
||||
"routes_count": cloned_routes_count,
|
||||
"total_distance_km": 0, # Not calculating total for history
|
||||
"session_id": session_id
|
||||
}
|
||||
|
||||
        except Exception as e:
            logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
            raise

    # Legacy setup_demo_enterprise_distribution method removed
    # Distribution now uses standard cloning pattern via /internal/demo/clone endpoint

    async def get_delivery_routes_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
        """

@@ -65,9 +65,10 @@ DELIVERY_WEEKDAYS = [0, 2, 4] # Monday, Wednesday, Friday

async def seed_distribution_history(db: AsyncSession):
    """
    Seed 30 days of historical distribution data (routes + shipments)
    Seed 30 days of distribution data (routes + shipments) centered around BASE_REFERENCE_DATE

    Creates delivery routes for Mon/Wed/Fri pattern going back 30 days from BASE_REFERENCE_DATE
    Creates delivery routes for Mon/Wed/Fri pattern spanning from 15 days before to 15 days after BASE_REFERENCE_DATE.
    This ensures data exists for today when BASE_REFERENCE_DATE is set to the current date.
    """
    logger.info("=" * 80)
    logger.info("🚚 Starting Demo Distribution History Seeding")
@@ -75,15 +76,18 @@ async def seed_distribution_history(db: AsyncSession):
    logger.info(f"Parent Tenant: {DEMO_TENANT_ENTERPRISE_CHAIN} (Obrador Madrid)")
    logger.info(f"Child Tenants: {len(CHILD_TENANTS)}")
    logger.info(f"Delivery Pattern: Mon/Wed/Fri (3x per week)")
    logger.info(f"History: 30 days from {BASE_REFERENCE_DATE}")
    logger.info(f"Date Range: {(BASE_REFERENCE_DATE - timedelta(days=15)).strftime('%Y-%m-%d')} to {(BASE_REFERENCE_DATE + timedelta(days=15)).strftime('%Y-%m-%d')}")
    logger.info(f"Reference Date (today): {BASE_REFERENCE_DATE.strftime('%Y-%m-%d')}")
    logger.info("")

    routes_created = 0
    shipments_created = 0

    # Generate 30 days of historical routes (working backwards from BASE_REFERENCE_DATE)
    for days_ago in range(30, 0, -1):
        delivery_date = BASE_REFERENCE_DATE - timedelta(days=days_ago)
    # Generate a 31-day window of routes centered on BASE_REFERENCE_DATE (-15 to +15 days)
    # This ensures we have past data, current data, and future data
    # Range is inclusive of start, exclusive of end, so -15 to 16 gives -15..15
    for days_offset in range(-15, 16):  # -15 to +15 = 31 days total
        delivery_date = BASE_REFERENCE_DATE + timedelta(days=days_offset)

        # Only create routes for Mon/Wed/Fri
        if delivery_date.weekday() not in DELIVERY_WEEKDAYS:
@@ -117,6 +121,11 @@ async def seed_distribution_history(db: AsyncSession):
            {"stop": 3, "tenant_id": str(DEMO_TENANT_CHILD_3), "location": "Valencia Ruzafa"}
        ]

        # Determine status based on whether the date is in the past or future
        # Past routes are completed, today and future routes are planned
        is_past = delivery_date < BASE_REFERENCE_DATE
        route_status = DeliveryRouteStatus.completed if is_past else DeliveryRouteStatus.planned
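        # Note: the comparison above is strict, so a route dated exactly BASE_REFERENCE_DATE
        # counts as "today" and is therefore left in planned status.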

        route = DeliveryRoute(
            id=uuid.uuid4(),
            tenant_id=DEMO_TENANT_ENTERPRISE_CHAIN,
@@ -125,7 +134,7 @@ async def seed_distribution_history(db: AsyncSession):
            total_distance_km=Decimal(str(round(total_distance_km, 2))),
            estimated_duration_minutes=estimated_duration_minutes,
            route_sequence=route_sequence,
            status=DeliveryRouteStatus.completed if days_ago > 1 else DeliveryRouteStatus.planned,  # Recent routes are planned, old ones completed
            status=route_status,
            driver_id=uuid.uuid4(),  # Use a random UUID for the driver_id
            vehicle_id=f"VEH-{random.choice(['001', '002', '003'])}",
            created_at=delivery_date - timedelta(days=1),  # Routes created day before
@@ -144,6 +153,9 @@ async def seed_distribution_history(db: AsyncSession):

            shipment_number = f"DEMOSHP-{delivery_date.strftime('%Y%m%d')}-{child_name.split()[0].upper()[:3]}"
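            # Illustrative value: for child_name "Madrid Centro" on an example date 2025-03-10
            # this yields "DEMOSHP-20250310-MAD"; actual dates derive from BASE_REFERENCE_DATE.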

            # Determine shipment status based on date
            shipment_status = ShipmentStatus.delivered if is_past else ShipmentStatus.pending

            shipment = Shipment(
                id=uuid.uuid4(),
                tenant_id=DEMO_TENANT_ENTERPRISE_CHAIN,
@@ -151,7 +163,7 @@ async def seed_distribution_history(db: AsyncSession):
                child_tenant_id=child_tenant_id,
                shipment_number=shipment_number,
                shipment_date=delivery_date,
                status=ShipmentStatus.delivered if days_ago > 1 else ShipmentStatus.pending,
                status=shipment_status,
                total_weight_kg=Decimal(str(round(shipment_weight, 2))),
                delivery_route_id=route.id,
                delivery_notes=f"Entrega regular a {child_name}",  # i.e. "Regular delivery to {child_name}"
