New alert service
@@ -4,10 +4,12 @@ Handles internal demo setup for enterprise tier
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any, List
from typing import Dict, Any, List, Optional
import structlog
from datetime import datetime
import uuid
import json
import time

from app.services.distribution_service import DistributionService
from app.api.dependencies import get_distribution_service

@@ -26,318 +28,9 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    return True


@router.post("/internal/demo/setup")
async def setup_demo_distribution(
    setup_request: dict,  # Contains parent_tenant_id, child_tenant_ids, session_id
    distribution_service: DistributionService = Depends(get_distribution_service),
    _: bool = Depends(verify_internal_api_key)
):
"""
|
||||
Internal endpoint to setup distribution for enterprise demo
|
||||
|
||||
Args:
|
||||
setup_request: Contains parent_tenant_id, child_tenant_ids, session_id
|
||||
"""
    try:
        parent_tenant_id = setup_request.get('parent_tenant_id')
        child_tenant_ids = setup_request.get('child_tenant_ids', [])
        session_id = setup_request.get('session_id')

        if not all([parent_tenant_id, child_tenant_ids, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: parent_tenant_id, child_tenant_ids, session_id"
            )

        logger.info("Setting up demo distribution",
                    parent=parent_tenant_id,
                    children=child_tenant_ids,
                    session_id=session_id)

        # Get locations for parent and children to set up delivery routes
        parent_locations_response = await distribution_service.tenant_client.get_tenant_locations(parent_tenant_id)

        # Check if parent_locations_response is None (which happens when the API call fails)
        if not parent_locations_response:
            logger.warning(f"No locations found for parent tenant {parent_tenant_id}")
            raise HTTPException(
                status_code=404,
                detail=f"No locations found for parent tenant {parent_tenant_id}. "
                       f"Ensure the tenant exists and has locations configured."
            )

        # Extract the actual locations array from the response object
        # The response format is {"locations": [...], "total": N}
        parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response

        # Look for central production or warehouse location as fallback
        parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
        if not parent_location:
            parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
        if not parent_location:
            parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
        if not parent_location:
            parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)

        # If no specific central location found, use first available location
        if not parent_location and parent_locations:
            parent_location = parent_locations[0]
            logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")

        # BUG-013 FIX: Use HTTPException instead of ValueError
        if not parent_location:
            raise HTTPException(
                status_code=404,
                detail=f"No location found for parent tenant {parent_tenant_id} to use as distribution center. "
                       f"Ensure the parent tenant has at least one location configured."
            )

        # Create delivery schedules for each child
        for child_id in child_tenant_ids:
            try:
                child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)

                # Check if child_locations_response is None (which happens when the API call fails)
                if not child_locations_response:
                    logger.warning(f"No locations found for child tenant {child_id}")
                    continue  # Skip this child tenant and continue with the next one

                # Extract the actual locations array from the response object
                # The response format is {"locations": [...], "total": N}
                child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response

                # Look for retail outlet or store location as first choice
                child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
                if not child_location:
                    child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
                if not child_location:
                    child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)

                # If no specific retail location found, use first available location
                if not child_location and child_locations:
                    child_location = child_locations[0]
                    logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")

                if not child_location:
                    logger.warning(f"No location found for child tenant {child_id}")
                    continue

                # Create delivery schedule
                schedule_data = {
                    'tenant_id': child_id,  # The child tenant that will receive deliveries
                    'target_parent_tenant_id': parent_tenant_id,  # The parent tenant that supplies
                    'target_child_tenant_ids': [child_id],  # Array of child tenant IDs in this schedule
                    'name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
'delivery_days': "Mon,Wed,Fri", # Tri-weekly delivery
|
||||
                    'delivery_time': "09:00",  # Morning delivery
                    'auto_generate_orders': True,
                    'lead_time_days': 1,
                    'is_active': True,
                    'created_by': parent_tenant_id,  # BUG FIX: Add required created_by field
                    'updated_by': parent_tenant_id  # BUG FIX: Add required updated_by field
                }

                # Create the delivery schedule record
                schedule = await distribution_service.create_delivery_schedule(schedule_data)
                logger.info(f"Created delivery schedule for {parent_tenant_id} to {child_id}")
            except Exception as e:
                logger.error(f"Error creating delivery schedule for child {child_id}: {e}", exc_info=True)
                continue  # Continue with the next child

        # BUG-012 FIX: Use demo reference date instead of actual today
        from datetime import date
        from shared.utils.demo_dates import BASE_REFERENCE_DATE

        # Get demo reference date from session metadata if available
        session_metadata = setup_request.get('session_metadata', {})
        session_created_at = session_metadata.get('session_created_at')

        if session_created_at:
            # Use the BASE_REFERENCE_DATE for consistent demo data dating
            # All demo data is anchored to this date (November 25, 2025)
            demo_today = BASE_REFERENCE_DATE
            logger.info(f"Using demo reference date: {demo_today}")
        else:
            # Fallback to today if no session metadata (shouldn't happen in production)
            demo_today = date.today()
            logger.warning(f"No session_created_at in metadata, using today: {demo_today}")

        delivery_data = []

        # Prepare delivery information for each child
        for child_id in child_tenant_ids:
            try:
                child_locations_response = await distribution_service.tenant_client.get_tenant_locations(child_id)

                # Check if child_locations_response is None (which happens when the API call fails)
                if not child_locations_response:
                    logger.warning(f"No locations found for child delivery {child_id}")
                    continue  # Skip this child tenant and continue with the next one

                # Extract the actual locations array from the response object
                # The response format is {"locations": [...], "total": N}
                child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response

                # Look for retail outlet or store location as first choice
                child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
                if not child_location:
                    child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
                if not child_location:
                    child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)

                # If no specific retail location found, use first available location
                if not child_location and child_locations:
                    child_location = child_locations[0]
                    logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")

                if child_location:
                    # Ensure we have valid coordinates
                    latitude = child_location.get('latitude')
                    longitude = child_location.get('longitude')

                    if latitude is not None and longitude is not None:
                        try:
                            lat = float(latitude)
                            lng = float(longitude)
                            delivery_data.append({
                                'id': f"demo_delivery_{child_id}",
                                'child_tenant_id': child_id,
                                'location': (lat, lng),
                                'weight_kg': 150.0,  # Fixed weight for demo
                                'po_id': f"demo_po_{child_id}",  # Would be actual PO ID in real implementation
                                'items_count': 20
                            })
                        except (ValueError, TypeError):
                            logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
                    else:
                        logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
                else:
                    logger.warning(f"No location found for child delivery {child_id}, skipping")
            except Exception as e:
                logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)

        # Optimize routes using VRP - ensure we have valid coordinates
        parent_latitude = parent_location.get('latitude')
        parent_longitude = parent_location.get('longitude')

        # BUG-013 FIX: Use HTTPException for coordinate validation errors
        if parent_latitude is None or parent_longitude is None:
            logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
            raise HTTPException(
                status_code=400,
                detail=f"Parent location {parent_tenant_id} missing coordinates. "
                       f"Latitude and longitude must be provided for distribution planning."
            )

        try:
            depot_location = (float(parent_latitude), float(parent_longitude))
        except (ValueError, TypeError) as e:
            logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
            raise HTTPException(
                status_code=400,
                detail=f"Parent location {parent_tenant_id} has invalid coordinates: {e}"
            )

        optimization_result = await distribution_service.routing_optimizer.optimize_daily_routes(
            deliveries=delivery_data,
            depot_location=depot_location,
            vehicle_capacity_kg=1000.0  # Standard vehicle capacity
        )

        # BUG-012 FIX: Create the delivery route using demo reference date
        routes = optimization_result.get('routes', [])
        route_sequence = routes[0].get('route_sequence', []) if routes else []

        # Use session_id suffix to ensure unique route numbers for concurrent demo sessions
        session_suffix = session_id.split('_')[-1][:8] if session_id else '001'
        route = await distribution_service.route_repository.create_route({
            'tenant_id': uuid.UUID(parent_tenant_id),
            'route_number': f"DEMO-{demo_today.strftime('%Y%m%d')}-{session_suffix}",
            'route_date': datetime.combine(demo_today, datetime.min.time()),
            'total_distance_km': optimization_result.get('total_distance_km', 0),
            'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
            'route_sequence': route_sequence,
            'status': 'planned'
        })

        # BUG-012 FIX: Create shipment records using demo reference date
        # Use session_id suffix to ensure unique shipment numbers
        shipments = []
        for idx, delivery in enumerate(delivery_data):
            shipment = await distribution_service.shipment_repository.create_shipment({
                'tenant_id': uuid.UUID(parent_tenant_id),
                'parent_tenant_id': uuid.UUID(parent_tenant_id),
                'child_tenant_id': uuid.UUID(delivery['child_tenant_id']),
                'shipment_number': f"DEMOSHP-{demo_today.strftime('%Y%m%d')}-{session_suffix}-{idx+1:03d}",
                'shipment_date': datetime.combine(demo_today, datetime.min.time()),
                'status': 'pending',
                'total_weight_kg': delivery['weight_kg']
            })
            shipments.append(shipment)

        logger.info(f"Demo distribution setup completed: 1 route, {len(shipments)} shipments")

        return {
            "status": "completed",
            "route_id": str(route['id']),
            "shipment_count": len(shipments),
            "total_distance_km": optimization_result.get('total_distance_km', 0),
            "session_id": session_id
        }

    except Exception as e:
        logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to setup demo distribution: {str(e)}")


@router.post("/internal/demo/cleanup")
async def cleanup_demo_distribution(
    cleanup_request: dict,  # Contains parent_tenant_id, child_tenant_ids, session_id
    distribution_service: DistributionService = Depends(get_distribution_service),
    _: bool = Depends(verify_internal_api_key)
):
"""
|
||||
Internal endpoint to cleanup distribution data for enterprise demo
|
||||
|
||||
Args:
|
||||
cleanup_request: Contains parent_tenant_id, child_tenant_ids, session_id
|
||||
"""
    try:
        parent_tenant_id = cleanup_request.get('parent_tenant_id')
        child_tenant_ids = cleanup_request.get('child_tenant_ids', [])
        session_id = cleanup_request.get('session_id')

        if not all([parent_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: parent_tenant_id, session_id"
            )

        logger.info("Cleaning up demo distribution",
                    parent=parent_tenant_id,
                    session_id=session_id)

        # Delete all demo routes and shipments for this parent tenant
        deleted_routes_count = await distribution_service.route_repository.delete_demo_routes_for_tenant(
            tenant_id=parent_tenant_id
        )

        deleted_shipments_count = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
            tenant_id=parent_tenant_id
        )

        logger.info(f"Demo distribution cleanup completed: {deleted_routes_count} routes, {deleted_shipments_count} shipments deleted")

        return {
            "status": "completed",
            "routes_deleted": deleted_routes_count,
            "shipments_deleted": deleted_shipments_count,
            "session_id": session_id
        }

    except Exception as e:
        logger.error(f"Error cleaning up demo distribution: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to cleanup demo distribution: {str(e)}")

# Legacy /internal/demo/setup and /internal/demo/cleanup endpoints removed
# Distribution now uses the standard /internal/demo/clone pattern like all other services
# Data is cloned from base template tenants via DataCloner
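
# Illustrative internal call to the clone endpoint defined below (host, API key, IDs,
# and the demo_account_type value are placeholders/examples; the query/body split
# simply mirrors the function signature):
#
#   import httpx
#   resp = httpx.post(
#       "http://<distribution-service-host>/internal/demo/clone",
#       params={
#           "base_tenant_id": "<base-template-tenant-uuid>",
#           "virtual_tenant_id": "<virtual-tenant-uuid>",
#           "demo_account_type": "enterprise",
#           "session_id": "<demo-session-id>",
#           "session_created_at": "2025-11-25T09:00:00Z",
#           "session_metadata": "<json-string-with-child-mappings>",
#       },
#       json={"virtual_tenant_id": "<virtual-tenant-uuid>", "session_id": "<demo-session-id>"},
#       headers={"X-Internal-API-Key": "<internal-api-key>"},
#   )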


@router.get("/internal/health")

@@ -357,64 +50,301 @@ async def internal_health_check(

@router.post("/internal/demo/clone")
async def clone_demo_data(
    clone_request: dict,
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    session_metadata: Optional[str] = None,
    distribution_service: DistributionService = Depends(get_distribution_service),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone/Setup distribution data for a virtual demo tenant

    Clone distribution data from base tenant to virtual tenant

    This follows the standard cloning pattern used by other services:
    1. Query base tenant data (routes, shipments, schedules)
    2. Clone to virtual tenant with ID substitution and date adjustment
    3. Return records cloned count

    Args:
        clone_request: Contains base_tenant_id, virtual_tenant_id, session_id, demo_account_type
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)
    """
    try:
        virtual_tenant_id = clone_request.get('virtual_tenant_id')
        session_id = clone_request.get('session_id')

        if not all([virtual_tenant_id, session_id]):
        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: virtual_tenant_id, session_id"
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        logger.info("Cloning distribution data",
        logger.info("Cloning distribution data from base tenant",
                    base_tenant_id=base_tenant_id,
                    virtual_tenant_id=virtual_tenant_id,
                    session_id=session_id)

        # 1. Fetch child tenants for the new virtual parent
        child_tenants = await distribution_service.tenant_client.get_child_tenants(virtual_tenant_id)

        if not child_tenants:
            logger.warning(f"No child tenants found for virtual parent {virtual_tenant_id}, skipping distribution setup")
            return {
                "status": "skipped",
                "reason": "no_child_tenants",
                "virtual_tenant_id": virtual_tenant_id
            }

        child_tenant_ids = [child['id'] for child in child_tenants]

        # 2. Call existing setup logic
        result = await distribution_service.setup_demo_enterprise_distribution(
            parent_tenant_id=virtual_tenant_id,
            child_tenant_ids=child_tenant_ids,
            session_id=session_id
        # Clean up any existing demo data for this virtual tenant to prevent conflicts
        logger.info("Cleaning up existing demo data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
        deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(virtual_tenant_id)
        deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(virtual_tenant_id)

        if deleted_routes > 0 or deleted_shipments > 0:
            logger.info("Cleaned up existing demo data",
                        virtual_tenant_id=virtual_tenant_id,
                        deleted_routes=deleted_routes,
                        deleted_shipments=deleted_shipments)

        # Generate a single timestamp suffix for this cloning operation to ensure uniqueness
        timestamp_suffix = str(int(time.time()))[-6:]  # Last 6 digits of timestamp

        # Parse session creation date for date adjustment
        from datetime import date, datetime, timezone
        from dateutil import parser as date_parser
        from shared.utils.demo_dates import BASE_REFERENCE_DATE, adjust_date_for_demo

        if session_created_at:
            if isinstance(session_created_at, str):
                session_dt = date_parser.parse(session_created_at)
            else:
                session_dt = session_created_at
        else:
            session_dt = datetime.now(timezone.utc)
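
        # Note: adjust_date_for_demo (imported above) is assumed here to shift each cloned
        # date out of the BASE_REFERENCE_DATE frame and into this session's timeframe using
        # session_dt; the actual offset arithmetic lives in shared.utils.demo_dates.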

        # Parse session_metadata to extract child tenant mappings for enterprise demos
        child_tenant_id_map = {}
        if session_metadata:
            try:
                metadata_dict = json.loads(session_metadata)
                child_configs = metadata_dict.get("child_configs", [])
                child_tenant_ids = metadata_dict.get("child_tenant_ids", [])

                # Build mapping: base_child_id -> virtual_child_id
                for idx, child_config in enumerate(child_configs):
                    if idx < len(child_tenant_ids):
                        base_child_id = child_config.get("base_tenant_id")
                        virtual_child_id = child_tenant_ids[idx]
                        if base_child_id and virtual_child_id:
                            child_tenant_id_map[base_child_id] = virtual_child_id

                logger.info(
                    "Built child tenant ID mapping for enterprise demo",
                    mapping_count=len(child_tenant_id_map),
                    session_id=session_id,
                    mappings=child_tenant_id_map
                )
            except Exception as e:
                logger.warning("Failed to parse session_metadata", error=str(e), session_id=session_id)

        # Clone delivery routes from base tenant
        base_routes = await distribution_service.route_repository.get_all_routes_for_tenant(base_tenant_id)

        routes_cloned = 0
        route_id_map = {}  # Map old route IDs to new route IDs

        for base_route in base_routes:
            # Adjust route_date relative to session creation
            adjusted_route_date = adjust_date_for_demo(
                base_route.get('route_date'),
                session_dt,
                BASE_REFERENCE_DATE
            )

            # Map child tenant IDs in route_sequence
            route_sequence = base_route.get('route_sequence', [])
            if child_tenant_id_map and route_sequence:
                mapped_sequence = []
                for stop in route_sequence:
                    if isinstance(stop, dict) and 'child_tenant_id' in stop:
                        base_child_id = str(stop['child_tenant_id'])
                        if base_child_id in child_tenant_id_map:
                            stop = {**stop, 'child_tenant_id': child_tenant_id_map[base_child_id]}
                            logger.debug(
                                "Mapped child_tenant_id in route_sequence",
                                base_child_id=base_child_id,
                                virtual_child_id=child_tenant_id_map[base_child_id],
                                session_id=session_id
                            )
                    mapped_sequence.append(stop)
                route_sequence = mapped_sequence

            # Generate unique route number for the virtual tenant to avoid duplicates
            base_route_number = base_route.get('route_number')
            if base_route_number and base_route_number.startswith('DEMO-'):
                # For demo routes, append the virtual tenant ID to ensure uniqueness
                # Use more characters from UUID and include a timestamp component to reduce collision risk
                # Handle both string and UUID inputs for virtual_tenant_id
                try:
                    tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
                except (ValueError, TypeError):
                    # If it's already a UUID object, use it directly
                    tenant_uuid = virtual_tenant_id
                # Use more characters to make it more unique
                tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
                # Use the single timestamp suffix generated at the start of the operation
                route_number = f"{base_route_number}-{tenant_suffix}-{timestamp_suffix}"
            else:
                # For non-demo routes, use original route number
                route_number = base_route_number
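
            # For example (values illustrative): base route number "DEMO-20251125-001" cloned
            # for virtual tenant 1f2e3d4c-5b6a-7980-9a8b-7c6d5e4f3a2b at Unix time 1764056789
            # becomes "DEMO-20251125-001-1f2e3d4c5b6a7980-056789".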

            new_route = await distribution_service.route_repository.create_route({
                'tenant_id': uuid.UUID(virtual_tenant_id),
                'route_number': route_number,
                'route_date': adjusted_route_date,
                'vehicle_id': base_route.get('vehicle_id'),
                'driver_id': base_route.get('driver_id'),
                'total_distance_km': base_route.get('total_distance_km'),
                'estimated_duration_minutes': base_route.get('estimated_duration_minutes'),
                'route_sequence': route_sequence,
                'status': base_route.get('status')
            })
            routes_cloned += 1

            # Map old route ID to the new route ID returned by the repository
            route_id_map[base_route.get('id')] = new_route['id']

        # Clone shipments from base tenant
        base_shipments = await distribution_service.shipment_repository.get_all_shipments_for_tenant(base_tenant_id)

        shipments_cloned = 0
        for base_shipment in base_shipments:
            # Adjust shipment_date relative to session creation
            adjusted_shipment_date = adjust_date_for_demo(
                base_shipment.get('shipment_date'),
                session_dt,
                BASE_REFERENCE_DATE
            )

            # Map delivery_route_id to new route ID
            old_route_id = base_shipment.get('delivery_route_id')
            new_route_id = route_id_map.get(old_route_id) if old_route_id else None

            # Generate unique shipment number for the virtual tenant to avoid duplicates
            base_shipment_number = base_shipment.get('shipment_number')
            if base_shipment_number and base_shipment_number.startswith('DEMO'):
                # For demo shipments, append the virtual tenant ID to ensure uniqueness
                # Use more characters from UUID and include a timestamp component to reduce collision risk
                # Handle both string and UUID inputs for virtual_tenant_id
                try:
                    tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
                except (ValueError, TypeError):
                    # If it's already a UUID object, use it directly
                    tenant_uuid = virtual_tenant_id
                # Use more characters to make it more unique
                tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
                # Use the single timestamp suffix generated at the start of the operation
                shipment_number = f"{base_shipment_number}-{tenant_suffix}-{timestamp_suffix}"
            else:
                # For non-demo shipments, use original shipment number
                shipment_number = base_shipment_number

            # Map child_tenant_id to virtual child ID (THE KEY FIX)
            base_child_id = base_shipment.get('child_tenant_id')
            virtual_child_id = None
            if base_child_id:
                base_child_id_str = str(base_child_id)
                if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
                    virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
                    logger.debug(
                        "Mapped child tenant ID for shipment",
                        base_child_id=base_child_id_str,
                        virtual_child_id=str(virtual_child_id),
                        shipment_number=shipment_number,
                        session_id=session_id
                    )
                else:
                    virtual_child_id = base_child_id  # Fallback to original
            else:
                virtual_child_id = None

            new_shipment = await distribution_service.shipment_repository.create_shipment({
                'id': uuid.uuid4(),
                'tenant_id': uuid.UUID(virtual_tenant_id),
                'parent_tenant_id': uuid.UUID(virtual_tenant_id),
                'child_tenant_id': virtual_child_id,  # Mapped child tenant ID
                'delivery_route_id': new_route_id,
                'shipment_number': shipment_number,
                'shipment_date': adjusted_shipment_date,
                'status': base_shipment.get('status'),
                'total_weight_kg': base_shipment.get('total_weight_kg'),
                'total_volume_m3': base_shipment.get('total_volume_m3'),
                'delivery_notes': base_shipment.get('delivery_notes')
            })
            shipments_cloned += 1

        # Clone delivery schedules from base tenant
        base_schedules = await distribution_service.schedule_repository.get_schedules_by_tenant(base_tenant_id)

        schedules_cloned = 0
        for base_schedule in base_schedules:
            # Map child_tenant_id to virtual child ID
            base_child_id = base_schedule.get('child_tenant_id')
            virtual_child_id = None
            if base_child_id:
                base_child_id_str = str(base_child_id)
                if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
                    virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
                    logger.debug(
                        "Mapped child tenant ID for delivery schedule",
                        base_child_id=base_child_id_str,
                        virtual_child_id=str(virtual_child_id),
                        session_id=session_id
                    )
                else:
                    virtual_child_id = base_child_id  # Fallback to original
            else:
                virtual_child_id = None

            new_schedule = await distribution_service.schedule_repository.create_schedule({
                'id': uuid.uuid4(),
                'parent_tenant_id': uuid.UUID(virtual_tenant_id),
                'child_tenant_id': virtual_child_id,  # Mapped child tenant ID
                'schedule_name': base_schedule.get('schedule_name'),
                'delivery_days': base_schedule.get('delivery_days'),
                'delivery_time': base_schedule.get('delivery_time'),
                'auto_generate_orders': base_schedule.get('auto_generate_orders'),
                'lead_time_days': base_schedule.get('lead_time_days'),
                'is_active': base_schedule.get('is_active')
            })
            schedules_cloned += 1

        total_records = routes_cloned + shipments_cloned + schedules_cloned

        logger.info(
            "Distribution cloning completed successfully",
            session_id=session_id,
            routes_cloned=routes_cloned,
            shipments_cloned=shipments_cloned,
            schedules_cloned=schedules_cloned,
            total_records=total_records,
            child_mappings_applied=len(child_tenant_id_map),
            is_enterprise=len(child_tenant_id_map) > 0
        )

        return {
            "service": "distribution",
            "status": "completed",
            "records_cloned": result.get('shipment_count', 0) + 1,  # shipments + 1 route
            "details": result
            "records_cloned": total_records,
            "routes_cloned": routes_cloned,
            "shipments_cloned": shipments_cloned,
            "schedules_cloned": schedules_cloned
        }

    except Exception as e:
        logger.error(f"Error cloning distribution data: {e}", exc_info=True)
        # Don't fail the entire cloning process if distribution fails
        # Don't fail the entire cloning process if distribution fails, but add more context
        error_msg = f"Distribution cloning failed: {str(e)}"
        logger.warning(f"Distribution cloning partially failed but continuing: {error_msg}")
        return {
            "service": "distribution",
            "status": "failed",
            "error": str(e)
            "error": error_msg,
            "records_cloned": 0,
            "routes_cloned": 0,
            "shipments_cloned": 0,
            "schedules_cloned": 0
        }

@@ -9,11 +9,15 @@ import structlog
import os

from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
from shared.auth.tenant_access import verify_tenant_access_dep
from shared.routing.route_builder import RouteBuilder
from app.core.config import settings

logger = structlog.get_logger()

# Initialize route builder for distribution service
route_builder = RouteBuilder('distribution')


async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify internal API key for service-to-service communication"""

@@ -27,13 +31,13 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
router = APIRouter()


@router.post("/tenants/{tenant_id}/distribution/plans/generate")
@router.post(route_builder.build_base_route("plans/generate"))
async def generate_daily_distribution_plan(
    tenant_id: str,
    target_date: date = Query(..., description="Date for which to generate distribution plan"),
    vehicle_capacity_kg: float = Query(1000.0, description="Vehicle capacity in kg"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Generate daily distribution plan for internal transfers

@@ -75,14 +79,14 @@ async def generate_daily_distribution_plan(
        raise HTTPException(status_code=500, detail=f"Failed to generate distribution plan: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/routes")
@router.get(route_builder.build_base_route("routes"))
async def get_delivery_routes(
    tenant_id: str,
    date_from: Optional[date] = Query(None, description="Start date for route filtering"),
    date_to: Optional[date] = Query(None, description="End date for route filtering"),
    status: Optional[str] = Query(None, description="Filter by route status"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get delivery routes with optional filtering

@@ -111,97 +115,16 @@ async def get_delivery_routes(
        raise HTTPException(status_code=500, detail=f"Failed to get delivery routes: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/shipments")
async def get_shipments(
    tenant_id: str,
    date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
    date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
    status: Optional[str] = Query(None, description="Filter by shipment status"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Get shipments with optional filtering
    """
    try:
        # If no date range specified, default to today
        if not date_from and not date_to:
            date_from = date.today()
            date_to = date.today()
        elif not date_to:
            date_to = date_from

        shipments = []
        current_date = date_from
        while current_date <= date_to:
            daily_shipments = await distribution_service.get_shipments_for_date(tenant_id, current_date)
            shipments.extend(daily_shipments)
            current_date = current_date + timedelta(days=1)

        if status:
            shipments = [s for s in shipments if s.get('status') == status]

        return {"shipments": shipments}
    except Exception as e:
        logger.error("Error getting shipments", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")


@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
async def update_shipment_status(
    tenant_id: str,
    shipment_id: str,
    status_update: dict,  # Should be a Pydantic model in production
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Update shipment status
    """
    try:
        new_status = status_update.get('status')
        if not new_status:
            raise HTTPException(status_code=400, detail="Status is required")

        user_id = "temp_user"  # Would come from auth context
        result = await distribution_service.update_shipment_status(
            shipment_id=shipment_id,
            new_status=new_status,
            user_id=user_id,
            metadata=status_update.get('metadata')
        )
        return result
    except Exception as e:
        logger.error("Error updating shipment status", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")


@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
async def upload_delivery_proof(
    tenant_id: str,
    shipment_id: str,
    delivery_proof: dict,  # Should be a Pydantic model in production
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Upload delivery proof (signature, photo, etc.)
    """
    try:
        # Implementation would handle signature/photo upload
        # This is a placeholder until proper models are created
        raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
    except Exception as e:
        logger.error("Error uploading delivery proof", error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/routes/{route_id}")
@router.get(route_builder.build_base_route("routes/{route_id}"))
async def get_route_detail(
    tenant_id: str,
    route_id: str,
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get delivery route details

@@ -7,19 +7,23 @@ from typing import List, Optional
from datetime import date, timedelta

from app.api.dependencies import get_distribution_service
from shared.auth.tenant_access import verify_tenant_permission_dep
from shared.auth.tenant_access import verify_tenant_access_dep
from shared.routing.route_builder import RouteBuilder

router = APIRouter()

# Initialize route builder for distribution service
route_builder = RouteBuilder('distribution')

@router.get("/tenants/{tenant_id}/distribution/shipments")

@router.get(route_builder.build_base_route("shipments"))
async def get_shipments(
    tenant_id: str,
    date_from: Optional[date] = Query(None, description="Start date for shipment filtering"),
    date_to: Optional[date] = Query(None, description="End date for shipment filtering"),
    status: Optional[str] = Query(None, description="Filter by shipment status"),
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    List shipments with optional filtering

@@ -47,13 +51,13 @@ async def get_shipments(
        raise HTTPException(status_code=500, detail=f"Failed to get shipments: {str(e)}")


@router.put("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/status")
@router.put(route_builder.build_base_route("shipments/{shipment_id}/status"))
async def update_shipment_status(
    tenant_id: str,
    shipment_id: str,
    status_update: dict,  # Should be a proper Pydantic model
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Update shipment status

@@ -75,38 +79,88 @@ async def update_shipment_status(
        raise HTTPException(status_code=500, detail=f"Failed to update shipment status: {str(e)}")


@router.post("/tenants/{tenant_id}/distribution/shipments/{shipment_id}/delivery-proof")
@router.post(route_builder.build_base_route("shipments/{shipment_id}/delivery-proof"))
async def upload_delivery_proof(
    tenant_id: str,
    shipment_id: str,
    delivery_proof: dict,  # Should be a proper Pydantic model
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Upload delivery proof (signature, photo, etc.)

    Expected delivery_proof fields:
    - signature: Base64 encoded signature image or signature data
    - photo_url: URL to uploaded delivery photo
    - received_by_name: Name of person who received delivery
    - delivery_notes: Optional notes about delivery
    """
    try:
        # Implementation would handle signature/photo upload
        # This is a placeholder until proper models are created
        raise HTTPException(status_code=501, detail="Delivery proof upload endpoint not yet implemented")
        user_id = "temp_user_id"  # Would come from auth context

        # Prepare metadata for shipment update
        metadata = {}
        if 'signature' in delivery_proof:
            metadata['signature'] = delivery_proof['signature']
        if 'photo_url' in delivery_proof:
            metadata['photo_url'] = delivery_proof['photo_url']
        if 'received_by_name' in delivery_proof:
            metadata['received_by_name'] = delivery_proof['received_by_name']
        if 'delivery_notes' in delivery_proof:
            metadata['delivery_notes'] = delivery_proof['delivery_notes']

        # Update shipment with delivery proof
        result = await distribution_service.update_shipment_status(
            shipment_id=shipment_id,
            new_status='delivered',  # Automatically mark as delivered when proof uploaded
            user_id=user_id,
            metadata=metadata
        )

        if not result:
            raise HTTPException(status_code=404, detail="Shipment not found")

        return {
            "message": "Delivery proof uploaded successfully",
            "shipment_id": shipment_id,
            "status": "delivered"
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to upload delivery proof: {str(e)}")


@router.get("/tenants/{tenant_id}/distribution/shipments/{shipment_id}")
@router.get(route_builder.build_base_route("shipments/{shipment_id}"))
async def get_shipment_detail(
    tenant_id: str,
    shipment_id: str,
    distribution_service: object = Depends(get_distribution_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
    verified_tenant: str = Depends(verify_tenant_access_dep)
):
    """
    Get detailed information about a specific shipment
    Get detailed information about a specific shipment including:
    - Basic shipment info (number, date, status)
    - Parent and child tenant details
    - Delivery route assignment
    - Purchase order reference
    - Delivery proof (signature, photo, received by)
    - Location tracking data
    """
    try:
        # Implementation would fetch detailed shipment information
        # This is a placeholder until repositories are created
        raise HTTPException(status_code=501, detail="Shipment detail endpoint not yet implemented")
        # Access the shipment repository from the distribution service
        shipment = await distribution_service.shipment_repository.get_shipment_by_id(shipment_id)

        if not shipment:
            raise HTTPException(status_code=404, detail="Shipment not found")

        # Verify tenant access
        if str(shipment.get('tenant_id')) != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied to this shipment")

        return shipment
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get shipment details: {str(e)}")