New alert service

This commit is contained in:
Urtzi Alfaro
2025-12-05 20:07:01 +01:00
parent 1fe3a73549
commit 667e6e0404
393 changed files with 26002 additions and 61033 deletions


@@ -219,288 +219,8 @@ class DistributionService:
# In a real implementation, this would publish to RabbitMQ
logger.info(f"Distribution plan created event published for parent {parent_tenant_id}")
async def setup_demo_enterprise_distribution(
self,
parent_tenant_id: str,
child_tenant_ids: List[str],
session_id: str
) -> Dict[str, Any]:
"""
Setup distribution routes and schedules for enterprise demo
"""
try:
logger.info(f"Setting up demo distribution for parent {parent_tenant_id} with {len(child_tenant_ids)} children")
# Get locations for all tenants
parent_locations_response = await self.tenant_client.get_tenant_locations(parent_tenant_id)
parent_locations = parent_locations_response.get("locations", []) if isinstance(parent_locations_response, dict) else parent_locations_response
# Prefer a central_production location, falling back to warehouse, then names starting with 'central' or 'main'
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'central_production'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('location_type') == 'warehouse'), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('central')), None)
if not parent_location:
parent_location = next((loc for loc in parent_locations if loc.get('name', '').lower().startswith('main')), None)
# If no specific central location found, use first available location
if not parent_location and parent_locations:
parent_location = parent_locations[0]
logger.warning(f"No central production location found for parent tenant {parent_tenant_id}, using first location: {parent_location.get('name', 'unnamed')}")
if not parent_location:
raise ValueError(f"No location found for parent tenant {parent_tenant_id} to use as distribution center")
# Create delivery schedules for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Prefer a retail_outlet location, falling back to store, then branch
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child tenant {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if not child_location:
logger.warning(f"No location found for child tenant {child_id}")
continue
# Create delivery schedule
schedule_data = {
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': child_id,
'schedule_name': f"Demo Schedule: {child_location.get('name', f'Child {child_id}')}",
'delivery_days': "Mon,Wed,Fri", # Three deliveries per week
'delivery_time': "09:00", # Morning delivery
'auto_generate_orders': True,
'lead_time_days': 1,
'is_active': True
}
# Create the delivery schedule record
await self.create_delivery_schedule(schedule_data)
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
continue
# Create sample delivery route for today
today = date.today()
delivery_data = []
# Prepare delivery information for each child
for child_id in child_tenant_ids:
try:
child_locations_response = await self.tenant_client.get_tenant_locations(child_id)
child_locations = child_locations_response.get("locations", []) if isinstance(child_locations_response, dict) else child_locations_response
# Prefer a retail_outlet location, falling back to store, then branch
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'retail_outlet'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'store'), None)
if not child_location:
child_location = next((loc for loc in child_locations if loc.get('location_type') == 'branch'), None)
# If no specific retail location found, use first available location
if not child_location and child_locations:
child_location = child_locations[0]
logger.warning(f"No retail outlet location found for child delivery {child_id}, using first location: {child_location.get('name', 'unnamed')}")
if child_location:
# Ensure we have valid coordinates
latitude = child_location.get('latitude')
longitude = child_location.get('longitude')
if latitude is not None and longitude is not None:
try:
lat = float(latitude)
lng = float(longitude)
delivery_data.append({
'id': f"demo_delivery_{child_id}",
'child_tenant_id': child_id,
'location': (lat, lng),
'weight_kg': 150.0, # Fixed weight for demo
'po_id': f"demo_po_{child_id}", # Would be actual PO ID in real implementation
'items_count': 20
})
except (ValueError, TypeError):
logger.warning(f"Invalid coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"Missing coordinates for child {child_id}, skipping: lat={latitude}, lng={longitude}")
else:
logger.warning(f"No location found for child delivery {child_id}, skipping")
except Exception as e:
logger.error(f"Error processing child location for {child_id}: {e}", exc_info=True)
# Optimize routes using VRP - ensure we have valid coordinates
parent_latitude = parent_location.get('latitude')
parent_longitude = parent_location.get('longitude')
if parent_latitude is None or parent_longitude is None:
logger.error(f"Missing coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}")
raise ValueError(f"Parent location {parent_tenant_id} missing coordinates")
try:
depot_location = (float(parent_latitude), float(parent_longitude))
except (ValueError, TypeError) as e:
logger.error(f"Invalid coordinates for parent location {parent_tenant_id}: lat={parent_latitude}, lng={parent_longitude}, error: {e}")
raise ValueError(f"Parent location {parent_tenant_id} has invalid coordinates: {e}")
optimization_result = await self.routing_optimizer.optimize_daily_routes(
deliveries=delivery_data,
depot_location=depot_location,
vehicle_capacity_kg=1000.0 # Standard vehicle capacity
)
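# The result is consumed below assuming roughly this shape (keys inferred from the
# usage that follows; the values here are illustrative only):
#     {
#         'total_distance_km': 42.5,
#         'estimated_duration_minutes': 95,
#         'routes': [
#             {'route_sequence': [{'id': 'demo_delivery_<child_id>', ...}, ...]}
#         ]
#     }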
# Create the delivery route for today
# Use a random suffix to ensure unique route numbers
import secrets
unique_suffix = secrets.token_hex(4)  # 4 random bytes -> 8 hex characters
route = await self.route_repository.create_route({
'tenant_id': parent_tenant_id,
'route_number': f"DEMO-{today.strftime('%Y%m%d')}-{unique_suffix}",
'route_date': datetime.combine(today, datetime.min.time()),
'total_distance_km': optimization_result.get('total_distance_km', 0),
'estimated_duration_minutes': optimization_result.get('estimated_duration_minutes', 0),
'route_sequence': optimization_result.get('routes', [])[0].get('route_sequence', []) if optimization_result.get('routes') else [],
'status': 'planned'
})
# Create shipment records for each delivery
shipments = []
for idx, delivery in enumerate(delivery_data):
shipment = await self.shipment_repository.create_shipment({
'tenant_id': parent_tenant_id,
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': delivery['child_tenant_id'],
'shipment_number': f"DEMOSHP-{today.strftime('%Y%m%d')}-{idx+1:03d}",
'shipment_date': datetime.combine(today, datetime.min.time()),
'status': 'pending',
'total_weight_kg': delivery['weight_kg']
})
shipments.append(shipment)
# BUG-012 FIX: Clone historical data from template
# Define template tenant IDs (matching seed script)
TEMPLATE_PARENT_ID = "c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8"
TEMPLATE_CHILD_IDS = [
"d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9", # Madrid Centro
"e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0", # Barcelona Gràcia
"f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1" # Valencia Ruzafa
]
# Create mapping from template child IDs to new session child IDs
# Assumption: child_tenant_ids are passed in same order (Madrid, Barcelona, Valencia)
child_id_map = {}
for idx, template_child_id in enumerate(TEMPLATE_CHILD_IDS):
if idx < len(child_tenant_ids):
child_id_map[template_child_id] = child_tenant_ids[idx]
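# With three session children this yields, for example:
#     {
#         "d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9": child_tenant_ids[0],  # Madrid Centro
#         "e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0": child_tenant_ids[1],  # Barcelona Gràcia
#         "f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1": child_tenant_ids[2],  # Valencia Ruzafa
#     }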
# Calculate date range for history (last 30 days)
# Ideally this would use the demo reference date from session metadata, falling back to today
# Note: only session_id is passed here, and fetching its metadata is not implemented
# For now, anchor on BASE_REFERENCE_DATE, matching the seed script
end_date = BASE_REFERENCE_DATE
start_date = end_date - timedelta(days=30)
logger.info(f"Cloning historical distribution data from {start_date} to {end_date}")
# Fetch historical routes from template parent
historical_routes = await self.route_repository.get_routes_by_date_range(
tenant_id=TEMPLATE_PARENT_ID,
start_date=start_date,
end_date=end_date
)
# Fetch historical shipments from template parent
historical_shipments = await self.shipment_repository.get_shipments_by_date_range(
tenant_id=TEMPLATE_PARENT_ID,
start_date=start_date,
end_date=end_date
)
logger.info(f"Found {len(historical_routes)} routes and {len(historical_shipments)} shipments to clone")
# Clone routes
route_id_map = {} # Old route ID -> New route ID
cloned_routes_count = 0
for route_data in historical_routes:
old_route_id = route_data['id']
# Update route sequence with new child IDs
new_sequence = []
for stop in route_data.get('route_sequence', []):
new_stop = stop.copy()
if 'tenant_id' in new_stop and new_stop['tenant_id'] in child_id_map:
new_stop['tenant_id'] = child_id_map[new_stop['tenant_id']]
new_sequence.append(new_stop)
# Create new route
new_route = await self.route_repository.create_route({
'tenant_id': parent_tenant_id,
'route_number': route_data['route_number'], # Keep same number for consistency
'route_date': route_data['route_date'],
'vehicle_id': route_data['vehicle_id'],
'driver_id': str(uuid.uuid4()), # New driver
'total_distance_km': route_data['total_distance_km'],
'estimated_duration_minutes': route_data['estimated_duration_minutes'],
'route_sequence': new_sequence,
'status': route_data['status']
})
route_id_map[old_route_id] = str(new_route['id'])
cloned_routes_count += 1
# Clone shipments
cloned_shipments_count = 0
for shipment_data in historical_shipments:
# Skip if child tenant not in our map (e.g. if we have fewer children than template)
if shipment_data['child_tenant_id'] not in child_id_map:
continue
# Map route ID
new_route_id = None
if shipment_data['delivery_route_id'] in route_id_map:
new_route_id = route_id_map[shipment_data['delivery_route_id']]
# Create new shipment
await self.shipment_repository.create_shipment({
'tenant_id': parent_tenant_id,
'parent_tenant_id': parent_tenant_id,
'child_tenant_id': child_id_map[shipment_data['child_tenant_id']],
'shipment_number': shipment_data['shipment_number'],
'shipment_date': shipment_data['shipment_date'],
'status': shipment_data['status'],
'total_weight_kg': shipment_data['total_weight_kg'],
'total_volume_m3': shipment_data['total_volume_m3'],
'delivery_route_id': new_route_id
})
cloned_shipments_count += 1
logger.info(f"Demo distribution setup completed: {cloned_routes_count} routes, {cloned_shipments_count} shipments cloned")
return {
"status": "completed",
"route_id": None, # No single route ID to return
"shipment_count": cloned_shipments_count,
"routes_count": cloned_routes_count,
"total_distance_km": 0, # Not calculating total for history
"session_id": session_id
}
except Exception as e:
logger.error(f"Error setting up demo distribution: {e}", exc_info=True)
raise
# Legacy setup_demo_enterprise_distribution method removed
# Distribution now uses standard cloning pattern via /internal/demo/clone endpoint
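# A rough illustration of how a caller might hit that endpoint with httpx
# (the path comes from the comment above; the payload fields are assumptions):
#
#     import httpx
#
#     async def clone_demo_distribution(base_url: str, payload: dict) -> dict:
#         async with httpx.AsyncClient(base_url=base_url) as client:
#             response = await client.post("/internal/demo/clone", json=payload)
#             response.raise_for_status()
#             return response.json()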
async def get_delivery_routes_for_date(self, tenant_id: str, target_date: date) -> List[Dict[str, Any]]:
"""