# services/training/app/services/data_client.py
"""
Training Service Data Client

Migrated to use the shared service clients - much simpler now!
"""

import structlog
from typing import Dict, Any, List, Optional

# Import the shared clients
from shared.clients import get_sales_client, get_external_client, get_service_clients
from app.core.config import settings

logger = structlog.get_logger()


class DataClient:
    """
    Data client for the training service.

    Now uses the shared data service clients under the hood.
    """

    def __init__(self):
        # Get the new specialized clients
        self.sales_client = get_sales_client(settings, "training")
        self.external_client = get_external_client(settings, "training")

        # Check whether the external client exposes the stored traffic data method
        self.supports_stored_traffic_data = hasattr(
            self.external_client, "get_stored_traffic_data_for_training"
        )
        if not self.supports_stored_traffic_data:
            logger.warning("Stored traffic data method not available in external client")

        # Alternatively, get all clients at once:
        # self.clients = get_service_clients(settings, "training")
        # Then use: self.clients.sales.get_sales_data(...) and
        #           self.clients.external.get_weather_forecast(...)

    async def fetch_sales_data(
        self,
        tenant_id: str,
        start_date: Optional[str] = None,
        end_date: Optional[str] = None,
        product_id: Optional[str] = None,
        fetch_all: bool = True,
    ) -> List[Dict[str, Any]]:
        """
        Fetch sales data for training.

        Args:
            tenant_id: Tenant identifier
            start_date: Start date in ISO format
            end_date: End date in ISO format
            product_id: Optional product filter
            fetch_all: If True, fetch ALL records using pagination (the
                original behavior). If False, fetch a single standard API
                response, which may be truncated.
        """
        try:
            if fetch_all:
                # Paginated method: fetches ALL records (original behavior)
                sales_data = await self.sales_client.get_all_sales_data(
                    tenant_id=tenant_id,
                    start_date=start_date,
                    end_date=end_date,
                    product_id=product_id,
                    aggregation="daily",
                    page_size=1000,  # Comply with the API page-size limit
                    max_pages=100,   # Safety limit (100k records max)
                )
            else:
                # Standard method: limited results
                sales_data = await self.sales_client.get_sales_data(
                    tenant_id=tenant_id,
                    start_date=start_date,
                    end_date=end_date,
                    product_id=product_id,
                    aggregation="daily",
                )
            sales_data = sales_data or []

            if sales_data:
                logger.info(f"Fetched {len(sales_data)} sales records",
                            tenant_id=tenant_id, product_id=product_id,
                            fetch_all=fetch_all)
                return sales_data
            else:
                logger.warning("No sales data returned", tenant_id=tenant_id)
                return []

        except Exception as e:
            logger.error(f"Error fetching sales data: {e}", tenant_id=tenant_id)
            return []
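
    # Illustrative usage sketch (hypothetical tenant id and dates, assuming an
    # async caller) contrasting the two fetch modes:
    #
    #     full_history = await data_client.fetch_sales_data(
    #         tenant_id="tenant-123",
    #         start_date="2024-01-01",
    #         end_date="2024-06-30",
    #         fetch_all=True,    # paginate through every record
    #     )
    #     quick_sample = await data_client.fetch_sales_data(
    #         tenant_id="tenant-123",
    #         fetch_all=False,   # single (possibly truncated) API response
    #     )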

    async def fetch_weather_data(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
    ) -> List[Dict[str, Any]]:
        """
        Fetch weather data for training.

        All of the error handling and retry logic now lives in the base client!
        """
        try:
            weather_data = await self.external_client.get_weather_historical(
                tenant_id=tenant_id,
                start_date=start_date,
                end_date=end_date,
                latitude=latitude,
                longitude=longitude,
            )

            if weather_data:
                logger.info(f"Fetched {len(weather_data)} weather records",
                            tenant_id=tenant_id)
                return weather_data
            else:
                logger.warning("No weather data returned", tenant_id=tenant_id)
                return []

        except Exception as e:
            logger.error(f"Error fetching weather data: {e}", tenant_id=tenant_id)
            return []
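
    # Illustrative call (hypothetical coordinates). latitude/longitude are
    # optional; when omitted, location handling is left to the external service:
    #
    #     weather = await data_client.fetch_weather_data(
    #         tenant_id="tenant-123",
    #         start_date="2024-01-01",
    #         end_date="2024-06-30",
    #         latitude=52.52,
    #         longitude=13.405,
    #     )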

    async def fetch_traffic_data_unified(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
        force_refresh: bool = False,
    ) -> List[Dict[str, Any]]:
        """
        Unified traffic data fetching with an intelligent cache-first strategy.

        Strategy:
        1. Check whether stored/cached traffic data exists for the date range.
        2. If it exists and force_refresh is False, return the cached data.
        3. If it does not exist (or force_refresh is True), fetch fresh data.
        4. The same range is never fetched twice within a single call.

        Args:
            tenant_id: Tenant identifier
            start_date: Start date string (ISO format)
            end_date: End date string (ISO format)
            latitude: Optional latitude for location-based data
            longitude: Optional longitude for location-based data
            force_refresh: If True, bypass the cache and fetch fresh data
        """
        cache_key = f"{tenant_id}_{start_date}_{end_date}_{latitude}_{longitude}"

        try:
            # Step 1: Try stored/cached data first (unless force_refresh)
            if not force_refresh and self.supports_stored_traffic_data:
                logger.info("Attempting to fetch cached traffic data",
                            tenant_id=tenant_id, cache_key=cache_key)
                try:
                    cached_data = await self.external_client.get_stored_traffic_data_for_training(
                        tenant_id=tenant_id,
                        start_date=start_date,
                        end_date=end_date,
                        latitude=latitude,
                        longitude=longitude,
                    )

                    if cached_data:
                        logger.info(f"✅ Using cached traffic data: {len(cached_data)} records",
                                    tenant_id=tenant_id)
                        return cached_data
                    else:
                        logger.info("No cached traffic data found, fetching fresh data",
                                    tenant_id=tenant_id)
                except Exception as cache_error:
                    logger.warning(f"Cache fetch failed, falling back to fresh data: {cache_error}",
                                   tenant_id=tenant_id)

            # Step 2: Fetch fresh data when there is no cache hit or force_refresh is set
            logger.info("Fetching fresh traffic data" + (" (force refresh)" if force_refresh else ""),
                        tenant_id=tenant_id)

            fresh_data = await self.external_client.get_traffic_data(
                tenant_id=tenant_id,
                start_date=start_date,
                end_date=end_date,
                latitude=latitude,
                longitude=longitude,
            )

            if fresh_data:
                logger.info(f"✅ Fetched fresh traffic data: {len(fresh_data)} records",
                            tenant_id=tenant_id)
                return fresh_data
            else:
                logger.warning("No fresh traffic data available", tenant_id=tenant_id)
                return []

        except Exception as e:
            logger.error(f"Error in unified traffic data fetch: {e}",
                         tenant_id=tenant_id, cache_key=cache_key)
            return []
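
    # Illustrative sketch of the cache-first flow (hypothetical values):
    #
    #     traffic = await data_client.fetch_traffic_data_unified(
    #         tenant_id="tenant-123",
    #         start_date="2024-01-01",
    #         end_date="2024-01-31",
    #         latitude=52.52,
    #         longitude=13.405,
    #     )   # cached records when available, otherwise fresh data
    #
    #     traffic = await data_client.fetch_traffic_data_unified(
    #         tenant_id="tenant-123",
    #         start_date="2024-01-01",
    #         end_date="2024-01-31",
    #         force_refresh=True,   # bypass the cache entirely
    #     )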

    # Legacy methods for backward compatibility - these now delegate to the
    # unified method.

    async def fetch_traffic_data(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
    ) -> List[Dict[str, Any]]:
        """Legacy method - delegates to the unified fetcher with a cache-first strategy."""
        logger.info("Legacy fetch_traffic_data called - delegating to unified method",
                    tenant_id=tenant_id)
        return await self.fetch_traffic_data_unified(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude,
            force_refresh=False,  # Cache-first for legacy calls
        )

    async def fetch_stored_traffic_data_for_training(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
    ) -> List[Dict[str, Any]]:
        """Legacy method - delegates to the unified fetcher with a cache-first strategy."""
        logger.info("Legacy fetch_stored_traffic_data_for_training called - delegating to unified method",
                    tenant_id=tenant_id)
        return await self.fetch_traffic_data_unified(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude,
            force_refresh=False,  # Cache-first for training calls
        )

    async def refresh_traffic_data(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
    ) -> List[Dict[str, Any]]:
        """Convenience method to force-refresh traffic data."""
        logger.info("Force refreshing traffic data (bypassing cache)", tenant_id=tenant_id)
        return await self.fetch_traffic_data_unified(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude,
            force_refresh=True,  # Always fetch fresh data
        )

    async def validate_data_quality(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
    ) -> Dict[str, Any]:
        """
        Validate data quality before training.
        """
        try:
            # Note: validate_data_quality may still need to be implemented in
            # one of the new services:
            # validation_result = await self.sales_client.validate_data_quality(
            #     tenant_id=tenant_id,
            #     start_date=start_date,
            #     end_date=end_date,
            # )

            # Temporary implementation - assume the data is valid for now
            validation_result = {"is_valid": True, "message": "Validation temporarily disabled"}

            if validation_result:
                logger.info("Data validation completed",
                            tenant_id=tenant_id,
                            is_valid=validation_result.get("is_valid", False))
                return validation_result
            else:
                logger.warning("Data validation failed", tenant_id=tenant_id)
                return {"is_valid": False, "errors": ["Validation service unavailable"]}

        except Exception as e:
            logger.error(f"Error validating data: {e}", tenant_id=tenant_id)
            return {"is_valid": False, "errors": [str(e)]}
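
    # A minimal local sanity check that could stand in until the service-side
    # validation exists. This is a sketch, not the planned implementation; it
    # assumes sales records are dicts carrying a "date" key:
    #
    #     sales = await self.fetch_sales_data(tenant_id, start_date, end_date)
    #     missing_dates = sum(1 for row in sales if not row.get("date"))
    #     validation_result = {
    #         "is_valid": bool(sales) and missing_dates == 0,
    #         "record_count": len(sales),
    #         "records_missing_date": missing_dates,
    #     }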


# Global instance - same interface as before, with a much simpler implementation
data_client = DataClient()
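
# Illustrative import-and-use sketch from elsewhere in the training service
# (module path inferred from this file's location; tenant id and dates are
# hypothetical):
#
#     from app.services.data_client import data_client
#
#     sales = await data_client.fetch_sales_data(tenant_id="tenant-123")
#     traffic = await data_client.fetch_traffic_data_unified(
#         tenant_id="tenant-123",
#         start_date="2024-01-01",
#         end_date="2024-01-31",
#     )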