REFACTOR external service and improve websocket training
services/external/app/external/base_client.py
@@ -50,8 +50,20 @@ class BaseAPIClient:
                 return response_data
 
             except httpx.HTTPStatusError as e:
                 logger.error("HTTP error", status_code=e.response.status_code, url=url,
                              response_text=e.response.text[:200], attempt=attempt + 1)
+
+                # Handle rate limiting (429) with longer backoff
+                if e.response.status_code == 429:
+                    import asyncio
+                    # Exponential backoff: 5s, 15s, 45s for rate limits
+                    wait_time = 5 * (3 ** attempt)
+                    logger.warning(f"Rate limit hit, waiting {wait_time}s before retry",
+                                   attempt=attempt + 1, max_attempts=self.retries)
+                    await asyncio.sleep(wait_time)
+                    if attempt < self.retries - 1:
+                        continue
+
                 if attempt == self.retries - 1:  # Last attempt
                     return None
             except httpx.RequestError as e:
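For context, a minimal standalone sketch of the 429 backoff behaviour this hunk adds, assuming an httpx-based caller; the name fetch_with_backoff and its parameters are illustrative and not part of this repository:

import asyncio
from typing import Any, Optional

import httpx


async def fetch_with_backoff(url: str, retries: int = 3, timeout: float = 30.0) -> Optional[Any]:
    """Illustrative sketch: retry a GET, sleeping 5s, 15s, 45s on HTTP 429."""
    async with httpx.AsyncClient(timeout=timeout) as client:
        for attempt in range(retries):
            try:
                response = await client.get(url)
                response.raise_for_status()
                return response.json()
            except httpx.HTTPStatusError as e:
                if e.response.status_code == 429 and attempt < retries - 1:
                    # Same schedule as the hunk above: 5 * 3**attempt seconds
                    await asyncio.sleep(5 * (3 ** attempt))
                    continue
                # Other HTTP errors, or rate-limited on the last attempt: give up
                return None
            except httpx.RequestError:
                return None
    return None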
@@ -72,51 +84,87 @@ class BaseAPIClient:
         return None
 
     async def _fetch_url_directly(self, url: str, headers: Optional[Dict] = None) -> Optional[Dict[str, Any]]:
-        """Fetch data directly from a full URL (for AEMET datos URLs)"""
-        try:
-            request_headers = headers or {}
-
-            logger.debug("Making direct URL request", url=url)
-
-            async with httpx.AsyncClient(timeout=self.timeout) as client:
-                response = await client.get(url, headers=request_headers)
-                response.raise_for_status()
-
-                # Handle encoding issues common with Spanish data sources
-                try:
-                    response_data = response.json()
-                except UnicodeDecodeError:
-                    logger.warning("UTF-8 decode failed, trying alternative encodings", url=url)
-                    # Try common Spanish encodings
-                    for encoding in ['latin-1', 'windows-1252', 'iso-8859-1']:
-                        try:
-                            text_content = response.content.decode(encoding)
-                            import json
-                            response_data = json.loads(text_content)
-                            logger.info("Successfully decoded with encoding", encoding=encoding)
-                            break
-                        except (UnicodeDecodeError, json.JSONDecodeError):
-                            continue
-                    else:
-                        logger.error("Failed to decode response with any encoding", url=url)
-                        return None
-
-                logger.debug("Direct URL response received",
-                             status_code=response.status_code,
-                             data_type=type(response_data),
-                             data_length=len(response_data) if isinstance(response_data, (list, dict)) else "unknown")
-
-                return response_data
-
-        except httpx.HTTPStatusError as e:
-            logger.error("HTTP error in direct fetch", status_code=e.response.status_code, url=url)
-            return None
-        except httpx.RequestError as e:
-            logger.error("Request error in direct fetch", error=str(e), url=url)
-            return None
-        except Exception as e:
-            logger.error("Unexpected error in direct fetch", error=str(e), url=url)
-            return None
+        """Fetch data directly from a full URL (for AEMET datos URLs) with retry logic"""
+        request_headers = headers or {}
+
+        logger.debug("Making direct URL request", url=url)
+
+        # Retry logic for unstable AEMET datos URLs
+        for attempt in range(self.retries):
+            try:
+                async with httpx.AsyncClient(timeout=self.timeout) as client:
+                    response = await client.get(url, headers=request_headers)
+                    response.raise_for_status()
+
+                    # Handle encoding issues common with Spanish data sources
+                    try:
+                        response_data = response.json()
+                    except UnicodeDecodeError:
+                        logger.warning("UTF-8 decode failed, trying alternative encodings", url=url)
+                        # Try common Spanish encodings
+                        for encoding in ['latin-1', 'windows-1252', 'iso-8859-1']:
+                            try:
+                                text_content = response.content.decode(encoding)
+                                import json
+                                response_data = json.loads(text_content)
+                                logger.info("Successfully decoded with encoding", encoding=encoding)
+                                break
+                            except (UnicodeDecodeError, json.JSONDecodeError):
+                                continue
+                        else:
+                            logger.error("Failed to decode response with any encoding", url=url)
+                            if attempt < self.retries - 1:
+                                continue
+                            return None
+
+                    logger.debug("Direct URL response received",
+                                 status_code=response.status_code,
+                                 data_type=type(response_data),
+                                 data_length=len(response_data) if isinstance(response_data, (list, dict)) else "unknown")
+
+                    return response_data
+
+            except httpx.HTTPStatusError as e:
+                logger.error("HTTP error in direct fetch",
+                             status_code=e.response.status_code,
+                             url=url,
+                             attempt=attempt + 1)
+
+                # On last attempt, return None
+                if attempt == self.retries - 1:
+                    return None
+
+                # Wait before retry
+                import asyncio
+                wait_time = 2 ** attempt  # 1s, 2s, 4s
+                logger.info(f"Retrying datos URL in {wait_time}s",
+                            attempt=attempt + 1, max_attempts=self.retries)
+                await asyncio.sleep(wait_time)
+
+            except httpx.RequestError as e:
+                logger.error("Request error in direct fetch",
+                             error=str(e), url=url, attempt=attempt + 1)
+
+                # On last attempt, return None
+                if attempt == self.retries - 1:
+                    return None
+
+                # Wait before retry
+                import asyncio
+                wait_time = 2 ** attempt  # 1s, 2s, 4s
+                logger.info(f"Retrying datos URL in {wait_time}s",
+                            attempt=attempt + 1, max_attempts=self.retries)
+                await asyncio.sleep(wait_time)
+
+            except Exception as e:
+                logger.error("Unexpected error in direct fetch",
+                             error=str(e), url=url, attempt=attempt + 1)
+
+                # On last attempt, return None
+                if attempt == self.retries - 1:
+                    return None
+
+        return None
 
     async def _post(self, endpoint: str, data: Optional[Dict] = None, headers: Optional[Dict] = None) -> Optional[Dict[str, Any]]:
         """Make POST request"""
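For reference, a self-contained sketch of the encoding-fallback JSON parsing that the new _fetch_url_directly retries around, assuming the same list of Spanish encodings; decode_json_with_fallback is an illustrative helper name, not a function in this repository:

import json
from typing import Any, Optional

import httpx


def decode_json_with_fallback(response: httpx.Response) -> Optional[Any]:
    """Illustrative sketch: parse JSON, falling back to common Spanish encodings."""
    try:
        return response.json()
    except UnicodeDecodeError:
        # Mirror the fallback order used in the diff above
        for encoding in ('latin-1', 'windows-1252', 'iso-8859-1'):
            try:
                return json.loads(response.content.decode(encoding))
            except (UnicodeDecodeError, json.JSONDecodeError):
                continue
    return None

A caller like the retry loop above would invoke this once per attempt and treat a None result as a decode failure worth retrying.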