Improve the dashboard with the weather info 2

This commit is contained in:
Urtzi Alfaro
2025-08-18 13:53:27 +02:00
parent afca94dadd
commit 0a42fe628c
8 changed files with 164 additions and 61 deletions

View File

@@ -515,6 +515,10 @@ class AEMETClient(BaseAPIClient):
base_url="https://opendata.aemet.es/opendata/api",
api_key=settings.AEMET_API_KEY
)
# Override timeout with settings value
import httpx
self.timeout = httpx.Timeout(float(settings.AEMET_TIMEOUT))
self.retries = settings.AEMET_RETRY_ATTEMPTS
self.parser = WeatherDataParser()
self.synthetic_generator = SyntheticWeatherGenerator()
self.location_service = LocationService()
@@ -541,15 +545,18 @@ class AEMETClient(BaseAPIClient):
weather_data = await self._fetch_current_weather_data(station_id)
if weather_data:
logger.info(" Successfully fetched AEMET weather data", station_id=station_id)
logger.info("🎉 SUCCESS: Real AEMET weather data retrieved!", station_id=station_id)
parsed_data = self.parser.parse_current_weather(weather_data)
# Ensure the source is set to AEMET for successful API calls
if parsed_data and isinstance(parsed_data, dict):
parsed_data["source"] = WeatherSource.AEMET.value
logger.info("📡 AEMET data confirmed - source set to 'aemet'",
temperature=parsed_data.get("temperature"),
description=parsed_data.get("description"))
return parsed_data
logger.warning(" AEMET API returned no data - falling back to synthetic",
station_id=station_id, reason="invalid_weather_data")
logger.warning("⚠️ AEMET API connectivity issues - using synthetic data",
station_id=station_id, reason="aemet_api_unreachable")
return await self._get_synthetic_current_weather()
except Exception as e:

View File

@@ -205,18 +205,30 @@ class MadridTrafficClient(BaseTrafficClient, BaseAPIClient):
traffic_record, location_context
)
# Build enhanced response
# Calculate average speed based on congestion level
if congestion_level == 'high':
average_speed = 15.0
elif congestion_level == 'medium':
average_speed = 35.0
elif congestion_level == 'low':
average_speed = 50.0
else:
average_speed = 30.0 # default
# Build enhanced response with required API fields
enhanced_data = {
'date': datetime.now(timezone.utc), # Required API field
'timestamp': datetime.now(timezone.utc),
'latitude': traffic_point.get('latitude'),
'longitude': traffic_point.get('longitude'),
'measurement_point_id': traffic_point.get('measurement_point_id'),
'measurement_point_name': traffic_point.get('measurement_point_name'),
'traffic_volume': traffic_point.get('intensidad', 0),
'pedestrian_count': pedestrian_count,
'congestion_level': congestion_level,
'average_speed': average_speed, # Required API field
'occupation_percentage': int(traffic_point.get('ocupacion', 0)),
'load_percentage': traffic_point.get('carga', 0),
'congestion_level': congestion_level,
'pedestrian_count': pedestrian_count,
'road_type': road_type,
'distance_km': distance_km,
'source': 'madrid_current_xml',

View File

@@ -15,44 +15,61 @@ class BaseAPIClient:
def __init__(self, base_url: str, api_key: Optional[str] = None):
    """Initialize the API client.

    Args:
        base_url: Root URL that endpoint paths are appended to by ``_get``.
        api_key: Optional API key; when set, ``_get`` sends it as the
            ``api_key`` query parameter (AEMET convention).
    """
    self.base_url = base_url
    self.api_key = api_key
    # Generous timeout (raised from the previous 30s) plus retries,
    # because the AEMET API is known to be unstable.
    self.timeout = httpx.Timeout(60.0)
    self.retries = 3
async def _get(self, endpoint: str, params: Optional[Dict] = None, headers: Optional[Dict] = None) -> Optional[Dict[str, Any]]:
    """Make a GET request with retry logic for unstable APIs.

    Args:
        endpoint: Path appended to ``self.base_url``.
        params: Optional query parameters (not mutated).
        headers: Optional extra request headers.

    Returns:
        The decoded JSON payload, or ``None`` once ``self.retries``
        attempts have failed. Errors are logged, never raised.
    """
    import asyncio  # local import: keeps the module-level import block untouched

    url = f"{self.base_url}{endpoint}"
    # AEMET expects the API key as a query parameter, not a header.
    # Copy params so the caller's dict is never mutated.
    request_params = dict(params or {})
    if self.api_key:
        request_params["api_key"] = self.api_key
    request_headers = headers or {}
    logger.debug("Making API request", url=url, params=request_params)
    for attempt in range(self.retries):
        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(url, params=request_params, headers=request_headers)
                response.raise_for_status()
                response_data = response.json()
                logger.debug("API response received",
                             status_code=response.status_code,
                             response_keys=list(response_data.keys()) if isinstance(response_data, dict) else "non-dict",
                             attempt=attempt + 1)
                return response_data
        except httpx.HTTPStatusError as e:
            logger.error("HTTP error", status_code=e.response.status_code, url=url,
                         response_text=e.response.text[:200], attempt=attempt + 1)
        except httpx.RequestError as e:
            logger.error("Request error", error=str(e), url=url, attempt=attempt + 1)
        except Exception as e:
            logger.error("Unexpected error", error=str(e), url=url, attempt=attempt + 1)
        if attempt == self.retries - 1:
            return None  # all attempts exhausted
        # Exponential backoff (1s, 2s, 4s, ...) before EVERY retry.
        # Previously only connection errors waited, so HTTP 5xx responses
        # were hammered again immediately.
        wait_time = 2 ** attempt
        logger.info(f"Retrying AEMET API in {wait_time}s", attempt=attempt + 1, max_attempts=self.retries)
        await asyncio.sleep(wait_time)
    return None
async def _fetch_url_directly(self, url: str, headers: Optional[Dict] = None) -> Optional[Dict[str, Any]]:
"""Fetch data directly from a full URL (for AEMET datos URLs)"""

View File

@@ -25,9 +25,8 @@ class MadridTrafficDataProcessor:
def __init__(self):
    """Set up logging and the UTM -> WGS84 coordinate transformer."""
    self.logger = structlog.get_logger()
    # Madrid publishes coordinates in UTM Zone 30N (EPSG:25830).
    # Use the modern pyproj Transformer API: pyproj.transform / pyproj.Proj
    # pairs are deprecated and removed in pyproj 3.x. always_xy=True fixes
    # the axis order to (x/lon, y/lat) regardless of CRS conventions.
    self.transformer = pyproj.Transformer.from_crs("EPSG:25830", "EPSG:4326", always_xy=True)
def safe_int(self, value: str) -> int:
"""Safely convert string to int"""
@@ -68,8 +67,8 @@ class MadridTrafficDataProcessor:
utm_x_float = float(utm_x.replace(',', '.'))
utm_y_float = float(utm_y.replace(',', '.'))
# Convert from UTM Zone 30N to WGS84
longitude, latitude = pyproj.transform(self.utm_proj, self.wgs84_proj, utm_x_float, utm_y_float)
# Convert from UTM Zone 30N to WGS84 using modern pyproj API
longitude, latitude = self.transformer.transform(utm_x_float, utm_y_float)
# Validate coordinates are in Madrid area
if 40.3 <= latitude <= 40.6 and -3.8 <= longitude <= -3.5:
@@ -455,9 +454,25 @@ class MadridTrafficDataProcessor:
carga = self.safe_int(row.get('carga', '0'))
vmed = self.safe_int(row.get('vmed', '0'))
# Build basic result (business logic will be applied elsewhere)
# Calculate average speed (vmed is in km/h, use it if available)
average_speed = float(vmed) if vmed > 0 else 30.0 # Default speed
# Determine congestion level based on occupation percentage
if ocupacion > 75:
congestion_level = 'high'
elif ocupacion > 40:
congestion_level = 'medium'
else:
congestion_level = 'low'
# Build result with API-compatible fields
result = {
'date': date_obj,
'date': date_obj, # Required API field
'traffic_volume': intensidad, # Required API field
'pedestrian_count': max(1, int(intensidad * 0.1)), # Estimated pedestrian count
'congestion_level': congestion_level, # Required API field
'average_speed': average_speed, # Required API field
'source': 'madrid_historical_csv', # Required API field
'measurement_point_id': measurement_point_id,
'point_data': point_data,
'distance_km': distance_km,