# services/external/app/cache/redis_wrapper.py
"""
Redis cache layer for fast training data access using shared Redis implementation
"""

from typing import List, Dict, Any, Optional
import json
from datetime import datetime, timedelta
import structlog
from shared.redis_utils import get_redis_client

logger = structlog.get_logger()

class ExternalDataCache:
    """Redis cache for external data service.

    Stores weather and traffic records as JSON blobs keyed by
    (kind, city, date range) with a fixed TTL, on top of the shared
    Redis client. All cache errors are logged and swallowed so a Redis
    outage degrades to cache misses instead of breaking callers.
    """

    def __init__(self):
        # Entries expire after 7 days.
        self.ttl = 86400 * 7

    async def _get_client(self):
        """Get the shared Redis client"""
        return await get_redis_client()

    @staticmethod
    def _cache_key(prefix: str, city_id: str, start_date: datetime, end_date: datetime) -> str:
        """Build a '<prefix>:<city>:<start>:<end>' key (dates truncated to whole days)."""
        return f"{prefix}:{city_id}:{start_date.date()}:{end_date.date()}"

    @staticmethod
    def _serialize_record(record: Any) -> Dict[str, Any]:
        """Convert a dict or Pydantic model into a JSON-safe dict.

        Top-level datetime values become ISO-8601 strings; everything
        else is passed through (assumed already JSON-serializable).
        """
        # Handle both dict and Pydantic model objects
        if hasattr(record, 'model_dump'):       # Pydantic v2
            record_dict = record.model_dump()
        elif hasattr(record, 'dict'):           # Pydantic v1
            record_dict = record.dict()
        else:
            record_dict = record.copy() if isinstance(record, dict) else dict(record)

        return {
            name: value.isoformat() if isinstance(value, datetime) else value
            for name, value in record_dict.items()
        }

    async def _get_cached(
        self,
        prefix: str,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Shared read path for all cached kinds; returns None on miss or error."""
        try:
            key = self._cache_key(prefix, city_id, start_date, end_date)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug(f"{prefix.capitalize()} cache hit", city_id=city_id, key=key)
                return json.loads(cached)

            logger.debug(f"{prefix.capitalize()} cache miss", city_id=city_id, key=key)
            return None

        except Exception as e:
            # Cache failures are non-fatal: report a miss so the caller
            # falls back to the upstream data source.
            logger.error(f"Error reading {prefix} cache", error=str(e))
            return None

    async def _set_cached(
        self,
        prefix: str,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Shared write path: serialize records and store them under the TTL."""
        try:
            key = self._cache_key(prefix, city_id, start_date, end_date)
            serializable_data = [self._serialize_record(record) for record in data]

            client = await self._get_client()
            await client.setex(
                key,
                self.ttl,
                json.dumps(serializable_data)
            )

            logger.debug(f"{prefix.capitalize()} data cached", city_id=city_id, records=len(data))

        except Exception as e:
            # Failing to cache is non-fatal; the data was still fetched.
            logger.error(f"Error caching {prefix} data", error=str(e))

    def _weather_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for weather data"""
        return self._cache_key("weather", city_id, start_date, end_date)

    async def get_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached weather data"""
        return await self._get_cached("weather", city_id, start_date, end_date)

    async def set_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Set cached weather data"""
        await self._set_cached("weather", city_id, start_date, end_date, data)

    def _traffic_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for traffic data"""
        return self._cache_key("traffic", city_id, start_date, end_date)

    async def get_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached traffic data"""
        return await self._get_cached("traffic", city_id, start_date, end_date)

    async def set_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Set cached traffic data"""
        await self._set_cached("traffic", city_id, start_date, end_date, data)

    async def invalidate_city_cache(self, city_id: str):
        """Invalidate all cache entries for a city"""
        try:
            client = await self._get_client()
            # Matches both 'weather:<city>:*' and 'traffic:<city>:*' keys.
            pattern = f"*:{city_id}:*"

            # Use scan_iter for safer key pattern matching (non-blocking,
            # unlike KEYS on a large keyspace).
            keys_to_delete = []
            async for key in client.scan_iter(match=pattern):
                keys_to_delete.append(key)

            if keys_to_delete:
                await client.delete(*keys_to_delete)

            logger.info("City cache invalidated", city_id=city_id, keys_deleted=len(keys_to_delete))

        except Exception as e:
            logger.error("Error invalidating cache", error=str(e))