# bakery-ia/services/external/app/cache/redis_wrapper.py (299 lines, 9.5 KiB)
# services/external/app/cache/redis_wrapper.py
"""
Redis cache layer for fast training data access using shared Redis implementation
"""
from typing import List, Dict, Any, Optional
import json
from datetime import datetime, timedelta
import structlog
from shared.redis_utils import get_redis_client
# Module-level structured logger shared by every cache operation in this file.
logger = structlog.get_logger()
class ExternalDataCache:
    """Redis cache for external data service.

    Stores weather, traffic, school-calendar and tenant-context payloads as
    JSON strings in the shared Redis instance.  Every operation is
    best-effort: Redis and serialization errors are logged and suppressed so
    a cache outage never breaks the caller (reads fall back to ``None``).
    """

    def __init__(self):
        # Default TTL (seconds) for weather/traffic entries: 7 days.
        self.ttl = 86400 * 7

    async def _get_client(self):
        """Get the shared Redis client"""
        return await get_redis_client()

    # ------------------------------------------------------------------
    # Internal helpers shared by the typed get/set methods below
    # ------------------------------------------------------------------

    @staticmethod
    def _to_serializable(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Normalize records (plain dicts or Pydantic models) to JSON-safe dicts.

        Supports Pydantic v2 (``model_dump``), Pydantic v1 (``dict``) and any
        mapping type; ``datetime`` values are converted to ISO-8601 strings
        because ``json.dumps`` cannot encode them natively.  Input records
        are not mutated.
        """
        serializable_data: List[Dict[str, Any]] = []
        for record in data:
            # Handle both dict and Pydantic model objects
            if hasattr(record, 'model_dump'):
                record_dict = record.model_dump()
            elif hasattr(record, 'dict'):
                record_dict = record.dict()
            else:
                record_dict = record.copy() if isinstance(record, dict) else dict(record)
            # Convert any datetime fields to ISO format strings
            for field_name, value in record_dict.items():
                if isinstance(value, datetime):
                    record_dict[field_name] = value.isoformat()
            serializable_data.append(record_dict)
        return serializable_data

    async def _read_json(self, key: str) -> Optional[Any]:
        """Fetch *key* from Redis and JSON-decode it; ``None`` on a miss.

        Redis/JSON errors propagate to the caller, which logs them.
        """
        client = await self._get_client()
        cached = await client.get(key)
        if not cached:
            return None
        return json.loads(cached)

    async def _write_json(self, key: str, ttl: int, payload: Any) -> None:
        """JSON-encode *payload* and store it under *key* for *ttl* seconds."""
        client = await self._get_client()
        await client.setex(key, ttl, json.dumps(payload))

    # ------------------------------------------------------------------
    # Weather
    # ------------------------------------------------------------------

    def _weather_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for weather data (calendar dates only; times ignored)."""
        return f"weather:{city_id}:{start_date.date()}:{end_date.date()}"

    async def get_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached weather data; ``None`` on a miss or any Redis error."""
        try:
            key = self._weather_cache_key(city_id, start_date, end_date)
            data = await self._read_json(key)
            if data is not None:
                logger.debug("Weather cache hit", city_id=city_id, key=key)
                return data
            logger.debug("Weather cache miss", city_id=city_id, key=key)
            return None
        except Exception as e:
            logger.error("Error reading weather cache", error=str(e))
            return None

    async def set_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Cache weather records under the city/date-range key (7-day TTL)."""
        try:
            key = self._weather_cache_key(city_id, start_date, end_date)
            await self._write_json(key, self.ttl, self._to_serializable(data))
            logger.debug("Weather data cached", city_id=city_id, records=len(data))
        except Exception as e:
            logger.error("Error caching weather data", error=str(e))

    # ------------------------------------------------------------------
    # Traffic
    # ------------------------------------------------------------------

    def _traffic_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for traffic data (calendar dates only; times ignored)."""
        return f"traffic:{city_id}:{start_date.date()}:{end_date.date()}"

    async def get_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached traffic data; ``None`` on a miss or any Redis error."""
        try:
            key = self._traffic_cache_key(city_id, start_date, end_date)
            data = await self._read_json(key)
            if data is not None:
                logger.debug("Traffic cache hit", city_id=city_id, key=key)
                return data
            logger.debug("Traffic cache miss", city_id=city_id, key=key)
            return None
        except Exception as e:
            logger.error("Error reading traffic cache", error=str(e))
            return None

    async def set_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Cache traffic records under the city/date-range key (7-day TTL)."""
        try:
            key = self._traffic_cache_key(city_id, start_date, end_date)
            await self._write_json(key, self.ttl, self._to_serializable(data))
            logger.debug("Traffic data cached", city_id=city_id, records=len(data))
        except Exception as e:
            logger.error("Error caching traffic data", error=str(e))

    async def invalidate_city_cache(self, city_id: str):
        """Invalidate all cache entries for a city.

        Matches only keys with a ``:{city_id}:`` infix — i.e. the
        weather/traffic date-range keys; ``calendar:*`` and
        ``tenant_context:*`` keys lack that infix and are untouched.
        """
        try:
            client = await self._get_client()
            pattern = f"*:{city_id}:*"
            # Use scan_iter (incremental SCAN) instead of KEYS so the Redis
            # server is not blocked while matching.
            keys_to_delete = [key async for key in client.scan_iter(match=pattern)]
            if keys_to_delete:
                await client.delete(*keys_to_delete)
            logger.info("City cache invalidated", city_id=city_id, keys_deleted=len(keys_to_delete))
        except Exception as e:
            logger.error("Error invalidating cache", error=str(e))

    # ===== Calendar Caching Methods =====

    def _calendar_cache_key(self, calendar_id: str) -> str:
        """Generate cache key for school calendar"""
        return f"calendar:{calendar_id}"

    def _tenant_context_cache_key(self, tenant_id: str) -> str:
        """Generate cache key for tenant location context"""
        return f"tenant_context:{tenant_id}"

    async def get_cached_calendar(
        self,
        calendar_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get cached school calendar by ID; ``None`` on a miss or error."""
        try:
            key = self._calendar_cache_key(calendar_id)
            data = await self._read_json(key)
            if data is not None:
                logger.debug("Calendar cache hit", calendar_id=calendar_id)
                return data
            logger.debug("Calendar cache miss", calendar_id=calendar_id)
            return None
        except Exception as e:
            logger.error("Error reading calendar cache", error=str(e))
            return None

    async def set_cached_calendar(
        self,
        calendar_id: str,
        calendar_data: Dict[str, Any]
    ):
        """Cache school calendar data (7 days TTL)"""
        try:
            key = self._calendar_cache_key(calendar_id)
            # Calendars change rarely, use 7-day TTL
            await self._write_json(key, 86400 * 7, calendar_data)
            logger.debug("Calendar cached", calendar_id=calendar_id)
        except Exception as e:
            logger.error("Error caching calendar", error=str(e))

    async def get_cached_tenant_context(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get cached tenant location context; ``None`` on a miss or error."""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            data = await self._read_json(key)
            if data is not None:
                logger.debug("Tenant context cache hit", tenant_id=tenant_id)
                return data
            logger.debug("Tenant context cache miss", tenant_id=tenant_id)
            return None
        except Exception as e:
            logger.error("Error reading tenant context cache", error=str(e))
            return None

    async def set_cached_tenant_context(
        self,
        tenant_id: str,
        context_data: Dict[str, Any]
    ):
        """Cache tenant location context (24 hours TTL)"""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            # Tenant context changes less frequently, 24-hour TTL
            await self._write_json(key, 86400, context_data)
            logger.debug("Tenant context cached", tenant_id=tenant_id)
        except Exception as e:
            logger.error("Error caching tenant context", error=str(e))

    async def invalidate_tenant_context(self, tenant_id: str):
        """Invalidate tenant context cache (called when context is updated)"""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            client = await self._get_client()
            await client.delete(key)
            logger.info("Tenant context cache invalidated", tenant_id=tenant_id)
        except Exception as e:
            logger.error("Error invalidating tenant context cache", error=str(e))