Initial commit - production deployment

services/external/app/cache/__init__.py (vendored, new file, 1 line)
@@ -0,0 +1 @@
"""Cache module for external data service"""

services/external/app/cache/poi_cache_service.py (vendored, new file, 208 lines)
@@ -0,0 +1,208 @@
"""
|
||||
POI Cache Service
|
||||
|
||||
Caches POI detection results to avoid hammering Overpass API.
|
||||
POI data doesn't change frequently, so aggressive caching is appropriate.
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
import json
|
||||
import structlog
|
||||
from datetime import timedelta
|
||||
|
||||
from app.core.poi_config import (
|
||||
POI_CACHE_TTL_DAYS,
|
||||
POI_COORDINATE_PRECISION
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class POICacheService:
|
||||
"""
|
||||
Redis-based cache for POI detection results.
|
||||
|
||||
Caches results by rounded coordinates to allow reuse for nearby locations.
|
||||
Reduces load on Overpass API and improves onboarding performance.
|
||||
"""
|
||||
|
||||
def __init__(self, redis_client):
|
||||
"""
|
||||
Initialize cache service.
|
||||
|
||||
Args:
|
||||
redis_client: Redis client instance
|
||||
"""
|
||||
self.redis = redis_client
|
||||
self.cache_ttl_days = POI_CACHE_TTL_DAYS
|
||||
self.coordinate_precision = POI_COORDINATE_PRECISION
|
||||
|
||||
def _generate_cache_key(self, latitude: float, longitude: float) -> str:
|
||||
"""
|
||||
Generate cache key from coordinates.
|
||||
|
||||
Rounds coordinates to specified precision (default 4 decimals ≈ 10m).
|
||||
This allows cache reuse for bakeries in very close proximity.
|
||||
|
||||
Args:
|
||||
latitude: Bakery latitude
|
||||
longitude: Bakery longitude
|
||||
|
||||
Returns:
|
||||
Redis cache key
|
||||
"""
|
||||
lat_rounded = round(latitude, self.coordinate_precision)
|
||||
lon_rounded = round(longitude, self.coordinate_precision)
|
||||
return f"poi_cache:{lat_rounded}:{lon_rounded}"
|
||||
|
||||
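        # Worked example (hypothetical coordinates): with the default precision
        # of 4, (48.856614, 2.352221) rounds to 48.8566 / 2.3522, giving the
        # key "poi_cache:48.8566:2.3522"; any point within roughly 10 m yields
        # the same key and therefore shares one cache entry.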

    async def get_cached_pois(
        self,
        latitude: float,
        longitude: float
    ) -> Optional[Dict[str, Any]]:
        """
        Get cached POI results for location.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude

        Returns:
            Cached POI detection results or None if not cached
        """
        cache_key = self._generate_cache_key(latitude, longitude)

        try:
            cached_data = await self.redis.get(cache_key)
            if cached_data:
                logger.info(
                    "POI cache hit",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
                return json.loads(cached_data)
            else:
                logger.debug(
                    "POI cache miss",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
                return None
        except Exception as e:
            logger.warning(
                "Failed to retrieve POI cache",
                error=str(e),
                cache_key=cache_key
            )
            return None

    async def cache_poi_results(
        self,
        latitude: float,
        longitude: float,
        poi_data: Dict[str, Any]
    ) -> bool:
        """
        Cache POI detection results.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude
            poi_data: Complete POI detection results

        Returns:
            True if cached successfully, False otherwise
        """
        cache_key = self._generate_cache_key(latitude, longitude)
        ttl_seconds = self.cache_ttl_days * 24 * 60 * 60

        try:
            await self.redis.setex(
                cache_key,
                ttl_seconds,
                json.dumps(poi_data)
            )
            logger.info(
                "POI results cached",
                cache_key=cache_key,
                ttl_days=self.cache_ttl_days,
                location=(latitude, longitude)
            )
            return True
        except Exception as e:
            logger.error(
                "Failed to cache POI results",
                error=str(e),
                cache_key=cache_key
            )
            return False

    async def invalidate_cache(
        self,
        latitude: float,
        longitude: float
    ) -> bool:
        """
        Invalidate cached POI results for location.

        Useful for manual refresh or data corrections.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude

        Returns:
            True if invalidated successfully
        """
        cache_key = self._generate_cache_key(latitude, longitude)

        try:
            deleted = await self.redis.delete(cache_key)
            if deleted:
                logger.info(
                    "POI cache invalidated",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
            return bool(deleted)
        except Exception as e:
            logger.error(
                "Failed to invalidate POI cache",
                error=str(e),
                cache_key=cache_key
            )
            return False

    async def get_cache_stats(self) -> Dict[str, Any]:
        """
        Get cache statistics.

        Returns:
            Dictionary with cache stats (key count, memory usage, etc.)
        """
        try:
            # Count POI cache keys
            pattern = "poi_cache:*"
            cursor = 0
            key_count = 0

            while True:
                cursor, keys = await self.redis.scan(
                    cursor=cursor,
                    match=pattern,
                    count=100
                )
                key_count += len(keys)
                if cursor == 0:
                    break

            return {
                "total_cached_locations": key_count,
                "cache_ttl_days": self.cache_ttl_days,
                "coordinate_precision": self.coordinate_precision
            }
        except Exception as e:
            logger.error("Failed to get cache stats", error=str(e))
            return {
                "error": str(e)
            }
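
A minimal usage sketch for POICacheService (the redis.asyncio client wiring and the fetch_pois_from_overpass helper are assumptions for illustration, not part of this commit):

    import redis.asyncio as redis

    async def detect_pois(latitude: float, longitude: float) -> dict:
        client = redis.Redis(host="localhost", port=6379, decode_responses=True)
        cache = POICacheService(client)

        # Serve from cache when a nearby location was already resolved
        cached = await cache.get_cached_pois(latitude, longitude)
        if cached is not None:
            return cached

        # Cache miss: query Overpass (fetch_pois_from_overpass is hypothetical),
        # then cache the result for POI_CACHE_TTL_DAYS
        poi_data = await fetch_pois_from_overpass(latitude, longitude)
        await cache.cache_poi_results(latitude, longitude, poi_data)
        return poi_data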

services/external/app/cache/redis_wrapper.py (vendored, new file, 298 lines)
@@ -0,0 +1,298 @@
# services/external/app/cache/redis_wrapper.py
"""
Redis cache layer for fast training data access using shared Redis implementation
"""

from typing import List, Dict, Any, Optional
import json
from datetime import datetime, timedelta
import structlog
from shared.redis_utils import get_redis_client

logger = structlog.get_logger()


class ExternalDataCache:
    """Redis cache for external data service"""

    def __init__(self):
        self.ttl = 86400 * 7  # 7 days

    async def _get_client(self):
        """Get the shared Redis client"""
        return await get_redis_client()

    def _weather_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for weather data"""
        return f"weather:{city_id}:{start_date.date()}:{end_date.date()}"

    async def get_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached weather data"""
        try:
            key = self._weather_cache_key(city_id, start_date, end_date)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Weather cache hit", city_id=city_id, key=key)
                return json.loads(cached)

            logger.debug("Weather cache miss", city_id=city_id, key=key)
            return None

        except Exception as e:
            logger.error("Error reading weather cache", error=str(e))
            return None

    async def set_cached_weather(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Set cached weather data"""
        try:
            key = self._weather_cache_key(city_id, start_date, end_date)

            serializable_data = []
            for record in data:
                # Handle both dict and Pydantic model objects
                if hasattr(record, 'model_dump'):
                    record_dict = record.model_dump()
                elif hasattr(record, 'dict'):
                    record_dict = record.dict()
                else:
                    record_dict = record.copy() if isinstance(record, dict) else dict(record)

                # Convert any datetime fields to ISO format strings
                for key_name, value in record_dict.items():
                    if isinstance(value, datetime):
                        record_dict[key_name] = value.isoformat()

                serializable_data.append(record_dict)

            client = await self._get_client()
            await client.setex(
                key,
                self.ttl,
                json.dumps(serializable_data)
            )

            logger.debug("Weather data cached", city_id=city_id, records=len(data))

        except Exception as e:
            logger.error("Error caching weather data", error=str(e))
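    # Serialization round trip (hypothetical record): a Pydantic model such as
    # WeatherRecord(date=datetime(2024, 1, 1), temp_c=3.5) is dumped to a dict,
    # its datetime coerced to "2024-01-01T00:00:00", and later read back by
    # get_cached_weather as a plain dict, not a model instance.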

    def _traffic_cache_key(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> str:
        """Generate cache key for traffic data"""
        return f"traffic:{city_id}:{start_date.date()}:{end_date.date()}"

    async def get_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached traffic data"""
        try:
            key = self._traffic_cache_key(city_id, start_date, end_date)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Traffic cache hit", city_id=city_id, key=key)
                return json.loads(cached)

            logger.debug("Traffic cache miss", city_id=city_id, key=key)
            return None

        except Exception as e:
            logger.error("Error reading traffic cache", error=str(e))
            return None

    async def set_cached_traffic(
        self,
        city_id: str,
        start_date: datetime,
        end_date: datetime,
        data: List[Dict[str, Any]]
    ):
        """Set cached traffic data"""
        try:
            key = self._traffic_cache_key(city_id, start_date, end_date)

            serializable_data = []
            for record in data:
                # Handle both dict and Pydantic model objects
                if hasattr(record, 'model_dump'):
                    record_dict = record.model_dump()
                elif hasattr(record, 'dict'):
                    record_dict = record.dict()
                else:
                    record_dict = record.copy() if isinstance(record, dict) else dict(record)

                # Convert any datetime fields to ISO format strings
                for key_name, value in record_dict.items():
                    if isinstance(value, datetime):
                        record_dict[key_name] = value.isoformat()

                serializable_data.append(record_dict)

            client = await self._get_client()
            await client.setex(
                key,
                self.ttl,
                json.dumps(serializable_data)
            )

            logger.debug("Traffic data cached", city_id=city_id, records=len(data))

        except Exception as e:
            logger.error("Error caching traffic data", error=str(e))

    async def invalidate_city_cache(self, city_id: str):
        """Invalidate all cache entries for a city"""
        try:
            client = await self._get_client()
            pattern = f"*:{city_id}:*"

            # Use scan_iter for safer key pattern matching
            keys_to_delete = []
            async for key in client.scan_iter(match=pattern):
                keys_to_delete.append(key)

            if keys_to_delete:
                await client.delete(*keys_to_delete)

            logger.info("City cache invalidated", city_id=city_id, keys_deleted=len(keys_to_delete))

        except Exception as e:
            logger.error("Error invalidating cache", error=str(e))

    # ===== Calendar Caching Methods =====

    def _calendar_cache_key(self, calendar_id: str) -> str:
        """Generate cache key for school calendar"""
        return f"calendar:{calendar_id}"

    def _tenant_context_cache_key(self, tenant_id: str) -> str:
        """Generate cache key for tenant location context"""
        return f"tenant_context:{tenant_id}"

    async def get_cached_calendar(
        self,
        calendar_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get cached school calendar by ID"""
        try:
            key = self._calendar_cache_key(calendar_id)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Calendar cache hit", calendar_id=calendar_id)
                return json.loads(cached)

            logger.debug("Calendar cache miss", calendar_id=calendar_id)
            return None

        except Exception as e:
            logger.error("Error reading calendar cache", error=str(e))
            return None

    async def set_cached_calendar(
        self,
        calendar_id: str,
        calendar_data: Dict[str, Any]
    ):
        """Cache school calendar data (7 days TTL)"""
        try:
            key = self._calendar_cache_key(calendar_id)
            client = await self._get_client()

            # Calendars change rarely, use 7-day TTL
            ttl = 86400 * 7

            await client.setex(
                key,
                ttl,
                json.dumps(calendar_data)
            )

            logger.debug("Calendar cached", calendar_id=calendar_id)

        except Exception as e:
            logger.error("Error caching calendar", error=str(e))

    async def get_cached_tenant_context(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get cached tenant location context"""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            client = await self._get_client()
            cached = await client.get(key)

            if cached:
                logger.debug("Tenant context cache hit", tenant_id=tenant_id)
                return json.loads(cached)

            logger.debug("Tenant context cache miss", tenant_id=tenant_id)
            return None

        except Exception as e:
            logger.error("Error reading tenant context cache", error=str(e))
            return None

    async def set_cached_tenant_context(
        self,
        tenant_id: str,
        context_data: Dict[str, Any]
    ):
        """Cache tenant location context (24 hours TTL)"""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            client = await self._get_client()

            # Tenant context changes less frequently, 24-hour TTL
            ttl = 86400

            await client.setex(
                key,
                ttl,
                json.dumps(context_data)
            )

            logger.debug("Tenant context cached", tenant_id=tenant_id)

        except Exception as e:
            logger.error("Error caching tenant context", error=str(e))

    async def invalidate_tenant_context(self, tenant_id: str):
        """Invalidate tenant context cache (called when context is updated)"""
        try:
            key = self._tenant_context_cache_key(tenant_id)
            client = await self._get_client()
            await client.delete(key)

            logger.info("Tenant context cache invalidated", tenant_id=tenant_id)

        except Exception as e:
            logger.error("Error invalidating tenant context cache", error=str(e))
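
A round-trip sketch for ExternalDataCache (the WeatherRecord model and city id are hypothetical; shared.redis_utils.get_redis_client is assumed to be configured elsewhere):

    from datetime import datetime
    from pydantic import BaseModel

    class WeatherRecord(BaseModel):  # hypothetical schema
        date: datetime
        temp_c: float

    async def demo():
        cache = ExternalDataCache()
        start = datetime(2024, 1, 1)
        end = datetime(2024, 1, 7)

        # Models are dumped to dicts, datetimes serialized to ISO strings
        await cache.set_cached_weather(
            "paris", start, end, [WeatherRecord(date=start, temp_c=3.5)]
        )

        # Reads return plain JSON dicts, not model instances:
        # [{"date": "2024-01-01T00:00:00", "temp_c": 3.5}]
        return await cache.get_cached_weather("paris", start, end)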