"""
POI Cache Service

Caches POI detection results to avoid hammering the Overpass API.
POI data doesn't change frequently, so aggressive caching is appropriate.
"""

import json
from typing import Any, Dict, Optional

import structlog

from app.core.poi_config import (
    POI_CACHE_TTL_DAYS,
    POI_COORDINATE_PRECISION
)

logger = structlog.get_logger()


class POICacheService:
    """
    Redis-based cache for POI detection results.

    Caches results by rounded coordinates to allow reuse for nearby locations.
    Reduces load on the Overpass API and improves onboarding performance.
    """

    def __init__(self, redis_client):
        """
        Initialize the cache service.

        Args:
            redis_client: Redis client instance
        """
        self.redis = redis_client
        self.cache_ttl_days = POI_CACHE_TTL_DAYS
        self.coordinate_precision = POI_COORDINATE_PRECISION

    def _generate_cache_key(self, latitude: float, longitude: float) -> str:
        """
        Generate cache key from coordinates.

        Rounds coordinates to the specified precision (default 4 decimals ≈ 10 m).
        This allows cache reuse for bakeries in very close proximity.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude

        Returns:
            Redis cache key
        """
        lat_rounded = round(latitude, self.coordinate_precision)
        lon_rounded = round(longitude, self.coordinate_precision)
        return f"poi_cache:{lat_rounded}:{lon_rounded}"
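
    # Illustrative example (assuming the default precision of 4 decimal places):
    #   _generate_cache_key(48.856613, 2.352222) -> "poi_cache:48.8566:2.3522"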

    async def get_cached_pois(
        self,
        latitude: float,
        longitude: float
    ) -> Optional[Dict[str, Any]]:
        """
        Get cached POI results for a location.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude

        Returns:
            Cached POI detection results, or None if not cached
        """
        cache_key = self._generate_cache_key(latitude, longitude)

        try:
            cached_data = await self.redis.get(cache_key)
            if cached_data:
                logger.info(
                    "POI cache hit",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
                return json.loads(cached_data)
            else:
                logger.debug(
                    "POI cache miss",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
                return None
        except Exception as e:
            logger.warning(
                "Failed to retrieve POI cache",
                error=str(e),
                cache_key=cache_key
            )
            return None

    async def cache_poi_results(
        self,
        latitude: float,
        longitude: float,
        poi_data: Dict[str, Any]
    ) -> bool:
        """
        Cache POI detection results.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude
            poi_data: Complete POI detection results

        Returns:
            True if cached successfully, False otherwise
        """
        cache_key = self._generate_cache_key(latitude, longitude)
        ttl_seconds = self.cache_ttl_days * 24 * 60 * 60

        try:
            await self.redis.setex(
                cache_key,
                ttl_seconds,
                json.dumps(poi_data)
            )
            logger.info(
                "POI results cached",
                cache_key=cache_key,
                ttl_days=self.cache_ttl_days,
                location=(latitude, longitude)
            )
            return True
        except Exception as e:
            logger.error(
                "Failed to cache POI results",
                error=str(e),
                cache_key=cache_key
            )
            return False

    async def invalidate_cache(
        self,
        latitude: float,
        longitude: float
    ) -> bool:
        """
        Invalidate cached POI results for a location.

        Useful for manual refresh or data corrections.

        Args:
            latitude: Bakery latitude
            longitude: Bakery longitude

        Returns:
            True if invalidated successfully
        """
        cache_key = self._generate_cache_key(latitude, longitude)

        try:
            deleted = await self.redis.delete(cache_key)
            if deleted:
                logger.info(
                    "POI cache invalidated",
                    cache_key=cache_key,
                    location=(latitude, longitude)
                )
            return bool(deleted)
        except Exception as e:
            logger.error(
                "Failed to invalidate POI cache",
                error=str(e),
                cache_key=cache_key
            )
            return False

    async def get_cache_stats(self) -> Dict[str, Any]:
        """
        Get cache statistics.

        Returns:
            Dictionary with cache stats (cached location count, TTL, coordinate precision)
        """
        try:
            # Count POI cache keys with incremental SCAN to avoid blocking Redis
            pattern = "poi_cache:*"
            cursor = 0
            key_count = 0

            while True:
                cursor, keys = await self.redis.scan(
                    cursor=cursor,
                    match=pattern,
                    count=100
                )
                key_count += len(keys)
                if cursor == 0:
                    break

            return {
                "total_cached_locations": key_count,
                "cache_ttl_days": self.cache_ttl_days,
                "coordinate_precision": self.coordinate_precision
            }
        except Exception as e:
            logger.error("Failed to get cache stats", error=str(e))
            return {
                "error": str(e)
            }
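
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the service). It assumes the
# async client from redis-py (redis.asyncio) and a hypothetical poi_data
# payload; adapt the wiring to however the application constructs its client.
#
#   import asyncio
#   import redis.asyncio as redis
#
#   async def main():
#       client = redis.Redis(host="localhost", port=6379, decode_responses=True)
#       cache = POICacheService(client)
#       poi_data = {"nearby_pois": [{"type": "school", "distance_m": 120}]}  # hypothetical payload
#       await cache.cache_poi_results(48.8566, 2.3522, poi_data)
#       print(await cache.get_cached_pois(48.8566, 2.3522))
#       print(await cache.get_cache_stats())
#
#   asyncio.run(main())
# ---------------------------------------------------------------------------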