Add role-based filtering and improve code
@@ -14,11 +14,11 @@ Cache Strategy:
 """

 import json
-import redis
 from datetime import datetime, date, timedelta
 from typing import Optional, Dict, Any, List
 from uuid import UUID
 import structlog
+from shared.redis_utils import get_redis_client


 logger = structlog.get_logger()
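Note: the new import replaces the per-service Redis connection with a shared helper that is not part of this diff. A minimal sketch of what shared/redis_utils.py might provide, assuming redis-py's redis.asyncio module and a REDIS_URL environment variable:

    # shared/redis_utils.py -- hypothetical sketch, not part of this commit
    import os
    import redis.asyncio as redis

    _client = None

    async def get_redis_client():
        """Return a lazily created, process-wide async Redis client."""
        global _client
        if _client is None:
            _client = redis.from_url(
                os.environ.get("REDIS_URL", "redis://localhost:6379/0"),
                decode_responses=True,
            )
        return _client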
@@ -26,47 +26,20 @@ logger = structlog.get_logger()
 class ForecastCacheService:
     """Service-level caching for forecast predictions"""

-    def __init__(self, redis_url: str):
-        """
-        Initialize Redis connection for forecast caching
-
-        Args:
-            redis_url: Redis connection URL
-        """
-        self.redis_url = redis_url
-        self._redis_client = None
-        self._connect()
-
-    def _connect(self):
-        """Establish Redis connection with retry logic"""
-        try:
-            self._redis_client = redis.from_url(
-                self.redis_url,
-                decode_responses=True,
-                socket_keepalive=True,
-                socket_keepalive_options={1: 1, 3: 3, 5: 5},
-                retry_on_timeout=True,
-                max_connections=100,  # Higher limit for forecast service
-                health_check_interval=30
-            )
-            # Test connection
-            self._redis_client.ping()
-            logger.info("Forecast cache Redis connection established")
-        except Exception as e:
-            logger.error("Failed to connect to forecast cache Redis", error=str(e))
-            self._redis_client = None
-
-    @property
-    def redis(self):
-        """Get Redis client with connection check"""
-        if self._redis_client is None:
-            self._connect()
-        return self._redis_client
-
-    def is_available(self) -> bool:
+    def __init__(self):
+        """Initialize forecast cache service"""
+        pass
+
+    async def _get_redis(self):
+        """Get shared Redis client"""
+        return await get_redis_client()
+
+    async def is_available(self) -> bool:
         """Check if Redis cache is available"""
         try:
-            return self.redis is not None and self.redis.ping()
+            client = await self._get_redis()
+            await client.ping()
+            return True
         except Exception:
             return False

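Note: the constructor no longer takes a redis_url and is_available is now a coroutine, so call sites move into async code. A hedged usage sketch (only is_available is shown in this hunk; the rest of the public API follows below):

    import asyncio

    async def main():
        cache = ForecastCacheService()        # no redis_url argument anymore
        if not await cache.is_available():    # coroutine as of this change
            print("forecast cache unavailable, falling back to recompute")

    asyncio.run(main())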
@@ -138,12 +111,13 @@ class ForecastCacheService:
         Returns:
             Cached forecast data or None if not found
         """
-        if not self.is_available():
+        if not await self.is_available():
             return None

         try:
             key = self._get_forecast_key(tenant_id, product_id, forecast_date)
-            cached_data = self.redis.get(key)
+            client = await self._get_redis()
+            cached_data = await client.get(key)

             if cached_data:
                 forecast_data = json.loads(cached_data)
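Note: _get_forecast_key is unchanged and outside this hunk. Judging from the invalidation pattern forecast:{tenant_id}:{product_id}:* used later in this diff, it presumably builds keys along these lines (the date formatting is an assumption):

    def _get_forecast_key(self, tenant_id: UUID, product_id: UUID, forecast_date: date) -> str:
        # Assumed layout: forecast:<tenant>:<product>:<ISO date>
        return f"forecast:{tenant_id}:{product_id}:{forecast_date.isoformat()}"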
@@ -188,7 +162,7 @@ class ForecastCacheService:
         Returns:
             True if cached successfully, False otherwise
         """
-        if not self.is_available():
+        if not await self.is_available():
             logger.warning("Redis not available, skipping forecast cache")
             return False

@@ -205,7 +179,8 @@ class ForecastCacheService:
             }

             # Serialize and cache
-            self.redis.setex(
+            client = await self._get_redis()
+            await client.setex(
                 key,
                 ttl,
                 json.dumps(cache_entry, default=str)
@@ -241,12 +216,13 @@ class ForecastCacheService:
         Returns:
             Cached batch forecast data or None
         """
-        if not self.is_available():
+        if not await self.is_available():
             return None

         try:
             key = self._get_batch_forecast_key(tenant_id, product_ids, forecast_date)
-            cached_data = self.redis.get(key)
+            client = await self._get_redis()
+            cached_data = await client.get(key)

             if cached_data:
                 forecast_data = json.loads(cached_data)
@@ -273,7 +249,7 @@ class ForecastCacheService:
         forecast_data: Dict[str, Any]
     ) -> bool:
         """Cache batch forecast result"""
-        if not self.is_available():
+        if not await self.is_available():
             return False

         try:
@@ -287,7 +263,8 @@ class ForecastCacheService:
                 'ttl_seconds': ttl
             }

-            self.redis.setex(key, ttl, json.dumps(cache_entry, default=str))
+            client = await self._get_redis()
+            await client.setex(key, ttl, json.dumps(cache_entry, default=str))

             logger.info("Batch forecast cached successfully",
                         tenant_id=str(tenant_id),
@@ -320,16 +297,17 @@ class ForecastCacheService:
         Returns:
             Number of cache entries invalidated
         """
-        if not self.is_available():
+        if not await self.is_available():
             return 0

         try:
             # Find all keys matching this product
             pattern = f"forecast:{tenant_id}:{product_id}:*"
-            keys = self.redis.keys(pattern)
+            client = await self._get_redis()
+            keys = await client.keys(pattern)

             if keys:
-                deleted = self.redis.delete(*keys)
+                deleted = await client.delete(*keys)
                 logger.info("Invalidated product forecast cache",
                             tenant_id=str(tenant_id),
                             product_id=str(product_id),
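Note: client.keys() issues the blocking KEYS command, which scans the whole keyspace. The redis.asyncio client also exposes scan_iter, so a non-blocking variant of this invalidation could look like the sketch below; this is an alternative, not what the commit does:

    async def invalidate_product_forecasts(self, tenant_id, product_id) -> int:
        client = await self._get_redis()
        deleted = 0
        # SCAN-based iteration avoids blocking Redis on large keyspaces
        async for key in client.scan_iter(match=f"forecast:{tenant_id}:{product_id}:*"):
            deleted += await client.delete(key)
        return deleted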
@@ -359,7 +337,7 @@ class ForecastCacheService:
         Returns:
             Number of cache entries invalidated
         """
-        if not self.is_available():
+        if not await self.is_available():
             return 0

         try:
@@ -368,10 +346,11 @@ class ForecastCacheService:
             else:
                 pattern = f"forecast:{tenant_id}:*"

-            keys = self.redis.keys(pattern)
+            client = await self._get_redis()
+            keys = await client.keys(pattern)

             if keys:
-                deleted = self.redis.delete(*keys)
+                deleted = await client.delete(*keys)
                 logger.info("Invalidated tenant forecast cache",
                             tenant_id=str(tenant_id),
                             forecast_date=str(forecast_date) if forecast_date else "all",
@@ -391,15 +370,16 @@ class ForecastCacheService:
         Returns:
             Number of cache entries invalidated
         """
-        if not self.is_available():
+        if not await self.is_available():
             return 0

         try:
             pattern = "forecast:*"
-            keys = self.redis.keys(pattern)
+            client = await self._get_redis()
+            keys = await client.keys(pattern)

             if keys:
-                deleted = self.redis.delete(*keys)
+                deleted = await client.delete(*keys)
                 logger.warning("Invalidated ALL forecast cache", keys_deleted=deleted)
                 return deleted

@@ -413,22 +393,23 @@ class ForecastCacheService:
     # CACHE STATISTICS & MONITORING
     # ================================================================

-    def get_cache_stats(self) -> Dict[str, Any]:
+    async def get_cache_stats(self) -> Dict[str, Any]:
         """
         Get cache statistics for monitoring

         Returns:
             Dictionary with cache metrics
         """
-        if not self.is_available():
+        if not await self.is_available():
             return {"available": False}

         try:
-            info = self.redis.info()
+            client = await self._get_redis()
+            info = await client.info()

             # Get forecast-specific stats
-            forecast_keys = self.redis.keys("forecast:*")
-            batch_keys = self.redis.keys("forecast:batch:*")
+            forecast_keys = await client.keys("forecast:*")
+            batch_keys = await client.keys("forecast:batch:*")

             return {
                 "available": True,
@@ -471,12 +452,13 @@ class ForecastCacheService:
         Returns:
             Cache metadata or None
         """
-        if not self.is_available():
+        if not await self.is_available():
             return None

         try:
             key = self._get_forecast_key(tenant_id, product_id, forecast_date)
-            ttl = self.redis.ttl(key)
+            client = await self._get_redis()
+            ttl = await client.ttl(key)

             if ttl > 0:
                 return {
@@ -498,21 +480,16 @@ class ForecastCacheService:
 _cache_service = None


-def get_forecast_cache_service(redis_url: Optional[str] = None) -> ForecastCacheService:
+def get_forecast_cache_service() -> ForecastCacheService:
     """
     Get the global forecast cache service instance

-    Args:
-        redis_url: Redis connection URL (required for first call)
-
     Returns:
         ForecastCacheService instance
     """
     global _cache_service

     if _cache_service is None:
-        if redis_url is None:
-            raise ValueError("redis_url required for first initialization")
-        _cache_service = ForecastCacheService(redis_url)
+        _cache_service = ForecastCacheService()

     return _cache_service
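Note: the factory no longer accepts redis_url, so any caller still passing one will break; connection configuration now lives behind the shared client. A minimal call-site sketch under that assumption:

    cache = get_forecast_cache_service()      # returns the process-wide singleton
    stats = await cache.get_cache_stats()     # async as of this commit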