# ================================================================
# services/orders/app/services/cache_service.py
# ================================================================
"""
Cache Service - Redis caching for procurement plans and related data
"""
import json
import uuid
from datetime import datetime, date
from typing import Optional, Dict, Any, List

import structlog

from shared.redis_utils import get_redis_client
from app.models.procurement import ProcurementPlan

logger = structlog.get_logger()


class CacheService:
    """Service for managing Redis cache operations"""

    def __init__(self):
        """Initialize cache service"""
        self._redis_client = None

    async def _get_redis(self):
        """Get (and lazily initialize) the shared Redis client"""
        if self._redis_client is None:
            self._redis_client = await get_redis_client()
        return self._redis_client

    @property
    def redis(self):
        """Get the Redis client; requires a prior await of _get_redis()"""
        if self._redis_client is None:
            raise RuntimeError("Redis client not initialized; await _get_redis() first")
        return self._redis_client

    def is_available(self) -> bool:
        """Check whether Redis is connected and responding"""
        try:
            return self._redis_client is not None and bool(self._redis_client.ping())
        except Exception:
            return False

    # ================================================================
    # PROCUREMENT PLAN CACHING
    # ================================================================
    def _get_plan_key(
        self,
        tenant_id: uuid.UUID,
        plan_date: Optional[date] = None,
        plan_id: Optional[uuid.UUID] = None
    ) -> str:
        """Generate cache key for procurement plan"""
        if plan_id:
            return f"procurement:plan:id:{tenant_id}:{plan_id}"
        elif plan_date:
            return f"procurement:plan:date:{tenant_id}:{plan_date.isoformat()}"
        else:
            return f"procurement:plan:current:{tenant_id}"

    def _get_dashboard_key(self, tenant_id: uuid.UUID) -> str:
        """Generate cache key for dashboard data"""
        return f"procurement:dashboard:{tenant_id}"

    def _get_requirements_key(self, tenant_id: uuid.UUID, plan_id: uuid.UUID) -> str:
        """Generate cache key for plan requirements"""
        return f"procurement:requirements:{tenant_id}:{plan_id}"

    async def cache_procurement_plan(
        self,
        plan: ProcurementPlan,
        ttl_hours: int = 6
    ) -> bool:
        """Cache a procurement plan with multiple keys for different access patterns"""
        if not self.is_available():
            logger.warning("Redis not available, skipping cache")
            return False
        try:
            # Convert plan to cacheable format
            plan_data = self._serialize_plan(plan)
            ttl_seconds = ttl_hours * 3600
            # Cache by plan ID
            id_key = self._get_plan_key(plan.tenant_id, plan_id=plan.id)
            self.redis.setex(id_key, ttl_seconds, plan_data)
            # Cache by plan date
            date_key = self._get_plan_key(plan.tenant_id, plan_date=plan.plan_date)
            self.redis.setex(date_key, ttl_seconds, plan_data)
            # If this is today's plan, cache as current
            if plan.plan_date == date.today():
                current_key = self._get_plan_key(plan.tenant_id)
                self.redis.setex(current_key, ttl_seconds, plan_data)
            # Cache requirements separately for faster access
            if plan.requirements:
                requirements_data = self._serialize_requirements(plan.requirements)
                req_key = self._get_requirements_key(plan.tenant_id, plan.id)
                self.redis.setex(req_key, ttl_seconds, requirements_data)
            # Update plan list cache
            await self._update_plan_list_cache(plan.tenant_id, plan)
            logger.info("Procurement plan cached", plan_id=plan.id, tenant_id=plan.tenant_id)
            return True
        except Exception as e:
            logger.error("Error caching procurement plan", error=str(e), plan_id=plan.id)
            return False

    async def get_cached_plan(
        self,
        tenant_id: uuid.UUID,
        plan_date: Optional[date] = None,
        plan_id: Optional[uuid.UUID] = None
    ) -> Optional[Dict[str, Any]]:
        """Get cached procurement plan"""
        if not self.is_available():
            return None
        try:
            key = self._get_plan_key(tenant_id, plan_date, plan_id)
            cached_data = self.redis.get(key)
            if cached_data:
                plan_data = json.loads(cached_data)
                logger.debug("Procurement plan retrieved from cache",
                             tenant_id=tenant_id, key=key)
                return plan_data
            return None
        except Exception as e:
            logger.error("Error retrieving cached plan", error=str(e))
            return None

    async def get_cached_requirements(
        self,
        tenant_id: uuid.UUID,
        plan_id: uuid.UUID
    ) -> Optional[List[Dict[str, Any]]]:
        """Get cached plan requirements"""
        if not self.is_available():
            return None
        try:
            key = self._get_requirements_key(tenant_id, plan_id)
            cached_data = self.redis.get(key)
            if cached_data:
                requirements_data = json.loads(cached_data)
                logger.debug("Requirements retrieved from cache",
                             tenant_id=tenant_id, plan_id=plan_id)
                return requirements_data
            return None
        except Exception as e:
            logger.error("Error retrieving cached requirements", error=str(e))
            return None

    async def cache_dashboard_data(
        self,
        tenant_id: uuid.UUID,
        dashboard_data: Dict[str, Any],
        ttl_hours: int = 1
    ) -> bool:
        """Cache dashboard data with a shorter TTL"""
        if not self.is_available():
            return False
        try:
            key = self._get_dashboard_key(tenant_id)
            data_json = json.dumps(dashboard_data, cls=DateTimeEncoder)
            ttl_seconds = ttl_hours * 3600
            self.redis.setex(key, ttl_seconds, data_json)
            logger.debug("Dashboard data cached", tenant_id=tenant_id)
            return True
        except Exception as e:
            logger.error("Error caching dashboard data", error=str(e))
            return False

    async def get_cached_dashboard_data(self, tenant_id: uuid.UUID) -> Optional[Dict[str, Any]]:
        """Get cached dashboard data"""
        if not self.is_available():
            return None
        try:
            key = self._get_dashboard_key(tenant_id)
            cached_data = self.redis.get(key)
            if cached_data:
                return json.loads(cached_data)
            return None
        except Exception as e:
            logger.error("Error retrieving cached dashboard data", error=str(e))
            return None

    async def invalidate_plan_cache(
        self,
        tenant_id: uuid.UUID,
        plan_id: Optional[uuid.UUID] = None,
        plan_date: Optional[date] = None
    ) -> bool:
        """Invalidate cached procurement plan data"""
        if not self.is_available():
            return False
        try:
            keys_to_delete = []
            if plan_id:
                # Delete specific plan cache
                keys_to_delete.append(self._get_plan_key(tenant_id, plan_id=plan_id))
                keys_to_delete.append(self._get_requirements_key(tenant_id, plan_id))
            if plan_date:
                keys_to_delete.append(self._get_plan_key(tenant_id, plan_date=plan_date))
            # Always invalidate current plan cache and dashboard
            keys_to_delete.extend([
                self._get_plan_key(tenant_id),
                self._get_dashboard_key(tenant_id)
            ])
            # Delete plan list cache
            list_key = f"procurement:plans:list:{tenant_id}:*"
            list_keys = self.redis.keys(list_key)
            keys_to_delete.extend(list_keys)
            if keys_to_delete:
                self.redis.delete(*keys_to_delete)
            logger.info("Plan cache invalidated",
                        tenant_id=tenant_id, keys_count=len(keys_to_delete))
            return True
        except Exception as e:
            logger.error("Error invalidating plan cache", error=str(e))
            return False
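
    # Note: KEYS blocks the Redis server while it walks the keyspace. On a
    # large deployment the wildcard lookup above could instead use redis-py's
    # cursor-based scan (sketch, same match semantics):
    #     for k in self.redis.scan_iter(match=f"procurement:plans:list:{tenant_id}:*"):
    #         keys_to_delete.append(k)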

    # ================================================================
    # LIST CACHING
    # ================================================================
    async def _update_plan_list_cache(self, tenant_id: uuid.UUID, plan: ProcurementPlan) -> None:
        """Update cached plan lists"""
        try:
            # Add plan to various lists
            list_keys = [
                f"procurement:plans:list:{tenant_id}:all",
                f"procurement:plans:list:{tenant_id}:status:{plan.status}",
                f"procurement:plans:list:{tenant_id}:month:{plan.plan_date.strftime('%Y-%m')}"
            ]
            plan_summary = {
                "id": str(plan.id),
                "plan_number": plan.plan_number,
                "plan_date": plan.plan_date.isoformat(),
                "status": plan.status,
                "total_requirements": plan.total_requirements,
                "total_estimated_cost": float(plan.total_estimated_cost),
                "created_at": plan.created_at.isoformat()
            }
            for key in list_keys:
                # Use sorted sets for automatic ordering by date
                score = plan.plan_date.toordinal()  # Use ordinal date as score
                self.redis.zadd(key, {json.dumps(plan_summary): score})
                self.redis.expire(key, 3600)  # 1 hour TTL
        except Exception as e:
            logger.warning("Error updating plan list cache", error=str(e))

    # ================================================================
    # PERFORMANCE METRICS CACHING
    # ================================================================
    async def cache_performance_metrics(
        self,
        tenant_id: uuid.UUID,
        metrics: Dict[str, Any],
        ttl_hours: int = 24
    ) -> bool:
        """Cache performance metrics"""
        if not self.is_available():
            return False
        try:
            key = f"procurement:metrics:{tenant_id}"
            data_json = json.dumps(metrics, cls=DateTimeEncoder)
            ttl_seconds = ttl_hours * 3600
            self.redis.setex(key, ttl_seconds, data_json)
            return True
        except Exception as e:
            logger.error("Error caching performance metrics", error=str(e))
            return False

    async def get_cached_metrics(self, tenant_id: uuid.UUID) -> Optional[Dict[str, Any]]:
        """Get cached performance metrics"""
        if not self.is_available():
            return None
        try:
            key = f"procurement:metrics:{tenant_id}"
            cached_data = self.redis.get(key)
            if cached_data:
                return json.loads(cached_data)
            return None
        except Exception as e:
            logger.error("Error retrieving cached metrics", error=str(e))
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================
    def _serialize_plan(self, plan: ProcurementPlan) -> str:
        """Serialize procurement plan for caching"""
        try:
            # Convert to dict, handling special types
            plan_dict = {
                "id": str(plan.id),
                "tenant_id": str(plan.tenant_id),
                "plan_number": plan.plan_number,
                "plan_date": plan.plan_date.isoformat(),
                "plan_period_start": plan.plan_period_start.isoformat(),
                "plan_period_end": plan.plan_period_end.isoformat(),
                "status": plan.status,
                "plan_type": plan.plan_type,
                "priority": plan.priority,
                "total_requirements": plan.total_requirements,
                "total_estimated_cost": float(plan.total_estimated_cost),
                "total_approved_cost": float(plan.total_approved_cost),
                "safety_stock_buffer": float(plan.safety_stock_buffer),
                "supply_risk_level": plan.supply_risk_level,
                "created_at": plan.created_at.isoformat(),
                "updated_at": plan.updated_at.isoformat(),
                # Add requirements count for quick reference
                "requirements_count": len(plan.requirements) if plan.requirements else 0
            }
            return json.dumps(plan_dict)
        except Exception as e:
            logger.error("Error serializing plan", error=str(e))
            raise

    def _serialize_requirements(self, requirements: List) -> str:
        """Serialize requirements for caching"""
        try:
            requirements_data = []
            for req in requirements:
                req_dict = {
                    "id": str(req.id),
                    "requirement_number": req.requirement_number,
                    "product_id": str(req.product_id),
                    "product_name": req.product_name,
                    "status": req.status,
                    "priority": req.priority,
                    "required_quantity": float(req.required_quantity),
                    "net_requirement": float(req.net_requirement),
                    "estimated_total_cost": float(req.estimated_total_cost or 0),
                    "required_by_date": req.required_by_date.isoformat(),
                    "suggested_order_date": req.suggested_order_date.isoformat()
                }
                requirements_data.append(req_dict)
            return json.dumps(requirements_data)
        except Exception as e:
            logger.error("Error serializing requirements", error=str(e))
            raise

    async def clear_tenant_cache(self, tenant_id: uuid.UUID) -> bool:
        """Clear all cached data for a tenant"""
        if not self.is_available():
            return False
        try:
            # Broad match: any key that embeds this tenant ID, across all prefixes
            pattern = f"*:{tenant_id}*"
            keys = self.redis.keys(pattern)
            if keys:
                self.redis.delete(*keys)
            logger.info("Tenant cache cleared", tenant_id=tenant_id, keys_count=len(keys))
            return True
        except Exception as e:
            logger.error("Error clearing tenant cache", error=str(e))
            return False

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get Redis cache statistics"""
        if not self.is_available():
            return {"available": False}
        try:
            info = self.redis.info()
            return {
                "available": True,
                "used_memory": info.get("used_memory_human"),
                "connected_clients": info.get("connected_clients"),
                "total_connections_received": info.get("total_connections_received"),
                "keyspace_hits": info.get("keyspace_hits", 0),
                "keyspace_misses": info.get("keyspace_misses", 0),
                "hit_rate": self._calculate_hit_rate(
                    info.get("keyspace_hits", 0),
                    info.get("keyspace_misses", 0)
                )
            }
        except Exception as e:
            logger.error("Error getting cache stats", error=str(e))
            return {"available": False, "error": str(e)}

    def _calculate_hit_rate(self, hits: int, misses: int) -> float:
        """Calculate cache hit rate percentage"""
        # e.g. 900 hits and 100 misses -> 900 / 1000 * 100 = 90.0
        total = hits + misses
        return (hits / total * 100) if total > 0 else 0.0


class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that handles datetime objects"""

    def default(self, obj):
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        return super().default(obj)
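
# Encoder usage sketch (illustrative values):
#     json.dumps({"generated_at": datetime(2025, 10, 15, 16, 12)}, cls=DateTimeEncoder)
#     -> '{"generated_at": "2025-10-15T16:12:00"}'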

# Global cache service instance
_cache_service = None


def get_cache_service() -> CacheService:
    """Get the global cache service instance"""
    global _cache_service
    if _cache_service is None:
        _cache_service = CacheService()
    return _cache_service
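
# ----------------------------------------------------------------
# Usage sketch (illustrative only): assumes a reachable Redis behind
# shared.redis_utils and a populated ProcurementPlan ORM object `plan`,
# called from async application code.
#
#     cache = get_cache_service()
#     await cache._get_redis()                  # initialize the shared client once
#     await cache.cache_procurement_plan(plan)  # warm all access paths
#     cached = await cache.get_cached_plan(plan.tenant_id, plan_id=plan.id)
#     if cached is None:
#         ...  # fall back to the database
# ----------------------------------------------------------------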