bakery-ia/shared/utils/retry.py

"""
Retry utilities for shared use across services
"""
import asyncio
import random
from typing import Callable, Any, Tuple, Type
import logging

logger = logging.getLogger(__name__)


async def retry_with_backoff(
    func: Callable[[], Any],
    max_retries: int = 3,
    exceptions: Tuple[Type[Exception], ...] = (Exception,),
    base_delay: float = 1.0,
    max_delay: float = 60.0,
    backoff_factor: float = 2.0
) -> Any:
"""
Retry a function with exponential backoff.
Args:
func: The function to retry (can be sync or async)
max_retries: Maximum number of retry attempts
exceptions: Tuple of exception types to catch and retry
base_delay: Initial delay in seconds
max_delay: Maximum delay between retries
backoff_factor: Factor by which delay increases after each retry
Returns:
Result of the function call
Raises:
The original exception if all retries are exhausted
"""
    for attempt in range(max_retries + 1):  # +1 because first attempt doesn't count as retry
        try:
            result = func()
            # Handle both async functions and lambdas that return coroutines
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except exceptions as e:
            if attempt == max_retries:
                # Exhausted all retries, re-raise the exception
                raise
            # Calculate delay with exponential backoff and jitter
            delay = min(base_delay * (backoff_factor ** attempt), max_delay)
            # Add jitter to prevent thundering herd
            delay = delay * (0.5 + random.random() * 0.5)
            logger.warning(
                f"Attempt {attempt + 1} failed, retrying in {delay:.2f}s: {e}",
                extra={
                    "attempt": attempt + 1,
                    "max_retries": max_retries,
                    "exception": str(e)
                }
            )
            await asyncio.sleep(delay)
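

# Usage sketch (illustrative only, not part of the original module): `flaky` is a
# hypothetical coroutine that fails twice before succeeding. It shows how a caller
# passes a zero-argument callable plus the exception types that should be retried.
if __name__ == "__main__":
    async def _demo() -> None:
        attempts = 0

        async def flaky() -> str:
            # Hypothetical flaky operation: raises on the first two calls, then succeeds.
            nonlocal attempts
            attempts += 1
            if attempts < 3:
                raise ConnectionError("transient network failure")
            return "ok"

        # Wrap the coroutine in a zero-argument lambda; retry only on ConnectionError,
        # with a short base delay so the demo finishes quickly.
        result = await retry_with_backoff(
            lambda: flaky(),
            max_retries=5,
            exceptions=(ConnectionError,),
            base_delay=0.1,
        )
        print(result)  # prints "ok" after two logged, retried failures

    asyncio.run(_demo())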