"""
Redis cache wrapper with TTL support.
Provides caching for frequently accessed Odoo data.
"""
from __future__ import annotations
import hashlib
import json
import logging
from functools import wraps
from typing import Any, Callable, ParamSpec, TypeVar
from .config import settings
logger = logging.getLogger(__name__)
P = ParamSpec("P")
T = TypeVar("T")
class RedisCache:
    """
    Redis cache wrapper with JSON serialization.

    Provides get/set operations with TTL support and pattern-based
    invalidation. The connection is opened lazily on first use; if the
    ``redis`` package is missing or the server cannot be reached, the
    cache disables itself and every operation becomes a cheap no-op.

    Example:
        >>> cache = RedisCache()
        >>> cache.set("projects:all", projects, ttl=300)
        >>> cached = cache.get("projects:all")
    """

    def __init__(self):
        """Initialize cache state; the actual connection is deferred."""
        self._client = None
        self._enabled = settings.redis.enabled

    @property
    def client(self):
        """Lazy-load the Redis client, disabling the cache on failure."""
        if self._client is None and self._enabled:
            try:
                import redis

                self._client = redis.Redis.from_url(
                    settings.redis.url,
                    decode_responses=True,  # work with str, not bytes
                )
                # Ping eagerly so an unreachable server disables the cache
                # now instead of erroring on every later operation.
                self._client.ping()
                logger.info("Redis cache connected", extra={"url": settings.redis.url})
            except ImportError:
                logger.warning("redis package not installed, cache disabled")
                self._client = None
                self._enabled = False
            except Exception as e:
                logger.warning(
                    "Redis connection failed, cache disabled",
                    extra={"error": str(e)},
                )
                # Drop the half-built client so it is never handed out.
                self._client = None
                self._enabled = False
        return self._client

    @property
    def enabled(self) -> bool:
        """Check if cache is enabled and connected."""
        return self._enabled and self.client is not None

    def get(self, key: str) -> Any | None:
        """
        Get a value from cache.

        Args:
            key: Cache key

        Returns:
            Deserialized cached value, or None if not found. Note a
            cached JSON ``null`` is indistinguishable from a miss.
        """
        if not self.enabled:
            return None
        try:
            data = self.client.get(key)
            # Explicit miss check: GET returns None for a missing key,
            # while any stored JSON payload is a non-empty string.
            if data is not None:
                logger.debug("Cache hit", extra={"key": key})
                return json.loads(data)
            logger.debug("Cache miss", extra={"key": key})
            return None
        except Exception as e:
            logger.warning("Cache get error", extra={"key": key, "error": str(e)})
            return None

    def set(
        self,
        key: str,
        value: Any,
        ttl: int | None = None,
    ) -> bool:
        """
        Set a value in cache.

        Args:
            key: Cache key
            value: Value to cache (must be JSON serializable; non-JSON
                types are stringified via ``default=str``)
            ttl: Time-to-live in seconds (defaults to settings)

        Returns:
            True if successful
        """
        if not self.enabled:
            return False
        try:
            ttl = ttl or settings.redis.default_ttl
            self.client.setex(key, ttl, json.dumps(value, default=str))
            logger.debug("Cache set", extra={"key": key, "ttl": ttl})
            return True
        except Exception as e:
            logger.warning("Cache set error", extra={"key": key, "error": str(e)})
            return False

    def delete(self, key: str) -> bool:
        """
        Delete a key from cache.

        Args:
            key: Cache key

        Returns:
            True if key was deleted
        """
        if not self.enabled:
            return False
        try:
            result = self.client.delete(key)
            logger.debug("Cache delete", extra={"key": key, "deleted": bool(result)})
            return bool(result)
        except Exception as e:
            logger.warning("Cache delete error", extra={"key": key, "error": str(e)})
            return False

    def invalidate_pattern(self, pattern: str) -> int:
        """
        Delete all keys matching a pattern.

        Args:
            pattern: Redis glob pattern (e.g., "projects:*")

        Returns:
            Number of keys deleted
        """
        if not self.enabled:
            return 0
        try:
            # Collect the matches first, then delete them in a single
            # round trip instead of one DELETE per key.
            keys = list(self.client.scan_iter(match=pattern))
            count = self.client.delete(*keys) if keys else 0
            logger.info("Cache invalidated", extra={"pattern": pattern, "count": count})
            return count
        except Exception as e:
            logger.warning(
                "Cache invalidate error", extra={"pattern": pattern, "error": str(e)}
            )
            return 0

    def clear(self) -> bool:
        """
        Clear all cache entries.

        NOTE: FLUSHDB wipes the entire selected Redis database, not just
        keys written by this wrapper — confirm the DB is dedicated to
        this cache before calling.

        Returns:
            True if successful
        """
        if not self.enabled:
            return False
        try:
            self.client.flushdb()
            logger.info("Cache cleared")
            return True
        except Exception as e:
            logger.warning("Cache clear error", extra={"error": str(e)})
            return False
# Global cache instance
# Module-level singleton shared by the `cached` and `invalidate_on_write`
# decorators below; the Redis connection itself is opened lazily, so
# constructing it at import time performs no I/O.
cache = RedisCache()
def _make_cache_key(prefix: str, func_name: str, args: tuple, kwargs: dict) -> str:
"""
Generate a cache key from function arguments.
Args:
prefix: Key prefix
func_name: Function name
args: Positional arguments
kwargs: Keyword arguments
Returns:
Cache key string
"""
# Create a hashable representation of arguments
key_data = {
"func": func_name,
"args": args,
"kwargs": kwargs,
}
key_hash = hashlib.md5(
json.dumps(key_data, sort_keys=True, default=str).encode()
).hexdigest()[:12]
if prefix:
return f"{prefix}:{func_name}:{key_hash}"
return f"{func_name}:{key_hash}"
def cached(
    ttl: int | None = None,
    prefix: str = "",
    key_func: Callable[..., str] | None = None,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    Decorator to cache function results in Redis.

    NOTE: a cache miss and a cached ``None`` look the same, so functions
    that return ``None`` are simply re-executed on every call.

    Args:
        ttl: Time-to-live in seconds (defaults to settings)
        prefix: Key prefix for namespacing
        key_func: Optional custom function to generate cache key

    Returns:
        Decorated function

    Example:
        >>> @cached(ttl=300, prefix="projects")
        ... def list_projects(limit: int = 50):
        ...     return client.search_read("project.project", [], ["name"])
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        # The invalidation pattern depends only on prefix/function name,
        # so compute it once at decoration time.
        pattern = f"{prefix}:{func.__name__}:*" if prefix else f"{func.__name__}:*"

        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            # Bypass entirely when the cache is unavailable.
            if not cache.enabled:
                return func(*args, **kwargs)

            if key_func is not None:
                key = key_func(*args, **kwargs)
            else:
                key = _make_cache_key(prefix, func.__name__, args, kwargs)

            hit = cache.get(key)
            if hit is not None:
                return hit

            value = func(*args, **kwargs)
            cache.set(key, value, ttl)
            return value

        # Expose cache controls on the wrapper itself.
        wrapper.invalidate = lambda: cache.invalidate_pattern(pattern)  # type: ignore
        wrapper.cache_prefix = prefix or func.__name__  # type: ignore
        return wrapper

    return decorator
def invalidate_on_write(
    *cache_prefixes: str,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    Decorator that invalidates cache entries after write operations.

    The wrapped function runs first; the prefixes are flushed only after
    it returns, so a write that raises leaves the cache untouched.

    Args:
        *cache_prefixes: Cache prefixes to invalidate

    Returns:
        Decorated function

    Example:
        >>> @invalidate_on_write("timesheets", "timesheet_summary")
        ... def create_timesheet(data: dict):
        ...     return client.create("account.analytic.line", data)
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            outcome = func(*args, **kwargs)
            # Drop every cached entry under each prefix now that the
            # write has completed.
            for stale_prefix in cache_prefixes:
                cache.invalidate_pattern(f"{stale_prefix}:*")
            return outcome

        return wrapper

    return decorator
# =============================================================================
# Cache Key Builders
# =============================================================================
def projects_cache_key(limit: int, offset: int) -> str:
    """Build the cache key for a paginated projects list."""
    return ":".join(("projects", "list", str(limit), str(offset)))
def employees_cache_key(department_id: int | None, limit: int, offset: int) -> str:
    """Build cache key for employees list.

    Uses "all" only when department_id is None: the original
    ``department_id or "all"`` collapsed a legitimate id of 0 into the
    unfiltered key, conflating the two result sets.
    """
    dept = "all" if department_id is None else department_id
    return f"employees:list:{dept}:{limit}:{offset}"
def expense_categories_cache_key(limit: int, offset: int) -> str:
    """Build the cache key for a paginated expense-categories list."""
    page_part = f"{limit}:{offset}"
    return f"expense_categories:list:{page_part}"
def leave_types_cache_key(limit: int, offset: int) -> str:
    """Build the cache key for a paginated leave-types list."""
    return ":".join(("leave_types", "list", str(limit), str(offset)))