- Split different caching mechanisms (types, version tree, config) into different cachers - Improve resource usage on starting components, and correct gevent usage - Refine repopack usage for eveai_app (too large) - Change nginx dockerfile to allow for specialist overviews being served statically
196 lines
5.8 KiB
Python
196 lines
5.8 KiB
Python
from typing import Any, Dict, List, Optional, TypeVar, Generic, Type
|
|
from dataclasses import dataclass
|
|
from flask import Flask, current_app
|
|
from dogpile.cache import CacheRegion
|
|
from abc import ABC, abstractmethod
|
|
|
|
T = TypeVar('T')  # Generic type parameter for the payload a CacheHandler caches
|
|
|
|
|
|
@dataclass
class CacheKey:
    """
    A composite cache key assembled from named components.

    Provides a single place where cache keys are rendered to text, so every
    handler produces keys in the same, deterministic format.

    Attributes:
        components (Dict[str, Any]): Mapping of component name to its value.

    Example:
        key = CacheKey({'tenant_id': 123, 'user_id': 456})
        str(key) -> "tenant_id=123:user_id=456"
    """

    components: Dict[str, Any]

    def __str__(self) -> str:
        """
        Render the key as "name=value" pairs joined by ':'.

        Component names are emitted in sorted order so that the same
        components always yield the same key string.
        """
        parts = []
        for name in sorted(self.components):
            parts.append(f"{name}={self.components[name]}")
        return ":".join(parts)
|
class CacheHandler(ABC, Generic[T]):
    """
    Base cache handler providing structured, type-safe caching on top of a
    dogpile.cache region.

    Subclasses implement the (de)serialization hooks and the should-cache
    predicate; this base class owns key generation and region interaction.

    Note: now inherits ``ABC`` so the ``@abstractmethod`` decorators are
    actually enforced — previously the class only used ``Generic[T]`` and
    the abstract contract was silently ignored.

    Type Parameters:
        T: Type of data being cached

    Attributes:
        region (CacheRegion): Dogpile cache region used for storage
        prefix (str): Prefix for all cache keys managed by this handler
    """

    def __init__(self, region: CacheRegion, prefix: str):
        self.region = region
        self.prefix = prefix
        # Names of the identifier components required to build a cache key;
        # populated via configure_keys().
        self._key_components: List[str] = []

    @abstractmethod
    def _to_cache_data(self, instance: T) -> Any:
        """
        Convert the data to a cacheable format for internal use.

        Args:
            instance: The data to be cached.

        Returns:
            A serializable format of the instance.
        """
        pass

    @abstractmethod
    def _from_cache_data(self, data: Any, **kwargs) -> T:
        """
        Convert cached data back to usable format for internal use.

        Args:
            data: The cached data.
            **kwargs: Additional context.

        Returns:
            The data in its usable format.
        """
        pass

    @abstractmethod
    def _should_cache(self, value: T) -> bool:
        """
        Validate if the value should be cached for internal use.

        Args:
            value: The value to be cached.

        Returns:
            True if the value should be cached, False otherwise.
        """
        pass

    def configure_keys(self, *components: str) -> "CacheHandler[T]":
        """
        Configure required components for cache key generation.

        Args:
            *components: Required key component names

        Returns:
            self for method chaining
        """
        # Store as a list to match the type the constructor initializes
        # (previously a tuple was assigned over the initial list).
        self._key_components = list(components)
        return self

    def _region_name(self) -> str:
        """Resolve the region's configured name, falling back to a default."""
        return getattr(self.region, 'name', 'default_region')

    def generate_key(self, **identifiers) -> str:
        """
        Generate a cache key from provided identifiers.

        Args:
            **identifiers: Key-value pairs for key components

        Returns:
            Formatted cache key string: "<region>_<prefix>:<k=v:...>"

        Raises:
            ValueError: If required components are missing
        """
        missing = set(self._key_components) - set(identifiers.keys())
        if missing:
            raise ValueError(f"Missing key components: {missing}")

        region_name = self._region_name()

        current_app.logger.debug(f"Generating cache key in region {region_name} with prefix {self.prefix} "
                                 f"for {self._key_components}")

        key = CacheKey({k: identifiers[k] for k in self._key_components})
        return f"{region_name}_{self.prefix}:{str(key)}"

    def get(self, creator_func, **identifiers) -> T:
        """
        Get or create a cached value.

        Args:
            creator_func: Function to create value if not cached
            **identifiers: Key components for cache key

        Returns:
            Cached or newly created value
        """
        cache_key = self.generate_key(**identifiers)
        current_app.logger.debug(f"Cache key: {cache_key}")

        def creator():
            # Only invoked on a cache miss; serializes before storing.
            instance = creator_func(**identifiers)
            return self._to_cache_data(instance)

        cached_data = self.region.get_or_create(
            cache_key,
            creator,
            should_cache_fn=self._should_cache
        )

        return self._from_cache_data(cached_data, **identifiers)

    def invalidate(self, **identifiers):
        """
        Invalidate a specific cache entry.

        Args:
            **identifiers: Key components for the cache entry
        """
        cache_key = self.generate_key(**identifiers)
        self.region.delete(cache_key)

    def invalidate_by_model(self, model: str, **identifiers):
        """
        Invalidate cache entry based on model changes.

        Args:
            model: Changed model name (currently unused; kept for interface
                compatibility with callers that dispatch per model)
            **identifiers: Model instance identifiers
        """
        try:
            self.invalidate(**identifiers)
        except ValueError:
            pass  # Skip if cache key can't be generated from provided identifiers

    def invalidate_region(self):
        """
        Invalidate all cache entries within this region.

        Deletes all keys that start with this handler's region/prefix.

        Raises:
            NotImplementedError: If the backend is not Redis.
        """
        # Build the pattern with the region *name*, matching generate_key().
        # The previous code interpolated the CacheRegion object itself
        # (f"{self.region}_..."), producing a repr-based pattern that could
        # never match any key this handler wrote.
        pattern = f"{self._region_name()}_{self.prefix}:*"

        # Assuming Redis backend with dogpile, use direct Redis access.
        # NOTE(review): redis KEYS is O(n) and blocks the server; for large
        # keyspaces scan_iter() would be preferable — confirm against load.
        if hasattr(self.region.backend, 'client'):
            redis_client = self.region.backend.client
            keys_to_delete = redis_client.keys(pattern)
            if keys_to_delete:
                redis_client.delete(*keys_to_delete)
        else:
            # Fallback for other backends
            raise NotImplementedError("Region invalidation is only supported for Redis backend.")