- Finish editing of Specialists with overview, agent - task - tool editor
- Split different caching mechanisms (types, version tree, config) into different cachers - Improve resource usage on starting components, and correct gevent usage - Refine repopack usage for eveai_app (too large) - Change nginx dockerfile to allow for specialist overviews being served statically
This commit is contained in:
75
common/utils/cache/base.py
vendored
75
common/utils/cache/base.py
vendored
@@ -1,7 +1,8 @@
|
||||
from typing import Any, Dict, List, Optional, TypeVar, Generic, Type
|
||||
from dataclasses import dataclass
|
||||
from flask import Flask
|
||||
from flask import Flask, current_app
|
||||
from dogpile.cache import CacheRegion
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
T = TypeVar('T') # Generic type parameter for cached data
|
||||
|
||||
@@ -47,6 +48,46 @@ class CacheHandler(Generic[T]):
|
||||
self.prefix = prefix
|
||||
self._key_components = [] # List of required key components
|
||||
|
||||
@abstractmethod
|
||||
def _to_cache_data(self, instance: T) -> Any:
|
||||
"""
|
||||
Convert the data to a cacheable format for internal use.
|
||||
|
||||
Args:
|
||||
instance: The data to be cached.
|
||||
|
||||
Returns:
|
||||
A serializable format of the instance.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _from_cache_data(self, data: Any, **kwargs) -> T:
|
||||
"""
|
||||
Convert cached data back to usable format for internal use.
|
||||
|
||||
Args:
|
||||
data: The cached data.
|
||||
**kwargs: Additional context.
|
||||
|
||||
Returns:
|
||||
The data in its usable format.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _should_cache(self, value: T) -> bool:
|
||||
"""
|
||||
Validate if the value should be cached for internal use.
|
||||
|
||||
Args:
|
||||
value: The value to be cached.
|
||||
|
||||
Returns:
|
||||
True if the value should be cached, False otherwise.
|
||||
"""
|
||||
pass
|
||||
|
||||
def configure_keys(self, *components: str):
|
||||
"""
|
||||
Configure required components for cache key generation.
|
||||
@@ -77,8 +118,13 @@ class CacheHandler(Generic[T]):
|
||||
if missing:
|
||||
raise ValueError(f"Missing key components: {missing}")
|
||||
|
||||
region_name = getattr(self.region, 'name', 'default_region')
|
||||
|
||||
current_app.logger.debug(f"Generating cache key in region {region_name} with prefix {self.prefix} "
|
||||
f"for {self._key_components}")
|
||||
|
||||
key = CacheKey({k: identifiers[k] for k in self._key_components})
|
||||
return f"{self.prefix}:{str(key)}"
|
||||
return f"{region_name}_{self.prefix}:{str(key)}"
|
||||
|
||||
def get(self, creator_func, **identifiers) -> T:
|
||||
"""
|
||||
@@ -92,18 +138,19 @@ class CacheHandler(Generic[T]):
|
||||
Cached or newly created value
|
||||
"""
|
||||
cache_key = self.generate_key(**identifiers)
|
||||
current_app.logger.debug(f"Cache key: {cache_key}")
|
||||
|
||||
def creator():
|
||||
instance = creator_func(**identifiers)
|
||||
return self.to_cache_data(instance)
|
||||
return self._to_cache_data(instance)
|
||||
|
||||
cached_data = self.region.get_or_create(
|
||||
cache_key,
|
||||
creator,
|
||||
should_cache_fn=self.should_cache
|
||||
should_cache_fn=self._should_cache
|
||||
)
|
||||
|
||||
return self.from_cache_data(cached_data, **identifiers)
|
||||
return self._from_cache_data(cached_data, **identifiers)
|
||||
|
||||
def invalidate(self, **identifiers):
|
||||
"""
|
||||
@@ -128,3 +175,21 @@ class CacheHandler(Generic[T]):
|
||||
except ValueError:
|
||||
pass # Skip if cache key can't be generated from provided identifiers
|
||||
|
||||
def invalidate_region(self):
|
||||
"""
|
||||
Invalidate all cache entries within this region.
|
||||
|
||||
Deletes all keys that start with the region prefix.
|
||||
"""
|
||||
# Construct the pattern for all keys in this region
|
||||
pattern = f"{self.region}_{self.prefix}:*"
|
||||
|
||||
# Assuming Redis backend with dogpile, use `delete_multi` or direct Redis access
|
||||
if hasattr(self.region.backend, 'client'):
|
||||
redis_client = self.region.backend.client
|
||||
keys_to_delete = redis_client.keys(pattern)
|
||||
if keys_to_delete:
|
||||
redis_client.delete(*keys_to_delete)
|
||||
else:
|
||||
# Fallback for other backends
|
||||
raise NotImplementedError("Region invalidation is only supported for Redis backend.")
|
||||
|
||||
Reference in New Issue
Block a user