Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions growthbook/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
from .growthbook import *

from .growthbook_client import (
GrowthBookClient,
EnhancedFeatureRepository,
FeatureCache,
BackoffStrategy
BackoffStrategy,
InMemoryAsyncFeatureCache,
RedisAsyncFeatureCache
)
from .common_types import AbstractAsyncFeatureCache

# Plugin support
from .plugins import (
Expand Down
13 changes: 13 additions & 0 deletions growthbook/common_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,6 +414,18 @@ class UserContext:
sticky_bucket_assignment_docs: Dict[str, Any] = field(default_factory=dict)
skip_all_experiments: bool = False

class AbstractAsyncFeatureCache(ABC):
    """Async cache interface for feature payloads fetched by the client.

    Implementations must provide async ``get`` and ``set``; ``clear`` is
    optional and is a no-op unless overridden.
    """

    @abstractmethod
    async def get(self, key: str) -> Optional[Dict]:
        """Return the cached payload for *key*, or ``None`` on a miss."""
        pass

    @abstractmethod
    async def set(self, key: str, value: Dict, ttl: int) -> None:
        """Store *value* under *key* with a time-to-live of *ttl* seconds."""
        pass

    async def clear(self) -> None:
        """Remove every cached entry. Optional; default implementation does nothing."""
        pass

@dataclass
class Options:
url: Optional[str] = None
Expand All @@ -431,6 +443,7 @@ class Options:
on_experiment_viewed: Optional[Callable[[Experiment, Result, Optional[UserContext]], None]] = None
on_feature_usage: Optional[Callable[[str, 'FeatureResult', UserContext], None]] = None
tracking_plugins: Optional[List[Any]] = None
cache: Optional[AbstractAsyncFeatureCache] = None
http_connect_timeout: Optional[int] = None
http_read_timeout: Optional[int] = None

Expand Down
96 changes: 91 additions & 5 deletions growthbook/growthbook_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
import asyncio
import threading
import traceback
import time
from datetime import datetime
from growthbook import FeatureRepository, feature_repo
from .growthbook import FeatureRepository, feature_repo, CacheEntry
from contextlib import asynccontextmanager

from .core import eval_feature as core_eval_feature, run_experiment
Expand All @@ -23,7 +24,8 @@
StackContext,
FeatureResult,
FeatureRefreshStrategy,
Experiment
Experiment,
AbstractAsyncFeatureCache
)

logger = logging.getLogger("growthbook.growthbook_client")
Expand All @@ -34,11 +36,17 @@ class SingletonMeta(type):
_lock = threading.Lock()

def __call__(cls, *args, **kwargs):
    """Return the per-(api_host, client_key) singleton, creating it on first use.

    Instances are stored as ``{class: {key: instance}}`` so subclasses do not
    share instances. If the two identifying arguments are absent, everything
    falls back to a single shared "default" instance.
    """
    # Accept api_host/client_key positionally or as keywords; the previous
    # version only looked at positional args, so keyword-style construction
    # incorrectly collapsed onto the "default" key.
    api_host = args[0] if len(args) >= 1 else kwargs.get("api_host")
    client_key = args[1] if len(args) >= 2 else kwargs.get("client_key")
    if api_host is not None and client_key is not None:
        key = (api_host, client_key)
    else:
        key = "default"

    with cls._lock:
        if cls not in cls._instances:
            cls._instances[cls] = {}

        if key not in cls._instances[cls]:
            cls._instances[cls][key] = super().__call__(*args, **kwargs)
        return cls._instances[cls][key]

class BackoffStrategy:
"""Exponential backoff with jitter for failed requests"""
Expand Down Expand Up @@ -101,12 +109,74 @@ def get_current_state(self) -> Dict[str, Any]:
"savedGroups": self._cache['savedGroups']
}

class CacheEntry(object):
    """A cached value paired with an absolute expiry computed from its TTL.

    NOTE(review): this shadows the ``CacheEntry`` imported from
    ``.growthbook`` at the top of the module — consider renaming one of them.
    """

    def __init__(self, value: Dict, ttl: int) -> None:
        self.ttl = ttl
        # Delegate to update() so value assignment and expiry stamping
        # live in one place.
        self.update(value)

    def update(self, value: Dict):
        """Replace the stored value and restart the expiry clock (same TTL)."""
        self.value = value
        self.expires = time.time() + self.ttl

class InMemoryAsyncFeatureCache(AbstractAsyncFeatureCache):
    """Default process-local async feature cache with per-key TTL expiry."""

    def __init__(self) -> None:
        # key -> CacheEntry; stale entries are evicted lazily on read.
        self.cache: Dict[str, CacheEntry] = {}

    async def get(self, key: str) -> Optional[Dict]:
        """Return the cached value for *key*, evicting it first if expired."""
        entry = self.cache.get(key)
        if entry is not None:
            if entry.expires >= time.time():
                return entry.value
            # Lazy eviction: drop the stale entry on access.
            del self.cache[key]
        return None

    async def set(self, key: str, value: Dict, ttl: int) -> None:
        """Store *value* under *key* for *ttl* seconds.

        Always builds a fresh entry so the caller-supplied *ttl* takes
        effect; previously an existing key reused its old TTL via
        ``CacheEntry.update`` and silently ignored the new one.
        """
        self.cache[key] = CacheEntry(value, ttl)

    async def clear(self) -> None:
        """Drop every cached entry."""
        self.cache.clear()

class RedisAsyncFeatureCache(AbstractAsyncFeatureCache):
    """Feature cache backed by Redis, for sharing across processes/hosts.

    Requires the ``redis`` package (asyncio client). Values are stored as
    JSON strings under ``key_prefix``-namespaced keys with a server-side TTL.
    """

    def __init__(self, redis_url: str, key_prefix: str = "gb_cache:") -> None:
        self.key_prefix = key_prefix
        try:
            import redis.asyncio as redis
            self.redis = redis.from_url(redis_url, decode_responses=True)
        except ImportError:
            raise ImportError("redis package is required for RedisAsyncFeatureCache. Install it with `pip install redis`.")

    def _get_key(self, key: str) -> str:
        """Namespace *key* so unrelated data in the same Redis DB is untouched."""
        return f"{self.key_prefix}{key}"

    async def get(self, key: str) -> Optional[Dict]:
        """Return the JSON-decoded payload for *key*, or ``None`` on a miss."""
        data = await self.redis.get(self._get_key(key))
        if data:
            return json.loads(data)
        return None

    async def set(self, key: str, value: Dict, ttl: int) -> None:
        """Store *value* as JSON with a server-side expiry of *ttl* seconds."""
        await self.redis.set(self._get_key(key), json.dumps(value), ex=ttl)

    async def clear(self) -> None:
        """Delete every key under our prefix.

        Uses SCAN (cursor-based, incremental) instead of KEYS: KEYS walks the
        whole keyspace in one call and can block the Redis server on large
        databases.
        """
        keys = [k async for k in self.redis.scan_iter(match=f"{self.key_prefix}*")]
        if keys:
            await self.redis.delete(*keys)

    async def close(self) -> None:
        """Release the underlying Redis connection pool."""
        # NOTE(review): redis-py >= 5 prefers ``aclose()``; ``close()`` is kept
        # here for compatibility with older redis versions — confirm the
        # minimum supported redis version before switching.
        await self.redis.close()

class EnhancedFeatureRepository(FeatureRepository, metaclass=SingletonMeta):
def __init__(self,
api_host: str,
client_key: str,
decryption_key: str = "",
cache_ttl: int = 60,
cache: Optional[AbstractAsyncFeatureCache] = None,
http_connect_timeout: Optional[int] = None,
http_read_timeout: Optional[int] = None):
FeatureRepository.__init__(self)
Expand All @@ -122,6 +192,7 @@ def __init__(self,
self._callbacks: List[Callable[[Dict[str, Any]], Awaitable[None]]] = []
self._last_successful_refresh: Optional[datetime] = None
self._refresh_in_progress = asyncio.Lock()
self.cache = cache if cache else InMemoryAsyncFeatureCache()
self.http_connect_timeout = http_connect_timeout
self.http_read_timeout = http_read_timeout

Expand Down Expand Up @@ -346,7 +417,21 @@ async def load_features_async(
if api_host == self._api_host and client_key == self._client_key:
decryption_key = self._decryption_key
ttl = self._cache_ttl
return await super().load_features_async(api_host, client_key, decryption_key, ttl)

key = api_host + "::" + client_key

# Try async cache first
cached = await self.cache.get(key)
if cached:
return cached

# Fetch from network
res = await self._fetch_features_async(api_host, client_key, decryption_key)
if res is not None:
await self.cache.set(key, res, ttl)
return res

return None

class GrowthBookClient:
def __init__(
Expand Down Expand Up @@ -384,6 +469,7 @@ def __init__(
self.options.client_key or "",
self.options.decryption_key or "",
self.options.cache_ttl,
self.options.cache,
self.options.http_connect_timeout,
self.options.http_read_timeout
)
Expand Down
73 changes: 73 additions & 0 deletions tests/test_async_custom_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import pytest
import asyncio
from typing import Dict, Optional
from growthbook import GrowthBookClient, AbstractAsyncFeatureCache, InMemoryAsyncFeatureCache, Options, EnhancedFeatureRepository

@pytest.fixture(autouse=True)
def cleanup():
    """Reset the SingletonMeta instance registry before and after every test.

    Declared as a plain sync fixture (it never awaits anything): an
    ``async def`` fixture registered with ``@pytest.fixture`` is not executed
    under pytest-asyncio's strict mode, which would silently skip this reset
    and let singleton state leak between tests.
    """
    EnhancedFeatureRepository._instances = {}
    yield
    EnhancedFeatureRepository._instances = {}

class CustomAsyncCache(AbstractAsyncFeatureCache):
    """Minimal dict-backed async cache that counts get/set calls.

    The *ttl* argument is accepted but deliberately ignored — entries never
    expire, which keeps the test deterministic.
    """

    def __init__(self):
        self.cache = {}
        self.get_calls = 0
        self.set_calls = 0

    async def get(self, key: str) -> Optional[Dict]:
        value = self.cache.get(key)
        self.get_calls += 1
        return value

    async def set(self, key: str, value: Dict, ttl: int) -> None:
        self.set_calls += 1
        self.cache[key] = value

    async def clear(self) -> None:
        self.cache.clear()

@pytest.mark.asyncio
async def test_default_async_cache():
    """Without an explicit cache option, the repository uses the in-memory cache."""
    opts = Options(client_key="123", api_host="http://localhost")
    client = GrowthBookClient(options=opts)

    # White-box: reach into the private repository to inspect which cache
    # implementation was selected as the default.
    assert isinstance(client._features_repository.cache, InMemoryAsyncFeatureCache)

    await client.close()

@pytest.mark.asyncio
async def test_custom_async_cache():
    """A user-supplied cache is wired through Options into the repository and used."""
    # Manual reset in addition to the autouse fixture: this test depends on
    # getting a fresh (non-singleton) repository, and async fixture execution
    # varies with pytest-asyncio configuration.
    EnhancedFeatureRepository._instances = {}

    custom_cache = CustomAsyncCache()
    options = Options(
        client_key="123",
        api_host="http://localhost",
        cache=custom_cache
    )
    client = GrowthBookClient(options=options)

    # The exact object passed in Options must reach the repository (identity,
    # not equality — no copying or wrapping allowed).
    assert client.options.cache is custom_cache
    assert client._features_repository.cache is custom_cache

    # Pre-seed the cache so load_features_async is served entirely from it,
    # with no network access (localhost may not be reachable in CI).
    key = "http://localhost::123"
    features_data = {"features": {"foo": {"defaultValue": True}}}
    await custom_cache.set(key, features_data, 60)

    loaded = await client._features_repository.load_features_async("http://localhost", "123")
    assert loaded == features_data
    assert custom_cache.get_calls > 0

    await client.close()
Loading
Loading