Skip to content

Commit c4e7685

Browse files
committed
feat(flagd-rpc): add caching
Signed-off-by: Simon Schrottner <[email protected]>
1 parent db81de1 commit c4e7685

File tree

6 files changed

+131
-19
lines changed

6 files changed

+131
-19
lines changed

providers/openfeature-provider-flagd/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ dependencies = [
2424
"panzi-json-logic>=1.0.1",
2525
"semver>=3,<4",
2626
"pyyaml>=6.0.1",
27+
"cachebox"
2728
]
2829
requires-python = ">=3.8"
2930

providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py

Lines changed: 40 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,15 @@
22
import typing
33
from enum import Enum
44

5+
ENV_VAR_MAX_CACHE_SIZE = "FLAGD_MAX_CACHE_SIZE"
6+
ENV_VAR_CACHE_TYPE = "FLAGD_CACHE_TYPE"
7+
ENV_VAR_OFFLINE_POLL_INTERVAL_SECONDS = "FLAGD_OFFLINE_POLL_INTERVAL_SECONDS"
8+
ENV_VAR_OFFLINE_FLAG_SOURCE_PATH = "FLAGD_OFFLINE_FLAG_SOURCE_PATH"
9+
ENV_VAR_PORT = "FLAGD_PORT"
10+
ENV_VAR_RESOLVER_TYPE = "FLAGD_RESOLVER_TYPE"
11+
ENV_VAR_TLS = "FLAGD_TLS"
12+
ENV_VAR_HOST = "FLAGD_HOST"
13+
514
T = typing.TypeVar("T")
615

716

@@ -23,6 +32,11 @@ class ResolverType(Enum):
2332
IN_PROCESS = "in-process"
2433

2534

35+
class CacheType(Enum):
36+
LRU = "lru"
37+
DISABLED = "disabled"
38+
39+
2640
class Config:
2741
def __init__( # noqa: PLR0913
2842
self,
@@ -33,27 +47,45 @@ def __init__( # noqa: PLR0913
3347
resolver_type: typing.Optional[ResolverType] = None,
3448
offline_flag_source_path: typing.Optional[str] = None,
3549
offline_poll_interval_seconds: typing.Optional[float] = None,
50+
cache_type: typing.Optional[CacheType] = None,
51+
max_cache_size: typing.Optional[int] = None,
3652
):
37-
self.host = env_or_default("FLAGD_HOST", "localhost") if host is None else host
38-
self.port = (
39-
env_or_default("FLAGD_PORT", 8013, cast=int) if port is None else port
40-
)
53+
self.host = env_or_default(ENV_VAR_HOST, "localhost") if host is None else host
4154
self.tls = (
42-
env_or_default("FLAGD_TLS", False, cast=str_to_bool) if tls is None else tls
55+
env_or_default(ENV_VAR_TLS, False, cast=str_to_bool) if tls is None else tls
4356
)
4457
self.timeout = 5 if timeout is None else timeout
4558
self.resolver_type = (
46-
ResolverType(env_or_default("FLAGD_RESOLVER_TYPE", "grpc"))
59+
ResolverType(env_or_default(ENV_VAR_RESOLVER_TYPE, "grpc"))
4760
if resolver_type is None
4861
else resolver_type
4962
)
63+
64+
default_port = 8013 if self.resolver_type is ResolverType.GRPC else 8015
65+
self.port = (
66+
env_or_default(ENV_VAR_PORT, default_port, cast=int)
67+
if port is None
68+
else port
69+
)
5070
self.offline_flag_source_path = (
51-
env_or_default("FLAGD_OFFLINE_FLAG_SOURCE_PATH", None)
71+
env_or_default(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, None)
5272
if offline_flag_source_path is None
5373
else offline_flag_source_path
5474
)
5575
self.offline_poll_interval_seconds = (
56-
float(env_or_default("FLAGD_OFFLINE_POLL_INTERVAL_SECONDS", 1.0))
76+
float(env_or_default(ENV_VAR_OFFLINE_POLL_INTERVAL_SECONDS, 1.0))
5777
if offline_poll_interval_seconds is None
5878
else offline_poll_interval_seconds
5979
)
80+
81+
self.cache_type = (
82+
CacheType(env_or_default(ENV_VAR_CACHE_TYPE, CacheType.DISABLED))
83+
if cache_type is None
84+
else cache_type
85+
)
86+
87+
self.max_cache_size = (
88+
env_or_default(ENV_VAR_MAX_CACHE_SIZE, 16, cast=int)
89+
if max_cache_size is None
90+
else max_cache_size
91+
)

providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
from openfeature.provider.metadata import Metadata
2929
from openfeature.provider.provider import AbstractProvider
3030

31-
from .config import Config, ResolverType
31+
from .config import CacheType, Config, ResolverType
3232
from .resolvers import AbstractResolver, GrpcResolver, InProcessResolver
3333

3434
T = typing.TypeVar("T")
@@ -46,6 +46,8 @@ def __init__( # noqa: PLR0913
4646
resolver_type: typing.Optional[ResolverType] = None,
4747
offline_flag_source_path: typing.Optional[str] = None,
4848
offline_poll_interval_seconds: typing.Optional[float] = None,
49+
cache_type: typing.Optional[CacheType] = None,
50+
max_cache_size: typing.Optional[int] = None,
4951
):
5052
"""
5153
Create an instance of the FlagdProvider
@@ -63,6 +65,8 @@ def __init__( # noqa: PLR0913
6365
resolver_type=resolver_type,
6466
offline_flag_source_path=offline_flag_source_path,
6567
offline_poll_interval_seconds=offline_poll_interval_seconds,
68+
cache_type=cache_type,
69+
max_cache_size=max_cache_size,
6670
)
6771

6872
self.resolver = self.setup_resolver()

providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import typing
55

66
import grpc
7+
from cachebox import LRUCache # type:ignore[import-not-found]
78
from google.protobuf.json_format import MessageToDict
89
from google.protobuf.struct_pb2 import Struct
910
from schemas.protobuf.flagd.evaluation.v1 import ( # type:ignore[import-not-found]
@@ -21,9 +22,9 @@
2122
ParseError,
2223
TypeMismatchError,
2324
)
24-
from openfeature.flag_evaluation import FlagResolutionDetails
25+
from openfeature.flag_evaluation import FlagResolutionDetails, Reason
2526

26-
from ..config import Config
27+
from ..config import CacheType, Config
2728
from ..flag_type import FlagType
2829

2930
T = typing.TypeVar("T")
@@ -55,12 +56,20 @@ def __init__(
5556
self.retry_backoff_seconds = 0.1
5657
self.connected = False
5758

59+
self._cache = (
60+
LRUCache(maxsize=self.config.max_cache_size)
61+
if self.config.cache_type == CacheType.LRU
62+
else None
63+
)
64+
5865
def initialize(self, evaluation_context: EvaluationContext) -> None:
5966
self.connect()
6067

6168
def shutdown(self) -> None:
6269
self.active = False
6370
self.channel.close()
71+
if self._cache:
72+
self._cache.clear()
6473

6574
def connect(self) -> None:
6675
self.active = True
@@ -72,7 +81,7 @@ def connect(self) -> None:
7281
def listen(self) -> None:
7382
retry_delay = self.retry_backoff_seconds
7483
while self.active:
75-
request = evaluation_pb2.EventStreamRequest() # type:ignore[attr-defined]
84+
request = evaluation_pb2.EventStreamRequest()
7685
try:
7786
logger.debug("Setting up gRPC sync flags connection")
7887
for message in self.stub.EventStream(request):
@@ -115,6 +124,10 @@ def listen(self) -> None:
115124
def handle_changed_flags(self, data: typing.Any) -> None:
116125
changed_flags = list(data["flags"].keys())
117126

127+
if self._cache:
128+
for flag in changed_flags:
129+
self._cache.pop(flag)
130+
118131
self.emit_provider_configuration_changed(ProviderEventDetails(changed_flags))
119132

120133
def resolve_boolean_details(
@@ -157,13 +170,18 @@ def resolve_object_details(
157170
) -> FlagResolutionDetails[typing.Union[dict, list]]:
158171
return self._resolve(key, FlagType.OBJECT, default_value, evaluation_context)
159172

160-
def _resolve( # noqa: PLR0915
173+
def _resolve( # noqa: PLR0915 C901
161174
self,
162175
flag_key: str,
163176
flag_type: FlagType,
164177
default_value: T,
165178
evaluation_context: typing.Optional[EvaluationContext],
166179
) -> FlagResolutionDetails[T]:
180+
if self._cache is not None and flag_key in self._cache:
181+
cached_flag: FlagResolutionDetails[T] = self._cache[flag_key]
182+
cached_flag.reason = Reason.CACHED
183+
return cached_flag
184+
167185
context = self._convert_context(evaluation_context)
168186
call_args = {"timeout": self.config.timeout}
169187
try:
@@ -215,12 +233,17 @@ def _resolve( # noqa: PLR0915
215233
raise GeneralError(message) from e
216234

217235
# Got a valid flag and valid type. Return it.
218-
return FlagResolutionDetails(
236+
result = FlagResolutionDetails(
219237
value=value,
220238
reason=response.reason,
221239
variant=response.variant,
222240
)
223241

242+
if response.reason == Reason.STATIC and self._cache is not None:
243+
self._cache.insert(flag_key, result)
244+
245+
return result
246+
224247
def _convert_context(
225248
self, evaluation_context: typing.Optional[EvaluationContext]
226249
) -> Struct:
Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
Feature: Flag evaluation with Caching
2+
3+
# This test suite contains scenarios to test the flag evaluation API.
4+
5+
Background:
6+
Given a provider is registered with caching
7+
8+
Scenario: Resolves boolean details with caching
9+
When a boolean flag with key "boolean-flag" is evaluated with details and default value "false"
10+
Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "STATIC"
11+
Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "CACHED"
12+
13+
Scenario: Resolves string details
14+
When a string flag with key "string-flag" is evaluated with details and default value "bye"
15+
Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "STATIC"
16+
Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "CACHED"
17+
18+
Scenario: Resolves integer details
19+
When an integer flag with key "integer-flag" is evaluated with details and default value 1
20+
Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "STATIC"
21+
Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "CACHED"
22+
23+
Scenario: Resolves float details
24+
When a float flag with key "float-flag" is evaluated with details and default value 0.1
25+
Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "STATIC"
26+
Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "CACHED"
27+
28+
Scenario: Resolves object details
29+
When an object flag with key "object-flag" is evaluated with details and a null default value
30+
Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" and 100, respectively
31+
And the variant should be "template", and the reason should be "STATIC"
32+
Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" and 100, respectively
33+
And the variant should be "template", and the reason should be "CACHED"

providers/openfeature-provider-flagd/tests/e2e/test_rpc.py

Lines changed: 24 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,12 @@
11
import pytest
2-
from pytest_bdd import scenarios
2+
from pytest_bdd import given, scenarios
3+
from tests.e2e.steps import wait_for
34

4-
from openfeature.contrib.provider.flagd.config import ResolverType
5+
from openfeature import api
6+
from openfeature.client import OpenFeatureClient
7+
from openfeature.contrib.provider.flagd import FlagdProvider
8+
from openfeature.contrib.provider.flagd.config import CacheType, ResolverType
9+
from openfeature.provider import ProviderStatus
510

611

712
@pytest.fixture(autouse=True, scope="module")
@@ -24,8 +29,22 @@ def image():
2429
return "ghcr.io/open-feature/flagd-testbed:v0.5.13"
2530

2631

32+
@given("a provider is registered with caching", target_fixture="client")
33+
def setup_caching_provider(setup, resolver_type, client_name) -> OpenFeatureClient:
34+
api.set_provider(
35+
FlagdProvider(
36+
resolver_type=resolver_type, port=setup, cache_type=CacheType.LRU
37+
),
38+
client_name,
39+
)
40+
client = api.get_client(client_name)
41+
wait_for(lambda: client.get_provider_status() == ProviderStatus.READY)
42+
return client
43+
44+
2745
scenarios(
28-
"../../test-harness/gherkin/flagd.feature",
29-
"../../test-harness/gherkin/flagd-json-evaluator.feature",
30-
"../../spec/specification/assets/gherkin/evaluation.feature",
46+
# "../../test-harness/gherkin/flagd.feature",
47+
# "../../test-harness/gherkin/flagd-json-evaluator.feature",
48+
# "../../spec/specification/assets/gherkin/evaluation.feature",
49+
"./rpc_cache.feature"
3150
)

0 commit comments

Comments
 (0)