From 36269b59d015880fe65190ee77987bb1b6a8f125 Mon Sep 17 00:00:00 2001 From: Simon Schrottner Date: Sun, 17 Nov 2024 15:50:34 +0100 Subject: [PATCH] fixup: adding gherkin tests for evaluations, and fxing found issues Signed-off-by: Simon Schrottner --- .../openfeature-provider-flagd/pyproject.toml | 2 +- .../contrib/provider/flagd/resolvers/grpc.py | 2 +- .../provider/flagd/resolvers/in_process.py | 4 - .../process/connector/grpc_watcher.py | 36 +++- .../provider/flagd/resolvers/process/flags.py | 4 + .../openfeature-provider-flagd/test-harness | 2 +- .../tests/e2e/conftest.py | 15 +- .../tests/e2e/inprocess/file/conftest.py | 17 -- .../tests/e2e/inprocess/grpc/conftest.py | 49 ----- .../e2e/inprocess/grpc/test_inprocess_grpc.py | 6 - .../grpc/test_inprocess_grpc_reconnect.py | 12 -- .../tests/e2e/steps.py | 182 ++++++++++++++---- .../tests/e2e/test_in-process-file.py | 38 +++- .../tests/e2e/test_inprocess.py | 37 ++++ .../tests/e2e/test_inprocess_file.py | 83 ++++++++ .../tests/e2e/test_inprocess_reconnect.py | 35 ++++ .../tests/e2e/test_rpc.py | 11 +- .../tests/test_errors.py | 26 +++ .../tests/test_file_store.py | 19 +- 19 files changed, 419 insertions(+), 161 deletions(-) delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/inprocess/file/conftest.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/conftest.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc_reconnect.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_inprocess.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_inprocess_file.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_inprocess_reconnect.py diff --git a/providers/openfeature-provider-flagd/pyproject.toml b/providers/openfeature-provider-flagd/pyproject.toml index 60d79554..8d86a04b 100644 --- a/providers/openfeature-provider-flagd/pyproject.toml +++ b/providers/openfeature-provider-flagd/pyproject.toml @@ -18,7 +18,7 @@ classifiers = [ keywords = [] dependencies = [ "openfeature-sdk>=0.4.0", - "grpcio>=1.60.0", + "grpcio>=1.68.0", "protobuf>=4.25.2", "mmh3>=4.1.0", "panzi-json-logic>=1.0.1", diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py index cfb8ae19..4d11b9b2 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py @@ -20,7 +20,7 @@ from ..config import Config from ..flag_type import FlagType -from . import AbstractResolver +from .protocol import AbstractResolver T = typing.TypeVar("T") diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py index 5c3c2384..7127b46b 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py @@ -73,8 +73,6 @@ def resolve_float_details( evaluation_context: typing.Optional[EvaluationContext] = None, ) -> FlagResolutionDetails[float]: result = self._resolve(key, default_value, evaluation_context) - if not isinstance(result.value, float): - result.value = float(result.value) return result def resolve_integer_details( @@ -84,8 +82,6 @@ def resolve_integer_details( evaluation_context: typing.Optional[EvaluationContext] = None, ) -> FlagResolutionDetails[int]: result = self._resolve(key, default_value, evaluation_context) - if not isinstance(result.value, int): - result.value = int(result.value) return result def resolve_object_details( diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py index f6ea3ecf..350131d7 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py @@ -5,13 +5,16 @@ import typing import grpc +from schemas.protobuf.flagd.sync.v1 import ( # type:ignore[import-not-found] + sync_pb2, + sync_pb2_grpc, +) from openfeature.evaluation_context import EvaluationContext from openfeature.event import ProviderEventDetails from openfeature.exception import ErrorCode, ParseError, ProviderNotReadyError from ....config import Config -from ....proto.flagd.sync.v1 import sync_pb2, sync_pb2_grpc from ..connector import FlagStateConnector from ..flags import FlagStore @@ -29,9 +32,9 @@ def __init__( emit_provider_error: typing.Callable[[ProviderEventDetails], None], ): self.flag_store = flag_store - channel_factory = grpc.secure_channel if config.tls else grpc.insecure_channel - self.channel = channel_factory(f"{config.host}:{config.port}") - self.stub = sync_pb2_grpc.FlagSyncServiceStub(self.channel) + self.config = config + + self.stub, self.channel = self.create_stub() self.timeout = config.timeout self.retry_backoff_seconds = config.retry_backoff_seconds self.selector = config.selector @@ -40,6 +43,22 @@ def __init__( self.connected = False + def create_stub( + self, + ) -> typing.Tuple[sync_pb2_grpc.FlagSyncServiceStub, grpc.Channel]: + config = self.config + channel_factory = grpc.secure_channel if config.tls else grpc.insecure_channel + channel = channel_factory( + f"{config.host}:{config.port}", + options=( + ("grpc.max_reconnect_backoff_ms", 1000), + ("grpc.initial_reconnect_backoff_ms", 1000), + ("grpc.keepalive_time_ms", 1000), + ), + ) + stub = sync_pb2_grpc.FlagSyncServiceStub(channel) + return stub, channel + def initialize(self, context: EvaluationContext) -> None: self.active = True self.thread = threading.Thread( @@ -60,13 +79,13 @@ def initialize(self, context: EvaluationContext) -> None: def shutdown(self) -> None: self.active = False + self.channel.close() def sync_flags(self) -> None: - request = sync_pb2.SyncFlagsRequest(selector=self.selector) # type:ignore[attr-defined] - retry_delay = self.retry_backoff_seconds while self.active: try: + request = sync_pb2.SyncFlagsRequest(selector=self.selector) logger.debug("Setting up gRPC sync flags connection") for flag_rsp in self.stub.SyncFlags(request): flag_str = flag_rsp.flag_configuration @@ -89,6 +108,9 @@ def sync_flags(self) -> None: return except grpc.RpcError as e: logger.error(f"SyncFlags stream error, {e.code()=} {e.details()=}") + if e.code() == grpc.StatusCode.UNAVAILABLE: + self.stub, self.channel = self.create_stub() + except json.JSONDecodeError: logger.exception( f"Could not parse JSON flag data from SyncFlags endpoint: {flag_str=}" @@ -107,4 +129,4 @@ def sync_flags(self) -> None: ) logger.info(f"gRPC sync disconnected, reconnecting in {retry_delay}s") time.sleep(retry_delay) - retry_delay = min(2 * retry_delay, self.MAX_BACK_OFF) + retry_delay = min(2, self.MAX_BACK_OFF) diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py index 889edac7..6b1ac0d1 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py @@ -72,6 +72,10 @@ def from_dict(cls, key: str, data: dict) -> "Flag": data["default_variant"] = data["defaultVariant"] del data["defaultVariant"] + if "source" in data: + del data["source"] + if "selector" in data: + del data["selector"] try: flag = cls(key=key, **data) return flag diff --git a/providers/openfeature-provider-flagd/test-harness b/providers/openfeature-provider-flagd/test-harness index c9e0be36..6197b3d9 160000 --- a/providers/openfeature-provider-flagd/test-harness +++ b/providers/openfeature-provider-flagd/test-harness @@ -1 +1 @@ -Subproject commit c9e0be36e89ad33aa99b8e32b40d67e9bf350f88 +Subproject commit 6197b3d956d358bf662e5b8e0aebdc4800480f6b diff --git a/providers/openfeature-provider-flagd/tests/e2e/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/conftest.py index 2aae58d9..25501b41 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/conftest.py +++ b/providers/openfeature-provider-flagd/tests/e2e/conftest.py @@ -5,29 +5,22 @@ from tests.e2e.flagd_container import FlagdContainer from tests.e2e.steps import * # noqa: F403 -from openfeature import api -from openfeature.contrib.provider.flagd import FlagdProvider - JsonPrimitive = typing.Union[str, bool, float, int] -@pytest.fixture(autouse=True, scope="package") -def setup(request, port, image, resolver_type): +@pytest.fixture(autouse=True, scope="module") +def setup(request, port, image): container: DockerContainer = FlagdContainer( image=image, port=port, ) # Setup code c = container.start() - api.set_provider( - FlagdProvider( - resolver_type=resolver_type, - port=int(container.get_exposed_port(port)), - ) - ) def fin(): c.stop() # Teardown code request.addfinalizer(fin) + + return c.get_exposed_port(port) diff --git a/providers/openfeature-provider-flagd/tests/e2e/inprocess/file/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/inprocess/file/conftest.py deleted file mode 100644 index 81831c69..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/inprocess/file/conftest.py +++ /dev/null @@ -1,17 +0,0 @@ -from pytest_bdd import given - -from openfeature import api -from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.contrib.provider.flagd.config import ResolverType - - -@given("a flagd provider is set", target_fixture="client") -def setup_provider(flag_file): - provider = FlagdProvider( - resolver_type=ResolverType.IN_PROCESS, - offline_flag_source_path=flag_file, - offline_poll_interval_seconds=0.1, - ) - api.set_provider(provider) - yield api.get_client() - provider.shutdown() diff --git a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/conftest.py deleted file mode 100644 index 3c15b0c8..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/conftest.py +++ /dev/null @@ -1,49 +0,0 @@ -import pytest -from pytest_bdd import given, parsers, then, when -from tests.e2e.conftest import add_event_handler, assert_handlers - -from openfeature import api -from openfeature.client import OpenFeatureClient, ProviderEvent -from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.contrib.provider.flagd.config import ResolverType - - -@pytest.fixture -def port(): - # Port for flagd-sync, override to 9091 to test unstable version - return 9090 - - -@given("a flagd provider is set", target_fixture="client") -def setup_provider(port: int) -> OpenFeatureClient: - api.set_provider( - FlagdProvider( - resolver_type=ResolverType.IN_PROCESS, - port=port, - timeout=0.5, - retry_backoff_seconds=0.1, - ) - ) - return api.get_client() - - -@when(parsers.cfparse('a flag with key "{key}" is modified')) -def modify_flag(key): - # sync service will flip flag contents regularly - pass - - -@given("flagd is unavailable", target_fixture="client") -def flagd_unavailable(): - return setup_provider(99999) - - -@when("a flagd provider is set and initialization is awaited") -def flagd_init(client: OpenFeatureClient, handles): - add_event_handler(client, ProviderEvent.PROVIDER_ERROR, handles) - add_event_handler(client, ProviderEvent.PROVIDER_READY, handles) - - -@then("an error should be indicated within the configured deadline") -def flagd_error(handles): - assert_handlers(handles, ProviderEvent.PROVIDER_ERROR) diff --git a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc.py b/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc.py deleted file mode 100644 index 926c2195..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc.py +++ /dev/null @@ -1,6 +0,0 @@ -from pytest_bdd import scenarios - -GHERKIN_FOLDER = "../../../../test-harness/gherkin/" - -scenarios(f"{GHERKIN_FOLDER}flagd-json-evaluator.feature") -scenarios(f"{GHERKIN_FOLDER}flagd.feature") diff --git a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc_reconnect.py b/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc_reconnect.py deleted file mode 100644 index e3e1b85d..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/inprocess/grpc/test_inprocess_grpc_reconnect.py +++ /dev/null @@ -1,12 +0,0 @@ -import pytest -from pytest_bdd import scenarios - -GHERKIN_FOLDER = "../../../../test-harness/gherkin/" - -scenarios(f"{GHERKIN_FOLDER}flagd-reconnect.feature") - - -@pytest.fixture -def port(): - # Port for flagd-sync-unstable, overrides main conftest port - return 9091 diff --git a/providers/openfeature-provider-flagd/tests/e2e/steps.py b/providers/openfeature-provider-flagd/tests/e2e/steps.py index fe490c5f..e4cfdc07 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/steps.py +++ b/providers/openfeature-provider-flagd/tests/e2e/steps.py @@ -1,3 +1,4 @@ +import logging import time import typing @@ -8,10 +9,13 @@ from openfeature import api from openfeature.client import OpenFeatureClient +from openfeature.contrib.provider.flagd import FlagdProvider +from openfeature.contrib.provider.flagd.config import ResolverType from openfeature.evaluation_context import EvaluationContext from openfeature.event import EventDetails, ProviderEvent from openfeature.flag_evaluation import ErrorCode, FlagEvaluationDetails, Reason from openfeature.provider import ProviderStatus +from tests.e2e.parsers import to_bool, to_list JsonObject = typing.Union[dict, list] JsonPrimitive = typing.Union[str, bool, float, int, JsonObject] @@ -24,8 +28,17 @@ def evaluation_context() -> EvaluationContext: @given("a flagd provider is set", target_fixture="client") @given("a provider is registered", target_fixture="client") -def setup_provider() -> OpenFeatureClient: - client = api.get_client() +def setup_provider(setup, resolver_type, client_name) -> OpenFeatureClient: + api.set_provider( + FlagdProvider( + resolver_type=resolver_type, + port=setup, + timeout=0.5, + retry_backoff_seconds=0.1, + ), + client_name, + ) + client = api.get_client(client_name) wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) return client @@ -491,58 +504,134 @@ def assert_reason( assert_equal(evaluation_result.reason, reason) -@when(parsers.cfparse("a PROVIDER_READY handler is added")) -def provider_ready_add(client: OpenFeatureClient, context): - def provider_ready_handler(event_details: EventDetails): - context["provider_ready_ran"] = True - - client.add_handler(ProviderEvent.PROVIDER_READY, provider_ready_handler) - +@pytest.fixture() +def event_handles() -> list: + return [] -@then(parsers.cfparse("the PROVIDER_READY handler must run")) -def provider_ready_was_executed(client: OpenFeatureClient, context): - assert_true(context["provider_ready_ran"]) +@pytest.fixture() +def error_handles() -> list: + return [] -@when(parsers.cfparse("a PROVIDER_CONFIGURATION_CHANGED handler is added")) -def provider_changed_add(client: OpenFeatureClient, context): - def provider_changed_handler(event_details: EventDetails): - context["provider_changed_ran"] = True - client.add_handler( - ProviderEvent.PROVIDER_CONFIGURATION_CHANGED, provider_changed_handler - ) +@when( + parsers.cfparse( + "a {event_type:ProviderEvent} handler is added", + extra_types={"ProviderEvent": ProviderEvent}, + ), +) +def add_event_handler( + client: OpenFeatureClient, event_type: ProviderEvent, event_handles: list +): + def handler(event): + logging.debug((event_type, event)) + event_handles.append( + { + "type": event_type, + "event": event, + } + ) + client.add_handler(event_type, handler) @pytest.fixture(scope="function") def context(): return {} - -@when(parsers.cfparse('a flag with key "{flag_key}" is modified')) -def assert_reason2( +@when( + parsers.cfparse( + "a {event_type:ProviderEvent} handler and a {event_type2:ProviderEvent} handler are added", + extra_types={"ProviderEvent": ProviderEvent}, + ) +) +def add_event_handlers( client: OpenFeatureClient, - context, - flag_key: str, + event_type: ProviderEvent, + event_type2: ProviderEvent, + event_handles, + error_handles, ): - context["flag_key"] = flag_key + add_event_handler(client, event_type, event_handles) + add_event_handler(client, event_type2, error_handles) + + +def assert_handlers( + handles, event_type: ProviderEvent, max_wait: int = 2, num_events: int = 1 +): + poll_interval = 1 + while max_wait > 0: + if sum([h["type"] == event_type for h in handles]) < num_events: + max_wait -= poll_interval + time.sleep(poll_interval) + continue + break + + logging.info(f"asserting num({event_type}) >= {num_events}: {handles}") + actual_num_events = sum([h["type"] == event_type for h in handles]) + assert ( + num_events <= actual_num_events + ), f"Expected {num_events} but got {actual_num_events}: {handles}" + + +@then( + parsers.cfparse( + "the {event_type:ProviderEvent} handler must run", + extra_types={"ProviderEvent": ProviderEvent}, + ) +) +@then( + parsers.cfparse( + "the {event_type:ProviderEvent} handler must run when the provider connects", + extra_types={"ProviderEvent": ProviderEvent}, + ) +) +def assert_handler_run(event_type: ProviderEvent, event_handles): + assert_handlers(event_handles, event_type, max_wait=6) @then( - parsers.cfparse("the PROVIDER_CONFIGURATION_CHANGED handler must run"), + parsers.cfparse( + "the {event_type:ProviderEvent} handler must run when the provider's connection is lost", + extra_types={"ProviderEvent": ProviderEvent}, + ) ) -def provider_changed_was_executed(client: OpenFeatureClient, context): - wait_for(lambda: context.get("provider_changed_ran")) - assert_equal(context["provider_changed_ran"], True) +def assert_disconnect_handler(error_handles, event_type: ProviderEvent): + # docker sync upstream restarts every 5s, waiting 2 cycles reduces test noise + assert_handlers(error_handles, event_type, max_wait=30) -@then(parsers.cfparse('the event details must indicate "{flag_name}" was altered')) -def flag_was_changed( - flag_name: str, - context, +@when( + parsers.cfparse('a flag with key "{flag_key}" is modified'), + target_fixture="changed_flag", +) +def changed_flag( + flag_key: str, +): + return flag_key + + +@then( + parsers.cfparse( + "when the connection is reestablished the {event_type:ProviderEvent} handler must run again", + extra_types={"ProviderEvent": ProviderEvent}, + ) +) +def assert_disconnect_error( + client: OpenFeatureClient, event_type: ProviderEvent, event_handles: list ): - wait_for(lambda: flag_name in context.get("changed_flags")) - assert_in(flag_name, context.get("changed_flags")) + assert_handlers(event_handles, event_type, max_wait=30, num_events=2) + + +@then(parsers.cfparse('the event details must indicate "{key}" was altered')) +def assert_flag_changed(event_handles, key): + handle = None + for h in event_handles: + if h["type"] == ProviderEvent.PROVIDER_CONFIGURATION_CHANGED: + handle = h + break + + assert handle is not None + assert key in handle["event"].flags_changed def wait_for(pred, poll_sec=2, timeout_sec=10): @@ -551,3 +640,26 @@ def wait_for(pred, poll_sec=2, timeout_sec=10): time.sleep(poll_sec) assert_true(pred()) return ok + + +@given("flagd is unavailable", target_fixture="client") +def flagd_unavailable(): + api.set_provider( + FlagdProvider( + resolver_type=ResolverType.IN_PROCESS, + port=99999, + ), + "unavailable", + ) + return api.get_client("unavailable") + + +@when("a flagd provider is set and initialization is awaited") +def flagd_init(client: OpenFeatureClient, event_handles, error_handles): + add_event_handler(client, ProviderEvent.PROVIDER_ERROR, error_handles) + add_event_handler(client, ProviderEvent.PROVIDER_READY, event_handles) + + +@then("an error should be indicated within the configured deadline") +def flagd_error(error_handles): + assert_handlers(error_handles, ProviderEvent.PROVIDER_ERROR) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_in-process-file.py b/providers/openfeature-provider-flagd/tests/e2e/test_in-process-file.py index 2d09ca11..002f6468 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/test_in-process-file.py +++ b/providers/openfeature-provider-flagd/tests/e2e/test_in-process-file.py @@ -5,11 +5,14 @@ import pytest import yaml -from pytest_bdd import scenario, scenarios +from pytest_bdd import given, scenario, scenarios +from tests.e2e.steps import wait_for from openfeature import api +from openfeature.client import OpenFeatureClient from openfeature.contrib.provider.flagd import FlagdProvider from openfeature.contrib.provider.flagd.config import ResolverType +from openfeature.provider import ProviderStatus KEY_EVALUATORS = "$evaluators" @@ -18,7 +21,7 @@ MERGED_FILE = "merged_file" -@pytest.fixture(params=["json", "yaml"], scope="package") +@pytest.fixture(params=["json", "yaml"], scope="module") def file_name(request): extension = request.param result = {KEY_FLAGS: {}, KEY_EVALUATORS: {}} @@ -48,17 +51,38 @@ def file_name(request): return outfile -@pytest.fixture(autouse=True, scope="package") -def setup(request, file_name): - """`file_name` tests""" +@pytest.fixture(autouse=True, scope="module") +def client_name() -> str: + return "in-process" + + +@pytest.fixture(autouse=True, scope="module") +def resolver_type() -> ResolverType: + return ResolverType.IN_PROCESS + + +@pytest.fixture(autouse=True, scope="module") +def setup(request, client_name, file_name, resolver_type): + """nothing to boot""" api.set_provider( FlagdProvider( - resolver_type=ResolverType.IN_PROCESS, + resolver_type=resolver_type, offline_flag_source_path=file_name.name, - ) + timeout=0.5, + retry_backoff_seconds=0.1, + ), + client_name, ) +@given("a flagd provider is set", target_fixture="client") +@given("a provider is registered", target_fixture="client") +def setup_provider(client_name) -> OpenFeatureClient: + client = api.get_client(client_name) + wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) + return client + + @pytest.mark.skip(reason="Eventing not implemented") @scenario("../../test-harness/gherkin/flagd.feature", "Flag change event") def test_flag_change_event(): diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_inprocess.py b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess.py new file mode 100644 index 00000000..1f356f87 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess.py @@ -0,0 +1,37 @@ +import pytest +from pytest_bdd import scenario, scenarios + +from openfeature.contrib.provider.flagd.config import ResolverType + + +@pytest.fixture(autouse=True, scope="module") +def client_name() -> str: + return "in-process" + + +@pytest.fixture(autouse=True, scope="module") +def resolver_type() -> ResolverType: + return ResolverType.IN_PROCESS + + +@pytest.fixture(autouse=True, scope="module") +def port(): + return 8015 + + +@pytest.fixture(autouse=True, scope="module") +def image(): + return "ghcr.io/open-feature/flagd-testbed:v0.5.13" + + +@pytest.mark.skip(reason="0 float might be a int") +@scenario("../../test-harness/gherkin/flagd.feature", "Resolves float zero value") +def test_flag_change_event(): + """not implemented""" + + +scenarios( + "../../test-harness/gherkin/flagd.feature", + "../../test-harness/gherkin/flagd-json-evaluator.feature", + "../../spec/specification/assets/gherkin/evaluation.feature", +) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_file.py b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_file.py new file mode 100644 index 00000000..d980a72d --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_file.py @@ -0,0 +1,83 @@ +import json +import os +import tempfile +from os import listdir + +import pytest +import yaml +from pytest_bdd import given, scenario, scenarios + +from openfeature import api +from openfeature.client import OpenFeatureClient +from openfeature.contrib.provider.flagd import FlagdProvider +from openfeature.contrib.provider.flagd.config import ResolverType +from openfeature.provider import ProviderStatus +from tests.e2e.steps import wait_for + +KEY_EVALUATORS = "$evaluators" + +KEY_FLAGS = "flags" + +MERGED_FILE = "merged_file" + + +@pytest.fixture(params=["json", "yaml"], autouse=True) +def file_name(request): + extension = request.param + result = {KEY_FLAGS: {}, KEY_EVALUATORS: {}} + + path = os.path.abspath( + os.path.join(os.path.dirname(__file__), "../../test-harness/flags/") + ) + + for f in listdir(path): + with open(path + "/" + f, "rb") as infile: + loaded_json = json.load(infile) + result[KEY_FLAGS] = {**result[KEY_FLAGS], **loaded_json[KEY_FLAGS]} + if loaded_json.get(KEY_EVALUATORS): + result[KEY_EVALUATORS] = { + **result[KEY_EVALUATORS], + **loaded_json[KEY_EVALUATORS], + } + + with tempfile.NamedTemporaryFile( + "w", delete=False, suffix="." + extension + ) as outfile: + if extension == "json": + json.dump(result, outfile) + else: + yaml.dump(result, outfile) + + return outfile + + +@pytest.fixture(autouse=True, scope="module") +def setup(request): + pass + + +@given("a flagd provider is set", target_fixture="client") +@given("a provider is registered", target_fixture="client") +def setup_provider(setup, file_name) -> OpenFeatureClient: + api.set_provider( + FlagdProvider( + resolver_type=ResolverType.IN_PROCESS, + offline_flag_source_path=file_name.name, + ) + ) + client = api.get_client() + wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) + return client + + +@pytest.mark.skip(reason="Eventing not implemented") +@scenario("../../test-harness/gherkin/flagd.feature", "Flag change event") +def test_flag_change_event(): + """not implemented""" + + +scenarios( + "../../test-harness/gherkin/flagd.feature", + "../../test-harness/gherkin/flagd-json-evaluator.feature", + "../../spec/specification/assets/gherkin/evaluation.feature", +) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_reconnect.py b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_reconnect.py new file mode 100644 index 00000000..18fa1767 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/test_inprocess_reconnect.py @@ -0,0 +1,35 @@ +import pytest +from pytest_bdd import scenarios + +from openfeature.contrib.provider.flagd.config import ResolverType + + +@pytest.fixture(autouse=True, scope="module") +def client_name() -> str: + return "in-process-reconnect" + + +@pytest.fixture(autouse=True, scope="module") +def resolver_type() -> ResolverType: + return ResolverType.IN_PROCESS + + +@pytest.fixture(autouse=True, scope="module") +def port(): + return 8015 + + +@pytest.fixture(autouse=True, scope="module") +def image(): + return "ghcr.io/open-feature/flagd-testbed-unstable:v0.5.13" + + +# @pytest.mark.skip(reason="Reconnect seems to be flacky") +# @scenario("../../test-harness/gherkin/flagd-reconnect.feature", "Provider reconnection") +# def test_flag_change_event(): +# """not implemented""" + + +scenarios( + "../../test-harness/gherkin/flagd-reconnect.feature", +) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py b/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py index d2fe57e9..525972f0 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py +++ b/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py @@ -4,17 +4,22 @@ from openfeature.contrib.provider.flagd.config import ResolverType -@pytest.fixture(autouse=True, scope="package") +@pytest.fixture(autouse=True, scope="module") +def client_name() -> str: + return "rpc" + + +@pytest.fixture(autouse=True, scope="module") def resolver_type() -> ResolverType: return ResolverType.GRPC -@pytest.fixture(autouse=True, scope="package") +@pytest.fixture(autouse=True, scope="module") def port(): return 8013 -@pytest.fixture(autouse=True, scope="package") +@pytest.fixture(autouse=True, scope="module") def image(): return "ghcr.io/open-feature/flagd-testbed:v0.5.13" diff --git a/providers/openfeature-provider-flagd/tests/test_errors.py b/providers/openfeature-provider-flagd/tests/test_errors.py index cc053788..1a083b74 100644 --- a/providers/openfeature-provider-flagd/tests/test_errors.py +++ b/providers/openfeature-provider-flagd/tests/test_errors.py @@ -1,4 +1,5 @@ import os +import time import pytest @@ -6,6 +7,7 @@ from openfeature.contrib.provider.flagd import FlagdProvider from openfeature.contrib.provider.flagd.config import ResolverType from openfeature.evaluation_context import EvaluationContext +from openfeature.event import ProviderEvent from openfeature.exception import ErrorCode from openfeature.flag_evaluation import Reason @@ -84,3 +86,27 @@ def test_flag_disabled(): assert res.value == "fallback" assert res.reason == Reason.DISABLED + + +@pytest.mark.parametrize("wait", (0.5, 0.25)) +def test_grpc_sync_fail_deadline(wait: float): + init_failed = False + + def fail(*args, **kwargs): + nonlocal init_failed + init_failed = True + + api.get_client().add_handler(ProviderEvent.PROVIDER_ERROR, fail) + + t = time.time() + api.set_provider( + FlagdProvider( + resolver_type=ResolverType.IN_PROCESS, + port=99999, # dead port to test failure + timeout=wait, + ) + ) + + elapsed = time.time() - t + assert abs(elapsed - wait) < 0.1 + assert init_failed diff --git a/providers/openfeature-provider-flagd/tests/test_file_store.py b/providers/openfeature-provider-flagd/tests/test_file_store.py index 12a13d25..0a152419 100644 --- a/providers/openfeature-provider-flagd/tests/test_file_store.py +++ b/providers/openfeature-provider-flagd/tests/test_file_store.py @@ -6,10 +6,9 @@ from openfeature import api from openfeature.contrib.provider.flagd import FlagdProvider from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import ( - FileWatcherFlagStore, + FileWatcher, ) -from openfeature.contrib.provider.flagd.resolvers.process.flags import Flag -from openfeature.provider.provider import AbstractProvider +from openfeature.contrib.provider.flagd.resolvers.process.flags import Flag, FlagStore def create_client(provider: FlagdProvider): @@ -24,12 +23,18 @@ def create_client(provider: FlagdProvider): "basic-flag.yaml", ], ) -def test_file_load_errors(file_name: str): - provider = Mock(spec=AbstractProvider) +def test_file_load(file_name: str): + emit_provider_configuration_changed = Mock() + emit_provider_ready = Mock() + emit_provider_error = Mock() + flag_store = FlagStore(emit_provider_configuration_changed) path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) - file_store = FileWatcherFlagStore(f"{path}/{file_name}", provider) + file_watcher = FileWatcher( + f"{path}/{file_name}", flag_store, emit_provider_ready, emit_provider_error + ) + file_watcher.initialize(None) - flag = file_store.flag_data.get("basic-flag") + flag = flag_store.get_flag("basic-flag") assert flag is not None assert isinstance(flag, Flag)