From ec814ebff957fc25c3f1affdf480931d1238c372 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Wed, 18 Dec 2024 10:16:56 -0800 Subject: [PATCH 01/12] updated models and recorder --- splitio/models/impressions.py | 8 ++++++++ splitio/models/splits.py | 22 +++++++++++++++++----- splitio/recorder/recorder.py | 16 ++++++++-------- tests/models/test_splits.py | 6 +++++- 4 files changed, 38 insertions(+), 14 deletions(-) diff --git a/splitio/models/impressions.py b/splitio/models/impressions.py index b08d31fb..21daacae 100644 --- a/splitio/models/impressions.py +++ b/splitio/models/impressions.py @@ -16,6 +16,14 @@ ] ) +ImpressionDecorated = namedtuple( + 'ImpressionDecorated', + [ + 'Impression', + 'track' + ] +) + # pre-python3.7 hack to make previous_time optional Impression.__new__.__defaults__ = (None,) diff --git a/splitio/models/splits.py b/splitio/models/splits.py index b5158ac5..170327ab 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -10,7 +10,7 @@ SplitView = namedtuple( 'SplitView', - ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets'] + ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'trackImpressions'] ) _DEFAULT_CONDITIONS_TEMPLATE = { @@ -73,7 +73,8 @@ def __init__( # pylint: disable=too-many-arguments traffic_allocation=None, traffic_allocation_seed=None, configurations=None, - sets=None + sets=None, + trackImpressions=None ): """ Class constructor. 
@@ -96,6 +97,8 @@ def __init__( # pylint: disable=too-many-arguments :type traffic_allocation_seed: int :pram sets: list of flag sets :type sets: list + :param trackImpressions: track impressions flag + :type trackImpressions: boolean """ self._name = name self._seed = seed @@ -125,6 +128,7 @@ def __init__( # pylint: disable=too-many-arguments self._configurations = configurations self._sets = set(sets) if sets is not None else set() + self._trackImpressions = trackImpressions if trackImpressions is not None else True @property def name(self): @@ -186,6 +190,11 @@ def sets(self): """Return the flag sets of the split.""" return self._sets + @property + def trackImpressions(self): + """Return trackImpressions of the split.""" + return self._trackImpressions + def get_configurations_for(self, treatment): """Return the mapping of treatments to configurations.""" return self._configurations.get(treatment) if self._configurations else None @@ -214,7 +223,8 @@ def to_json(self): 'algo': self.algo.value, 'conditions': [c.to_json() for c in self.conditions], 'configurations': self._configurations, - 'sets': list(self._sets) + 'sets': list(self._sets), + 'trackImpressions': self._trackImpressions } def to_split_view(self): @@ -232,7 +242,8 @@ def to_split_view(self): self.change_number, self._configurations if self._configurations is not None else {}, self._default_treatment, - list(self._sets) if self._sets is not None else [] + list(self._sets) if self._sets is not None else [], + self._trackImpressions ) def local_kill(self, default_treatment, change_number): @@ -288,5 +299,6 @@ def from_raw(raw_split): traffic_allocation=raw_split.get('trafficAllocation'), traffic_allocation_seed=raw_split.get('trafficAllocationSeed'), configurations=raw_split.get('configurations'), - sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [] + sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [], + 
trackImpressions=raw_split.get('trackImpressions') if raw_split.get('trackImpressions') is not None else True ) diff --git a/splitio/recorder/recorder.py b/splitio/recorder/recorder.py index 31a5a7db..4c0ec155 100644 --- a/splitio/recorder/recorder.py +++ b/splitio/recorder/recorder.py @@ -151,7 +151,7 @@ def __init__(self, impressions_manager, event_storage, impression_storage, telem self._telemetry_evaluation_producer = telemetry_evaluation_producer self._telemetry_runtime_producer = telemetry_runtime_producer - def record_treatment_stats(self, impressions, latency, operation, method_name): + def record_treatment_stats(self, impressions_decorated, latency, operation, method_name): """ Record stats for treatment evaluation. @@ -165,7 +165,7 @@ def record_treatment_stats(self, impressions, latency, operation, method_name): try: if method_name is not None: self._telemetry_evaluation_producer.record_latency(operation, latency) - impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions) + impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions_decorated) if deduped > 0: self._telemetry_runtime_producer.record_impression_stats(telemetry.CounterConstants.IMPRESSIONS_DEDUPED, deduped) self._impression_storage.put(impressions) @@ -210,7 +210,7 @@ def __init__(self, impressions_manager, event_storage, impression_storage, telem self._telemetry_evaluation_producer = telemetry_evaluation_producer self._telemetry_runtime_producer = telemetry_runtime_producer - async def record_treatment_stats(self, impressions, latency, operation, method_name): + async def record_treatment_stats(self, impressions_decorated, latency, operation, method_name): """ Record stats for treatment evaluation. 
@@ -224,7 +224,7 @@ async def record_treatment_stats(self, impressions, latency, operation, method_n try: if method_name is not None: await self._telemetry_evaluation_producer.record_latency(operation, latency) - impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions) + impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions_decorated) if deduped > 0: await self._telemetry_runtime_producer.record_impression_stats(telemetry.CounterConstants.IMPRESSIONS_DEDUPED, deduped) @@ -277,7 +277,7 @@ def __init__(self, pipe, impressions_manager, event_storage, self._data_sampling = data_sampling self._telemetry_redis_storage = telemetry_redis_storage - def record_treatment_stats(self, impressions, latency, operation, method_name): + def record_treatment_stats(self, impressions_decorated, latency, operation, method_name): """ Record stats for treatment evaluation. @@ -294,7 +294,7 @@ def record_treatment_stats(self, impressions, latency, operation, method_name): if self._data_sampling < rnumber: return - impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions) + impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions_decorated) if impressions: pipe = self._make_pipe() self._impression_storage.add_impressions_to_pipe(impressions, pipe) @@ -367,7 +367,7 @@ def __init__(self, pipe, impressions_manager, event_storage, self._data_sampling = data_sampling self._telemetry_redis_storage = telemetry_redis_storage - async def record_treatment_stats(self, impressions, latency, operation, method_name): + async def record_treatment_stats(self, impressions_decorated, latency, operation, method_name): """ Record stats for treatment evaluation. 
@@ -384,7 +384,7 @@ async def record_treatment_stats(self, impressions, latency, operation, method_n if self._data_sampling < rnumber: return - impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions) + impressions, deduped, for_listener, for_counter, for_unique_keys_tracker = self._impressions_manager.process_impressions(impressions_decorated) if impressions: pipe = self._make_pipe() self._impression_storage.add_impressions_to_pipe(impressions, pipe) diff --git a/tests/models/test_splits.py b/tests/models/test_splits.py index 9cd4bbfa..66718e71 100644 --- a/tests/models/test_splits.py +++ b/tests/models/test_splits.py @@ -60,7 +60,8 @@ class SplitTests(object): 'configurations': { 'on': '{"color": "blue", "size": 13}' }, - 'sets': ['set1', 'set2'] + 'sets': ['set1', 'set2'], + 'trackImpressions': True } def test_from_raw(self): @@ -81,6 +82,7 @@ def test_from_raw(self): assert parsed.get_configurations_for('on') == '{"color": "blue", "size": 13}' assert parsed._configurations == {'on': '{"color": "blue", "size": 13}'} assert parsed.sets == {'set1', 'set2'} + assert parsed.trackImpressions == True def test_get_segment_names(self, mocker): """Test fetching segment names.""" @@ -107,6 +109,7 @@ def test_to_json(self): assert as_json['algo'] == 2 assert len(as_json['conditions']) == 2 assert sorted(as_json['sets']) == ['set1', 'set2'] + assert as_json['trackImpressions'] is True def test_to_split_view(self): """Test SplitView creation.""" @@ -118,6 +121,7 @@ def test_to_split_view(self): assert as_split_view.traffic_type == self.raw['trafficTypeName'] assert set(as_split_view.treatments) == set(['on', 'off']) assert sorted(as_split_view.sets) == sorted(list(self.raw['sets'])) + assert as_split_view.trackImpressions == self.raw['trackImpressions'] def test_incorrect_matcher(self): """Test incorrect matcher in split model parsing.""" From 2a0297e440fe4d29632053f16ec4d6b9e6173e89 Mon Sep 17 
00:00:00 2001 From: Bilal Al Date: Wed, 18 Dec 2024 11:13:36 -0800 Subject: [PATCH 02/12] updated impressions and evaluator classes --- splitio/engine/evaluator.py | 3 +- splitio/engine/impressions/impressions.py | 26 +++- tests/engine/test_evaluator.py | 1 + tests/engine/test_impressions.py | 172 ++++++++++++++++------ 4 files changed, 148 insertions(+), 54 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index c6588c6f..3b27ad06 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -67,7 +67,8 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'impression': { 'label': label, 'change_number': _change_number - } + }, + 'track': feature.trackImpressions } def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): diff --git a/splitio/engine/impressions/impressions.py b/splitio/engine/impressions/impressions.py index 541e2f36..b4545d1e 100644 --- a/splitio/engine/impressions/impressions.py +++ b/splitio/engine/impressions/impressions.py @@ -11,7 +11,7 @@ class ImpressionsMode(Enum): class Manager(object): # pylint:disable=too-few-public-methods """Impression manager.""" - def __init__(self, strategy, telemetry_runtime_producer): + def __init__(self, strategy, none_strategy, telemetry_runtime_producer): """ Construct a manger to track and forward impressions to the queue. @@ -23,19 +23,33 @@ def __init__(self, strategy, telemetry_runtime_producer): """ self._strategy = strategy + self._none_strategy = none_strategy self._telemetry_runtime_producer = telemetry_runtime_producer - def process_impressions(self, impressions): + def process_impressions(self, impressions_decorated): """ Process impressions. Impressions are analyzed to see if they've been seen before and counted. 
- :param impressions: List of impression objects with attributes - :type impressions: list[tuple[splitio.models.impression.Impression, dict]] + :param impressions_decorated: List of impression objects with attributes + :type impressions_decorated: list[tuple[splitio.models.impression.ImpressionDecorated, dict]] :return: processed and deduped impressions. :rtype: tuple(list[tuple[splitio.models.impression.Impression, dict]], list(int)) """ - for_log, for_listener, for_counter, for_unique_keys_tracker = self._strategy.process_impressions(impressions) - return for_log, len(impressions) - len(for_log), for_listener, for_counter, for_unique_keys_tracker + for_listener_all = [] + for_log_all = [] + for_counter_all = [] + for_unique_keys_tracker_all = [] + for impression_decorated, att in impressions_decorated: + if not impression_decorated.track: + for_log, for_listener, for_counter, for_unique_keys_tracker = self._none_strategy.process_impressions([(impression_decorated.Impression, att)]) + else: + for_log, for_listener, for_counter, for_unique_keys_tracker = self._strategy.process_impressions([(impression_decorated.Impression, att)]) + for_listener_all.extend(for_listener) + for_log_all.extend(for_log) + for_counter_all.extend(for_counter) + for_unique_keys_tracker_all.extend(for_unique_keys_tracker) + + return for_log_all, len(impressions_decorated) - len(for_log_all), for_listener_all, for_counter_all, for_unique_keys_tracker_all diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index b56b7040..89631519 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -52,6 +52,7 @@ def test_evaluate_treatment_ok(self, mocker): assert result['impression']['change_number'] == 123 assert result['impression']['label'] == 'some_label' assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] + assert result['track'] == mocked_split.trackImpressions def test_evaluate_treatment_ok_no_config(self, mocker): diff 
--git a/tests/engine/test_impressions.py b/tests/engine/test_impressions.py index d736829b..a7b7da68 100644 --- a/tests/engine/test_impressions.py +++ b/tests/engine/test_impressions.py @@ -5,7 +5,7 @@ from splitio.engine.impressions.impressions import Manager, ImpressionsMode from splitio.engine.impressions.manager import Hasher, Observer, Counter, truncate_time from splitio.engine.impressions.strategies import StrategyDebugMode, StrategyOptimizedMode, StrategyNoneMode -from splitio.models.impressions import Impression +from splitio.models.impressions import Impression, ImpressionDecorated from splitio.client.listener import ImpressionListenerWrapper import splitio.models.telemetry as ModelTelemetry from splitio.engine.telemetry import TelemetryStorageProducer @@ -105,14 +105,15 @@ def test_standalone_optimized(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() - manager = Manager(StrategyOptimizedMode(), telemetry_runtime_producer) # no listener + manager = Manager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener assert manager._strategy._observer is not None assert isinstance(manager._strategy, StrategyOptimizedMode) + assert isinstance(manager._none_strategy, StrategyNoneMode) # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert for_unique_keys_tracker == [] @@ -122,7 +123,7 @@ def test_standalone_optimized(self, mocker): # Tracking the same impression a ms later 
should be empty imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] assert deduped == 1 @@ -130,7 +131,7 @@ def test_standalone_optimized(self, mocker): # Tracking an impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert deduped == 0 @@ -143,8 +144,8 @@ def test_standalone_optimized(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -157,14 +158,14 @@ def test_standalone_optimized(self, mocker): # Test counting only from the second impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert for_counter == [] assert deduped == 0 assert for_unique_keys_tracker == [] imps, deduped, listen, for_counter, for_unique_keys_tracker = 
manager.process_impressions([ - (Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert for_counter == [Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1, utc_now-1)] assert deduped == 1 @@ -179,14 +180,14 @@ def test_standalone_debug(self, mocker): utc_time_mock.return_value = utc_now mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock()) - manager = Manager(StrategyDebugMode(), mocker.Mock()) # no listener + manager = Manager(StrategyDebugMode(), StrategyNoneMode(), mocker.Mock()) # no listener assert manager._strategy._observer is not None assert isinstance(manager._strategy, StrategyDebugMode) # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -195,7 +196,7 @@ def test_standalone_debug(self, mocker): # Tracking the same impression a ms later should return the impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3)] assert for_counter == [] @@ -203,7 +204,7 @@ def test_standalone_debug(self, mocker): # Tracking a in impression with a 
different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert for_counter == [] @@ -217,8 +218,8 @@ def test_standalone_debug(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -236,13 +237,13 @@ def test_standalone_none(self, mocker): utc_time_mock.return_value = utc_now mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock()) - manager = Manager(StrategyNoneMode(), mocker.Mock()) # no listener + manager = Manager(StrategyNoneMode(), StrategyNoneMode(), mocker.Mock()) # no listener assert isinstance(manager._strategy, StrategyNoneMode) # no impressions are tracked, only counter and mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert imps == [] assert for_counter == [ @@ 
-253,13 +254,13 @@ def test_standalone_none(self, mocker): # Tracking the same impression a ms later should not return the impression and no change on mtk cache imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] # Tracking an impression with a different key, will only increase mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k3', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k3', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [] assert for_unique_keys_tracker == [('k3', 'f1')] @@ -275,8 +276,8 @@ def test_standalone_none(self, mocker): # Track the same impressions but "one hour later", no changes on mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] assert for_counter == [ @@ -294,14 +295,14 @@ def test_standalone_optimized_listener(self, mocker): # mocker.patch('splitio.util.time.utctime_ms', return_value=utc_time_mock) mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock()) - manager = Manager(StrategyOptimizedMode(), mocker.Mock()) + manager = Manager(StrategyOptimizedMode(), StrategyNoneMode(), mocker.Mock()) assert manager._strategy._observer is not None assert isinstance(manager._strategy, StrategyOptimizedMode) # An impression that hasn't happened in the last hour (pt = None) should be tracked 
imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -312,7 +313,7 @@ def test_standalone_optimized_listener(self, mocker): # Tracking the same impression a ms later should return empty imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] assert deduped == 1 @@ -321,7 +322,7 @@ def test_standalone_optimized_listener(self, mocker): # Tracking a in impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert deduped == 0 @@ -336,8 +337,8 @@ def test_standalone_optimized_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, 
utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -355,14 +356,14 @@ def test_standalone_optimized_listener(self, mocker): # Test counting only from the second impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert for_counter == [] assert deduped == 0 assert for_unique_keys_tracker == [] imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert for_counter == [ Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1, utc_now-1) @@ -381,13 +382,13 @@ def test_standalone_debug_listener(self, mocker): imps = [] listener = mocker.Mock(spec=ImpressionListenerWrapper) - manager = Manager(StrategyDebugMode(), mocker.Mock()) + manager = Manager(StrategyDebugMode(), StrategyNoneMode(), mocker.Mock()) assert isinstance(manager._strategy, StrategyDebugMode) # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -397,7 +398,7 @@ def 
test_standalone_debug_listener(self, mocker): # Tracking the same impression a ms later should return the imp imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3)] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3), None)] @@ -406,7 +407,7 @@ def test_standalone_debug_listener(self, mocker): # Tracking a in impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert listen == [(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None)] @@ -421,8 +422,8 @@ def test_standalone_debug_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -443,13 +444,13 @@ def test_standalone_none_listener(self, mocker): utc_time_mock.return_value = utc_now mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock()) - 
manager = Manager(StrategyNoneMode(), mocker.Mock()) + manager = Manager(StrategyNoneMode(), StrategyNoneMode(), mocker.Mock()) assert isinstance(manager._strategy, StrategyNoneMode) # An impression that hasn't happened in the last hour (pt = None) should not be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), - (Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) ]) assert imps == [] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), @@ -461,7 +462,7 @@ def test_standalone_none_listener(self, mocker): # Tracking the same impression a ms later should return empty, no updates on mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, None), None)] @@ -470,7 +471,7 @@ def test_standalone_none_listener(self, mocker): # Tracking a in impression with a different key update mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) ]) assert imps == [] assert listen == [(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None)] @@ -485,8 +486,8 @@ def test_standalone_none_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = 
manager.process_impressions([ - (Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), None), - (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) ]) assert imps == [] assert for_counter == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), @@ -496,3 +497,80 @@ def test_standalone_none_listener(self, mocker): (Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, None), None) ] assert for_unique_keys_tracker == [('k1', 'f1'), ('k2', 'f1')] + + def test_impression_toggle_optimized(self, mocker): + """Test impressions manager in optimized mode with sdk in standalone mode.""" + + # Mock utc_time function to be able to play with the clock + utc_now = truncate_time(utctime_ms_reimplement()) + 1800 * 1000 + utc_time_mock = mocker.Mock() + utc_time_mock.return_value = utc_now + mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock()) + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + manager = Manager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + assert manager._strategy._observer is not None + assert isinstance(manager._strategy, StrategyOptimizedMode) + assert isinstance(manager._none_strategy, StrategyNoneMode) + + # An impression that hasn't happened in the last hour (pt = None) should be tracked + imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + ]) + + assert for_unique_keys_tracker == [('k1', 
'f1')]
+        assert imps == [Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)]
+        assert deduped == 1
+
+    def test_impression_toggle_debug(self, mocker):
+        """Test impressions manager in debug mode with sdk in standalone mode."""
+
+        # Mock utc_time function to be able to play with the clock
+        utc_now = truncate_time(utctime_ms_reimplement()) + 1800 * 1000
+        utc_time_mock = mocker.Mock()
+        utc_time_mock.return_value = utc_now
+        mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock())
+        telemetry_storage = InMemoryTelemetryStorage()
+        telemetry_producer = TelemetryStorageProducer(telemetry_storage)
+        telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer()
+
+        manager = Manager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer)  # no listener
+        assert manager._strategy._observer is not None
+
+        # An impression that hasn't happened in the last hour (pt = None) should be tracked
+        imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([
+            (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None),
+            (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None)
+        ])
+
+        assert for_unique_keys_tracker == [('k1', 'f1')]
+        assert imps == [Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)]
+        assert deduped == 1
+
+    def test_impression_toggle_none(self, mocker):
+        """Test impressions manager in none mode with sdk in standalone mode."""
+
+        # Mock utc_time function to be able to play with the clock
+        utc_now = truncate_time(utctime_ms_reimplement()) + 1800 * 1000
+        utc_time_mock = mocker.Mock()
+        utc_time_mock.return_value = utc_now
+        mocker.patch('splitio.engine.impressions.strategies.utctime_ms', return_value=utc_time_mock())
+        telemetry_storage = InMemoryTelemetryStorage()
+        telemetry_producer = TelemetryStorageProducer(telemetry_storage)
+        telemetry_runtime_producer = 
telemetry_producer.get_telemetry_runtime_producer() + + strategy = StrategyNoneMode() + manager = Manager(strategy, strategy, telemetry_runtime_producer) # no listener + + # An impression that hasn't happened in the last hour (pt = None) should be tracked + imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + ]) + + assert for_unique_keys_tracker == [('k1', 'f1'), ('k1', 'f2')] + assert imps == [] + assert deduped == 2 From f0bfd534fd297fcd3d96fbafdf897c854d218d44 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Thu, 19 Dec 2024 12:16:39 -0800 Subject: [PATCH 03/12] updated factory and client classes --- splitio/client/client.py | 55 ++-- splitio/client/factory.py | 30 +- splitio/engine/impressions/__init__.py | 37 +-- tests/client/test_client.py | 436 ++++++++++++++++++++++--- tests/integration/__init__.py | 11 +- 5 files changed, 469 insertions(+), 100 deletions(-) diff --git a/splitio/client/client.py b/splitio/client/client.py index 02bfbbb8..98f621fb 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -3,7 +3,7 @@ from splitio.engine.evaluator import Evaluator, CONTROL, EvaluationDataFactory, AsyncEvaluationDataFactory from splitio.engine.splitters import Splitter -from splitio.models.impressions import Impression, Label +from splitio.models.impressions import Impression, Label, ImpressionDecorated from splitio.models.events import Event, EventWrapper from splitio.models.telemetry import get_latency_bucket_index, MethodExceptionsAndLatencies from splitio.client import input_validator @@ -22,7 +22,8 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes 'impression': { 'label': Label.EXCEPTION, 'change_number': None, - } + }, + 'track': True } _NON_READY_EVAL_RESULT = { @@ -31,7 +32,8 @@ class 
ClientBase(object): # pylint: disable=too-many-instance-attributes 'impression': { 'label': Label.NOT_READY, 'change_number': None - } + }, + 'track': True } def __init__(self, factory, recorder, labels_enabled=True): @@ -116,14 +118,15 @@ def _validate_treatments_input(key, features, attributes, method): def _build_impression(self, key, bucketing, feature, result): """Build an impression based on evaluation data & it's result.""" - return Impression( - matching_key=key, + return ImpressionDecorated( + Impression(matching_key=key, feature_name=feature, treatment=result['treatment'], label=result['impression']['label'] if self._labels_enabled else None, change_number=result['impression']['change_number'], bucketing_key=bucketing, - time=utctime_ms()) + time=utctime_ms()), + track=result['track']) def _build_impressions(self, key, bucketing, results): """Build an impression based on evaluation data & it's result.""" @@ -228,7 +231,7 @@ def get_treatment(self, key, feature_flag_name, attributes=None): treatment, _ = self._get_treatment(MethodExceptionsAndLatencies.TREATMENT, key, feature_flag_name, attributes) return treatment - except: + except Exception as e: _LOGGER.error('get_treatment failed') return CONTROL @@ -296,8 +299,8 @@ def _get_treatment(self, method, key, feature, attributes=None): result = self._FAILED_EVAL_RESULT if result['impression']['label'] != Label.SPLIT_NOT_FOUND: - impression = self._build_impression(key, bucketing, feature, result) - self._record_stats([(impression, attributes)], start, method) + impression_decorated = self._build_impression(key, bucketing, feature, result) + self._record_stats([(impression_decorated, attributes)], start, method) return result['treatment'], result['configurations'] @@ -571,23 +574,23 @@ def _get_treatments(self, key, features, method, attributes=None): self._telemetry_evaluation_producer.record_exception(method) results = {n: self._FAILED_EVAL_RESULT for n in features} - imp_attrs = [ + imp_decorated_attrs = 
[ (i, attributes) for i in self._build_impressions(key, bucketing, results) - if i.label != Label.SPLIT_NOT_FOUND + if i.Impression.label != Label.SPLIT_NOT_FOUND ] - self._record_stats(imp_attrs, start, method) + self._record_stats(imp_decorated_attrs, start, method) return { feature: (results[feature]['treatment'], results[feature]['configurations']) for feature in results } - def _record_stats(self, impressions, start, operation): + def _record_stats(self, impressions_decorated, start, operation): """ Record impressions. - :param impressions: Generated impressions - :type impressions: list[tuple[splitio.models.impression.Impression, dict]] + :param impressions_decorated: Generated impressions + :type impressions_decorated: list[tuple[splitio.models.impression.ImpressionDecorated, dict]] :param start: timestamp when get_treatment or get_treatments was called :type start: int @@ -596,7 +599,7 @@ def _record_stats(self, impressions, start, operation): :type operation: str """ end = get_current_epoch_time_ms() - self._recorder.record_treatment_stats(impressions, get_latency_bucket_index(end - start), + self._recorder.record_treatment_stats(impressions_decorated, get_latency_bucket_index(end - start), operation, 'get_' + operation.value) def track(self, key, traffic_type, event_type, value=None, properties=None): @@ -695,7 +698,7 @@ async def get_treatment(self, key, feature_flag_name, attributes=None): treatment, _ = await self._get_treatment(MethodExceptionsAndLatencies.TREATMENT, key, feature_flag_name, attributes) return treatment - except: + except Exception as e: _LOGGER.error('get_treatment failed') return CONTROL @@ -763,8 +766,8 @@ async def _get_treatment(self, method, key, feature, attributes=None): result = self._FAILED_EVAL_RESULT if result['impression']['label'] != Label.SPLIT_NOT_FOUND: - impression = self._build_impression(key, bucketing, feature, result) - await self._record_stats([(impression, attributes)], start, method) + impression_decorated = 
self._build_impression(key, bucketing, feature, result)
+            await self._record_stats([(impression_decorated, attributes)], start, method)
         return result['treatment'], result['configurations']
 
     async def get_treatments(self, key, feature_flag_names, attributes=None):
@@ -960,23 +963,23 @@ async def _get_treatments(self, key, features, method, attributes=None):
             await self._telemetry_evaluation_producer.record_exception(method)
             results = {n: self._FAILED_EVAL_RESULT for n in features}
 
-        imp_attrs = [
+        imp_decorated_attrs = [
             (i, attributes)
             for i in self._build_impressions(key, bucketing, results)
-            if i.label != Label.SPLIT_NOT_FOUND
+            if i.Impression.label != Label.SPLIT_NOT_FOUND
         ]
-        await self._record_stats(imp_attrs, start, method)
+        await self._record_stats(imp_decorated_attrs, start, method)
         return {
             feature: (res['treatment'], res['configurations'])
             for feature, res in results.items()
         }
 
-    async def _record_stats(self, impressions, start, operation):
+    async def _record_stats(self, impressions_decorated, start, operation):
         """
         Record impressions for async calls
 
-        :param impressions: Generated impressions
-        :type impressions: list[tuple[splitio.models.impression.Impression, dict]]
+        :param impressions_decorated: Generated impressions decorated
+        :type impressions_decorated: list[tuple[splitio.models.impression.ImpressionDecorated, dict]]
 
         :param start: timestamp when get_treatment or get_treatments was called
         :type start: int
@@ -985,7 +988,7 @@ async def _record_stats(self, impressions, start, operation):
         :type operation: str
         """
         end = get_current_epoch_time_ms()
-        await self._recorder.record_treatment_stats(impressions, get_latency_bucket_index(end - start),
+        await self._recorder.record_treatment_stats(impressions_decorated, get_latency_bucket_index(end - start),
                                               operation, 'get_' + operation.value)
 
     async def track(self, key, traffic_type, event_type, value=None, properties=None):
diff --git a/splitio/client/factory.py b/splitio/client/factory.py
index 8c3b7572..bb402bb5 
100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -13,7 +13,7 @@ from splitio.client.listener import ImpressionListenerWrapper, ImpressionListenerWrapperAsync from splitio.engine.impressions.impressions import Manager as ImpressionsManager from splitio.engine.impressions import set_classes, set_classes_async -from splitio.engine.impressions.strategies import StrategyDebugMode +from splitio.engine.impressions.strategies import StrategyDebugMode, StrategyNoneMode from splitio.engine.telemetry import TelemetryStorageProducer, TelemetryStorageConsumer, \ TelemetryStorageProducerAsync, TelemetryStorageConsumerAsync from splitio.engine.impressions.manager import Counter as ImpressionsCounter @@ -553,10 +553,10 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl unique_keys_tracker = UniqueKeysTracker(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes('MEMORY', cfg['impressionsMode'], apis, imp_counter, unique_keys_tracker) + imp_strategy, none_strategy = set_classes('MEMORY', cfg['impressionsMode'], apis, imp_counter, unique_keys_tracker) imp_manager = ImpressionsManager( - imp_strategy, telemetry_runtime_producer) + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( SplitSynchronizer(apis['splits'], storages['splits']), @@ -681,10 +681,10 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= unique_keys_tracker = UniqueKeysTrackerAsync(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes_async('MEMORY', cfg['impressionsMode'], apis, imp_counter, unique_keys_tracker) + imp_strategy, none_strategy = set_classes_async('MEMORY', cfg['impressionsMode'], apis, imp_counter, 
unique_keys_tracker) imp_manager = ImpressionsManager( - imp_strategy, telemetry_runtime_producer) + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( SplitSynchronizerAsync(apis['splits'], storages['splits']), @@ -775,10 +775,10 @@ def _build_redis_factory(api_key, cfg): unique_keys_tracker = UniqueKeysTracker(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes('REDIS', cfg['impressionsMode'], redis_adapter, imp_counter, unique_keys_tracker) + imp_strategy, none_strategy = set_classes('REDIS', cfg['impressionsMode'], redis_adapter, imp_counter, unique_keys_tracker) imp_manager = ImpressionsManager( - imp_strategy, + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers(None, None, None, None, @@ -858,10 +858,10 @@ async def _build_redis_factory_async(api_key, cfg): unique_keys_tracker = UniqueKeysTrackerAsync(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes_async('REDIS', cfg['impressionsMode'], redis_adapter, imp_counter, unique_keys_tracker) + imp_strategy, none_strategy = set_classes_async('REDIS', cfg['impressionsMode'], redis_adapter, imp_counter, unique_keys_tracker) imp_manager = ImpressionsManager( - imp_strategy, + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers(None, None, None, None, @@ -936,10 +936,10 @@ def _build_pluggable_factory(api_key, cfg): unique_keys_tracker = UniqueKeysTracker(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes('PLUGGABLE', cfg['impressionsMode'], pluggable_adapter, imp_counter, unique_keys_tracker, 
storage_prefix) + imp_strategy, none_strategy = set_classes('PLUGGABLE', cfg['impressionsMode'], pluggable_adapter, imp_counter, unique_keys_tracker, storage_prefix) imp_manager = ImpressionsManager( - imp_strategy, + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers(None, None, None, None, @@ -1017,10 +1017,10 @@ async def _build_pluggable_factory_async(api_key, cfg): unique_keys_tracker = UniqueKeysTrackerAsync(_UNIQUE_KEYS_CACHE_SIZE) unique_keys_synchronizer, clear_filter_sync, unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes_async('PLUGGABLE', cfg['impressionsMode'], pluggable_adapter, imp_counter, unique_keys_tracker, storage_prefix) + imp_strategy, none_strategy = set_classes_async('PLUGGABLE', cfg['impressionsMode'], pluggable_adapter, imp_counter, unique_keys_tracker, storage_prefix) imp_manager = ImpressionsManager( - imp_strategy, + imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers(None, None, None, None, @@ -1123,7 +1123,7 @@ def _build_localhost_factory(cfg): manager.start() recorder = StandardRecorder( - ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer), + ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer), storages['events'], storages['impressions'], telemetry_evaluation_producer, @@ -1192,7 +1192,7 @@ async def _build_localhost_factory_async(cfg): await manager.start() recorder = StandardRecorderAsync( - ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer), + ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer), storages['events'], storages['impressions'], telemetry_evaluation_producer, diff --git a/splitio/engine/impressions/__init__.py b/splitio/engine/impressions/__init__.py index 3e5ae13e..dd76f333 100644 --- a/splitio/engine/impressions/__init__.py +++ 
b/splitio/engine/impressions/__init__.py @@ -53,24 +53,24 @@ def set_classes(storage_mode, impressions_mode, api_adapter, imp_counter, unique api_impressions_adapter = api_adapter['impressions'] sender_adapter = InMemorySenderAdapter(api_telemetry_adapter) + none_strategy = StrategyNoneMode() + unique_keys_synchronizer = UniqueKeysSynchronizer(sender_adapter, unique_keys_tracker) + unique_keys_task = UniqueKeysSyncTask(unique_keys_synchronizer.send_all) + clear_filter_sync = ClearFilterSynchronizer(unique_keys_tracker) + impressions_count_sync = ImpressionsCountSynchronizer(api_impressions_adapter, imp_counter) + impressions_count_task = ImpressionsCountSyncTask(impressions_count_sync.synchronize_counters) + clear_filter_task = ClearFilterSyncTask(clear_filter_sync.clear_all) + unique_keys_tracker.set_queue_full_hook(unique_keys_task.flush) + if impressions_mode == ImpressionsMode.NONE: imp_strategy = StrategyNoneMode() - unique_keys_synchronizer = UniqueKeysSynchronizer(sender_adapter, unique_keys_tracker) - unique_keys_task = UniqueKeysSyncTask(unique_keys_synchronizer.send_all) - clear_filter_sync = ClearFilterSynchronizer(unique_keys_tracker) - impressions_count_sync = ImpressionsCountSynchronizer(api_impressions_adapter, imp_counter) - impressions_count_task = ImpressionsCountSyncTask(impressions_count_sync.synchronize_counters) - clear_filter_task = ClearFilterSyncTask(clear_filter_sync.clear_all) - unique_keys_tracker.set_queue_full_hook(unique_keys_task.flush) elif impressions_mode == ImpressionsMode.DEBUG: imp_strategy = StrategyDebugMode() else: imp_strategy = StrategyOptimizedMode() - impressions_count_sync = ImpressionsCountSynchronizer(api_impressions_adapter, imp_counter) - impressions_count_task = ImpressionsCountSyncTask(impressions_count_sync.synchronize_counters) return unique_keys_synchronizer, clear_filter_sync, unique_keys_task, clear_filter_task, \ - impressions_count_sync, impressions_count_task, imp_strategy + impressions_count_sync, 
impressions_count_task, imp_strategy, none_strategy def set_classes_async(storage_mode, impressions_mode, api_adapter, imp_counter, unique_keys_tracker, prefix=None): """ @@ -118,21 +118,20 @@ def set_classes_async(storage_mode, impressions_mode, api_adapter, imp_counter, api_impressions_adapter = api_adapter['impressions'] sender_adapter = InMemorySenderAdapterAsync(api_telemetry_adapter) + unique_keys_synchronizer = UniqueKeysSynchronizerAsync(sender_adapter, unique_keys_tracker) + unique_keys_task = UniqueKeysSyncTaskAsync(unique_keys_synchronizer.send_all) + clear_filter_sync = ClearFilterSynchronizerAsync(unique_keys_tracker) + impressions_count_sync = ImpressionsCountSynchronizerAsync(api_impressions_adapter, imp_counter) + impressions_count_task = ImpressionsCountSyncTaskAsync(impressions_count_sync.synchronize_counters) + clear_filter_task = ClearFilterSyncTaskAsync(clear_filter_sync.clear_all) + unique_keys_tracker.set_queue_full_hook(unique_keys_task.flush) + if impressions_mode == ImpressionsMode.NONE: imp_strategy = StrategyNoneMode() - unique_keys_synchronizer = UniqueKeysSynchronizerAsync(sender_adapter, unique_keys_tracker) - unique_keys_task = UniqueKeysSyncTaskAsync(unique_keys_synchronizer.send_all) - clear_filter_sync = ClearFilterSynchronizerAsync(unique_keys_tracker) - impressions_count_sync = ImpressionsCountSynchronizerAsync(api_impressions_adapter, imp_counter) - impressions_count_task = ImpressionsCountSyncTaskAsync(impressions_count_sync.synchronize_counters) - clear_filter_task = ClearFilterSyncTaskAsync(clear_filter_sync.clear_all) - unique_keys_tracker.set_queue_full_hook(unique_keys_task.flush) elif impressions_mode == ImpressionsMode.DEBUG: imp_strategy = StrategyDebugMode() else: imp_strategy = StrategyOptimizedMode() - impressions_count_sync = ImpressionsCountSynchronizerAsync(api_impressions_adapter, imp_counter) - impressions_count_task = ImpressionsCountSyncTaskAsync(impressions_count_sync.synchronize_counters) return 
unique_keys_synchronizer, clear_filter_sync, unique_keys_task, clear_filter_task, \ impressions_count_sync, impressions_count_task, imp_strategy diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 096df432..18c33665 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -20,7 +20,7 @@ from splitio.engine.telemetry import TelemetryStorageConsumer, TelemetryStorageProducer, TelemetryStorageProducerAsync from splitio.engine.evaluator import Evaluator from splitio.recorder.recorder import StandardRecorder, StandardRecorderAsync -from splitio.engine.impressions.strategies import StrategyDebugMode +from splitio.engine.impressions.strategies import StrategyDebugMode, StrategyNoneMode, StrategyOptimizedMode from tests.integration import splits_json @@ -43,7 +43,7 @@ def test_get_treatment(self, mocker): mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) class TelemetrySubmitterMock(): def synchronize_config(*_): @@ -74,6 +74,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, + 'track': True } _logger = mocker.Mock() assert client.get_treatment('some_key', 'SPLIT_2') == 'on' @@ -104,7 +105,7 @@ def test_get_treatment_with_config(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() @@ -141,7 +142,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } _logger = mocker.Mock() client._send_impression_to_listener = mocker.Mock() @@ -178,7 +180,7 @@ def test_get_treatments(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -215,7 +217,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -223,7 +226,8 @@ def synchronize_config(*_): } _logger = mocker.Mock() client._send_impression_to_listener = mocker.Mock() - assert client.get_treatments('key', ['SPLIT_2', 'SPLIT_1']) == {'SPLIT_2': 'on', 'SPLIT_1': 'on'} + treatments = client.get_treatments('key', ['SPLIT_2', 'SPLIT_1']) + assert treatments == {'SPLIT_2': 'on', 'SPLIT_1': 'on'} impressions_called = impression_storage.pop_many(100) assert Impression('key', 'SPLIT_2', 'on', 'some_label', 123, None, 1000) in impressions_called @@ -254,7 +258,7 @@ def test_get_treatments_by_flag_set(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, 
telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -291,7 +295,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -330,7 +335,7 @@ def test_get_treatments_by_flag_sets(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -367,7 +372,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -406,7 +412,7 @@ def test_get_treatments_with_config(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) 
split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -442,7 +448,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -486,7 +493,7 @@ def test_get_treatments_with_config_by_flag_set(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -522,7 +529,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -563,7 +571,7 @@ def test_get_treatments_with_config_by_flag_sets(self, mocker): segment_storage = InMemorySegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -599,7 +607,8 @@ def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + 
}, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -632,6 +641,182 @@ def _raise(*_): assert client.get_treatments_with_config_by_flag_sets('key', ['set_1']) == {'SPLIT_1': ('control', None), 'SPLIT_2': ('control', None)} factory.destroy() + def test_impression_toggle_optimized(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + split_storage = InMemorySplitStorage() + segment_storage = InMemorySegmentStorage() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) + + impmanager = ImpressionManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) + recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + class TelemetrySubmitterMock(): + def synchronize_config(*_): + pass + + factory = SplitFactory(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + TelemetrySubmitterMock(), + ) + + factory.block_until_ready(5) + + split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = 
Client(factory, recorder, True) + assert client.get_treatment('some_key', 'SPLIT_1') == 'off' + assert client.get_treatment('some_key', 'SPLIT_2') == 'on' + assert client.get_treatment('some_key', 'SPLIT_3') == 'on' + + impressions = impression_storage.pop_many(100) + assert len(impressions) == 2 + + found1 = False + found2 = False + for impression in impressions: + if impression[1] == 'SPLIT_1': + found1 = True + if impression[1] == 'SPLIT_2': + found2 = True + assert found1 + assert found2 + factory.destroy() + + def test_impression_toggle_debug(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + split_storage = InMemorySplitStorage() + segment_storage = InMemorySegmentStorage() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) + + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) + recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + class TelemetrySubmitterMock(): + def synchronize_config(*_): + pass + + factory = SplitFactory(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + TelemetrySubmitterMock(), + ) + + factory.block_until_ready(5) 
+ + split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = Client(factory, recorder, True) + assert client.get_treatment('some_key', 'SPLIT_1') == 'off' + assert client.get_treatment('some_key', 'SPLIT_2') == 'on' + assert client.get_treatment('some_key', 'SPLIT_3') == 'on' + + impressions = impression_storage.pop_many(100) + assert len(impressions) == 2 + + found1 = False + found2 = False + for impression in impressions: + if impression[1] == 'SPLIT_1': + found1 = True + if impression[1] == 'SPLIT_2': + found2 = True + assert found1 + assert found2 + factory.destroy() + + def test_impression_toggle_none(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + split_storage = InMemorySplitStorage() + segment_storage = InMemorySegmentStorage() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) + non_strategy = StrategyNoneMode() + impmanager = ImpressionManager(non_strategy, non_strategy, telemetry_runtime_producer) + recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + class TelemetrySubmitterMock(): + def synchronize_config(*_): + pass + + factory = SplitFactory(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': 
impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + TelemetrySubmitterMock(), + ) + + factory.block_until_ready(5) + + split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = Client(factory, recorder, True) + assert client.get_treatment('some_key', 'SPLIT_1') == 'off' + assert client.get_treatment('some_key', 'SPLIT_2') == 'on' + assert client.get_treatment('some_key', 'SPLIT_3') == 'on' + + impressions = impression_storage.pop_many(100) + assert len(impressions) == 0 + factory.destroy() + @mock.patch('splitio.client.factory.SplitFactory.destroy') def test_destroy(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" @@ -717,7 +902,7 @@ def test_evaluations_before_running_post_fork(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -796,7 +981,7 @@ def test_telemetry_not_ready(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -930,7 +1115,7 @@ def test_telemetry_method_latency(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -1049,7 +1234,7 @@ async def test_get_treatment_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -1085,6 +1270,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, + 'track': True } _logger = mocker.Mock() assert await client.get_treatment('some_key', 'SPLIT_2') == 'on' @@ -1117,7 +1303,7 @@ async def test_get_treatment_with_config_async(self, mocker): telemetry_runtime_producer = 
telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -1153,7 +1339,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } _logger = mocker.Mock() client._send_impression_to_listener = mocker.Mock() @@ -1191,7 +1378,7 @@ async def test_get_treatments_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1227,7 +1414,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -1268,7 +1456,7 @@ async def test_get_treatments_by_flag_set_async(self, mocker): 
telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1304,7 +1492,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -1345,7 +1534,7 @@ async def test_get_treatments_by_flag_sets_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1381,7 +1570,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ 
-1422,7 +1612,7 @@ async def test_get_treatments_with_config(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1457,7 +1647,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1503,7 +1694,7 @@ async def test_get_treatments_with_config_by_flag_set(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1538,7 +1729,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } 
client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1584,7 +1776,7 @@ async def test_get_treatments_with_config_by_flag_sets(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) @@ -1619,7 +1811,8 @@ async def synchronize_config(*_): 'impression': { 'label': 'some_label', 'change_number': 123 - } + }, + 'track': True } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1655,6 +1848,173 @@ def _raise(*_): } await factory.destroy() + @pytest.mark.asyncio + async def test_impression_toggle_optimized(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', 
new=lambda x: 5) + + impmanager = ImpressionManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) + recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + factory = SplitFactoryAsync(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + mocker.Mock() + ) + + await factory.block_until_ready(5) + + await split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = ClientAsync(factory, recorder, True) + treatment = await client.get_treatment('some_key', 'SPLIT_1') + assert treatment == 'off' + treatment = await client.get_treatment('some_key', 'SPLIT_2') + assert treatment == 'on' + treatment = await client.get_treatment('some_key', 'SPLIT_3') + assert treatment == 'on' + + impressions = await impression_storage.pop_many(100) + assert len(impressions) == 2 + + found1 = False + found2 = False + for impression in impressions: + if impression[1] == 'SPLIT_1': + found1 = True + if impression[1] == 'SPLIT_2': + found2 = True + assert found1 + assert found2 + await factory.destroy() + + @pytest.mark.asyncio + async def test_impression_toggle_debug(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = 
InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) + + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) + recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + factory = SplitFactoryAsync(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + mocker.Mock() + ) + + await factory.block_until_ready(5) + + await split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = ClientAsync(factory, recorder, True) + assert await client.get_treatment('some_key', 'SPLIT_1') == 'off' + assert await client.get_treatment('some_key', 'SPLIT_2') == 'on' + assert await client.get_treatment('some_key', 'SPLIT_3') == 'on' + + impressions = await impression_storage.pop_many(100) + assert len(impressions) == 2 + + found1 = False + found2 = False + for impression in impressions: + if impression[1] == 'SPLIT_1': + found1 = True + if impression[1] == 'SPLIT_2': + found2 = True + assert found1 + assert found2 + await factory.destroy() + + @pytest.mark.asyncio + async def test_impression_toggle_none(self, mocker): + """Test get_treatment execution paths.""" + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = 
TelemetryStorageProducerAsync(telemetry_storage) + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) + event_storage = mocker.Mock(spec=EventStorage) + + destroyed_property = mocker.PropertyMock() + destroyed_property.return_value = False + + mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) + mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) + non_strategy = StrategyNoneMode() + impmanager = ImpressionManager(non_strategy, non_strategy, telemetry_runtime_producer) + recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + factory = SplitFactoryAsync(mocker.Mock(), + {'splits': split_storage, + 'segments': segment_storage, + 'impressions': impression_storage, + 'events': event_storage}, + mocker.Mock(), + recorder, + mocker.Mock(), + telemetry_producer, + telemetry_producer.get_telemetry_init_producer(), + mocker.Mock() + ) + + await factory.block_until_ready(5) + + await split_storage.update([ + from_raw(splits_json['splitChange1_1']['splits'][0]), + from_raw(splits_json['splitChange1_1']['splits'][1]), + from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + client = ClientAsync(factory, recorder, True) + assert await client.get_treatment('some_key', 'SPLIT_1') == 'off' + assert await client.get_treatment('some_key', 'SPLIT_2') == 'on' + assert await client.get_treatment('some_key', 'SPLIT_3') == 'on' + + impressions = await impression_storage.pop_many(100) + assert len(impressions) == 0 + await factory.destroy() + @pytest.mark.asyncio async def test_track_async(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" @@ -1712,7 +2072,7 
@@ async def test_telemetry_not_ready_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) factory = SplitFactoryAsync('localhost', @@ -1753,7 +2113,7 @@ async def test_telemetry_record_treatment_exception_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) - impmanager = ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) destroyed_property = mocker.PropertyMock() @@ -1825,7 +2185,7 @@ async def test_telemetry_method_latency_async(self, mocker): telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) - impmanager = 
ImpressionManager(StrategyDebugMode(), telemetry_runtime_producer) + impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) destroyed_property = mocker.PropertyMock() diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index b3ecce57..d80d34f7 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,6 +1,13 @@ -split11 = {"splits": [{"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"]},{"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": 
None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}],"since": -1,"till": 1675443569027} +split11 = {"splits": [ + {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "trackImpressions": True}, + {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", 
"set_2"]}, + {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "trackImpressions": False} + ],"since": -1,"till": 1675443569027} split12 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 167544376728} -split13 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": 
[{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]},{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443767288,"till": 1675443984594} +split13 = {"splits": [ + {"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": 
None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]}, + {"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]} + ],"since": 1675443767288,"till": 1675443984594} split41 = split11 split42 = split12 From b9767973a59d7915454d93d1ca016728c81cf536 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Thu, 19 Dec 2024 12:23:40 -0800 Subject: [PATCH 04/12] fixed factory sync classes --- splitio/engine/impressions/__init__.py | 3 ++- tests/client/test_factory.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/splitio/engine/impressions/__init__.py b/splitio/engine/impressions/__init__.py index dd76f333..fdd84211 100644 --- a/splitio/engine/impressions/__init__.py +++ b/splitio/engine/impressions/__init__.py @@ -118,6 +118,7 @@ def set_classes_async(storage_mode, impressions_mode, api_adapter, imp_counter, api_impressions_adapter = api_adapter['impressions'] sender_adapter = InMemorySenderAdapterAsync(api_telemetry_adapter) + none_strategy = StrategyNoneMode() unique_keys_synchronizer = UniqueKeysSynchronizerAsync(sender_adapter, unique_keys_tracker) unique_keys_task = UniqueKeysSyncTaskAsync(unique_keys_synchronizer.send_all) clear_filter_sync = ClearFilterSynchronizerAsync(unique_keys_tracker) @@ 
-134,4 +135,4 @@ def set_classes_async(storage_mode, impressions_mode, api_adapter, imp_counter, imp_strategy = StrategyOptimizedMode() return unique_keys_synchronizer, clear_filter_sync, unique_keys_task, clear_filter_task, \ - impressions_count_sync, impressions_count_task, imp_strategy + impressions_count_sync, impressions_count_task, imp_strategy, none_strategy diff --git a/tests/client/test_factory.py b/tests/client/test_factory.py index e3bcd092..fbe499d6 100644 --- a/tests/client/test_factory.py +++ b/tests/client/test_factory.py @@ -941,6 +941,6 @@ async def _make_factory_with_apikey(apikey, *_, **__): factory = await get_factory_async("none", config=config) await factory.destroy() - await asyncio.sleep(0.1) + await asyncio.sleep(0.5) assert factory.destroyed assert len(build_redis.mock_calls) == 2 From db626e4c9194e8c441aa548dded0a67051c0527b Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Thu, 19 Dec 2024 12:28:32 -0800 Subject: [PATCH 05/12] polish --- splitio/client/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/splitio/client/client.py b/splitio/client/client.py index 98f621fb..78f00a34 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -231,7 +231,7 @@ def get_treatment(self, key, feature_flag_name, attributes=None): treatment, _ = self._get_treatment(MethodExceptionsAndLatencies.TREATMENT, key, feature_flag_name, attributes) return treatment - except Exception as e: + except: _LOGGER.error('get_treatment failed') return CONTROL @@ -698,7 +698,7 @@ async def get_treatment(self, key, feature_flag_name, attributes=None): treatment, _ = await self._get_treatment(MethodExceptionsAndLatencies.TREATMENT, key, feature_flag_name, attributes) return treatment - except Exception as e: + except: _LOGGER.error('get_treatment failed') return CONTROL From 97ed30ede115c520de0ebdd92966b730e7db6d74 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Mon, 23 Dec 2024 12:09:38 -0800 Subject: [PATCH 06/12] added integrations 
tests and evaluator fix --- splitio/engine/evaluator.py | 2 +- tests/integration/test_client_e2e.py | 869 +++++++++++++++++++++++++-- 2 files changed, 821 insertions(+), 50 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 3b27ad06..ebae631d 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -68,7 +68,7 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'label': label, 'change_number': _change_number }, - 'track': feature.trackImpressions + 'track': feature.trackImpressions if feature else None } def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index f20e4f66..94a11624 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -441,7 +441,7 @@ def _manager_methods(factory): assert len(manager.split_names()) == 7 assert len(manager.splits()) == 7 -class InMemoryIntegrationTests(object): +class InMemoryDebugIntegrationTests(object): """Inmemory storage-based integration tests.""" def setup_method(self): @@ -476,7 +476,7 @@ def setup_method(self): 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer) # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. 
try: @@ -632,7 +632,7 @@ def setup_method(self): 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } - impmanager = ImpressionsManager(StrategyOptimizedMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer) self.factory = SplitFactory('some_api_key', storages, @@ -766,7 +766,7 @@ def setup_method(self): 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = PipelinedRecorder(redis_client.pipeline, impmanager, storages['events'], storages['impressions'], telemetry_redis_storage) self.factory = SplitFactory('some_api_key', @@ -946,7 +946,7 @@ def setup_method(self): 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = PipelinedRecorder(redis_client.pipeline, impmanager, storages['events'], storages['impressions'], telemetry_redis_storage) self.factory = SplitFactory('some_api_key', @@ -974,103 +974,98 @@ def test_localhost_json_e2e(self): # Tests 1 self.factory._storages['splits'].update([], ['SPLIT_1'], -1) -# self.factory._sync_manager._synchronizer._split_synchronizers._feature_flag_sync._feature_flag_storage.set_change_number(-1) 
self._update_temp_file(splits_json['splitChange1_1']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange1_2']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert self.factory.manager().split_names() == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'control' assert client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 3 self.factory._storages['splits'].update([], ['SPLIT_1'], -1) -# self.factory._sync_manager._synchronizer._split_synchronizers._feature_flag_sync._feature_flag_storage.set_change_number(-1) self._update_temp_file(splits_json['splitChange3_1']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert self.factory.manager().split_names() == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange3_2']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert self.factory.manager().split_names() == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_2", None) == 'off' # Tests 4 self.factory._storages['splits'].update([], ['SPLIT_2'], -1) -# 
self.factory._sync_manager._synchronizer._split_synchronizers._feature_flag_sync._feature_flag_storage.set_change_number(-1) self._update_temp_file(splits_json['splitChange4_1']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange4_2']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange4_3']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'control' assert client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 5 self.factory._storages['splits'].update([], ['SPLIT_1', 'SPLIT_2'], -1) -# self.factory._sync_manager._synchronizer._split_synchronizers._feature_flag_sync._feature_flag_storage.set_change_number(-1) self._update_temp_file(splits_json['splitChange5_1']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange5_2']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 
self.factory._storages['splits'].update([], ['SPLIT_2'], -1) -# self.factory._sync_manager._synchronizer._split_synchronizers._feature_flag_sync._feature_flag_storage.set_change_number(-1) self._update_temp_file(splits_json['splitChange6_1']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange6_2']) self._synchronize_now() - assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' assert client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange6_3']) self._synchronize_now() - assert self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'control' assert client.get_treatment("key", "SPLIT_2", None) == 'on' @@ -1165,7 +1160,7 @@ def setup_method(self): 'telemetry': telemetry_pluggable_storage } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer) @@ -1352,7 +1347,7 @@ def setup_method(self): 'telemetry': telemetry_pluggable_storage } - impmanager = ImpressionsManager(StrategyOptimizedMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), 
telemetry_runtime_producer) # no listener recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer) @@ -1519,8 +1514,8 @@ def setup_method(self): unique_keys_tracker = UniqueKeysTracker() unique_keys_synchronizer, clear_filter_sync, self.unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes('PLUGGABLE', ImpressionsMode.NONE, self.pluggable_storage_adapter, imp_counter, unique_keys_tracker) - impmanager = ImpressionsManager(imp_strategy, telemetry_runtime_producer) # no listener + imp_strategy, none_strategy = set_classes('PLUGGABLE', ImpressionsMode.NONE, self.pluggable_storage_adapter, imp_counter, unique_keys_tracker) + impmanager = ImpressionsManager(imp_strategy, none_strategy, telemetry_runtime_producer) # no listener recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, unique_keys_tracker=unique_keys_tracker, imp_counter=imp_counter) @@ -1666,6 +1661,381 @@ def test_mtk(self): self.factory.destroy(event) event.wait() +class InMemoryImpressionsToggleIntegrationTests(object): + """InMemory storage-based impressions toggle integration tests.""" + + def test_optimized(self): + split_storage = InMemorySplitStorage() + segment_storage = InMemorySegmentStorage() + + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': 
segment_storage, + 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTracker(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. + try: + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' + assert client.get_treatment('user1', 'SPLIT_2') == 'on' + assert client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = imp_storage.pop_many(10) + assert len(impressions) == 2 + assert impressions[0].feature_name == 'SPLIT_1' + assert impressions[1].feature_name == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + + def test_debug(self): + split_storage = InMemorySplitStorage() + segment_storage = InMemorySegmentStorage() + + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = InMemoryTelemetryStorage() + 
telemetry_producer = TelemetryStorageProducer(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTracker(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. + try: + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' + assert client.get_treatment('user1', 'SPLIT_2') == 'on' + assert client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = imp_storage.pop_many(10) + assert len(impressions) == 2 + assert impressions[0].feature_name == 'SPLIT_1' + assert impressions[1].feature_name == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + + def test_none(self): + split_storage = InMemorySplitStorage() + segment_storage = 
InMemorySegmentStorage() + + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = InMemoryTelemetryStorage() + telemetry_producer = TelemetryStorageProducer(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyNoneMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorder(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTracker(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. 
+ try: + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' + assert client.get_treatment('user1', 'SPLIT_2') == 'on' + assert client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = imp_storage.pop_many(10) + assert len(impressions) == 0 + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_1': {'user1'}, 'SPLIT_2': {'user1'}, 'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 3 + assert imps_count[0].feature == 'SPLIT_1' + assert imps_count[0].count == 1 + assert imps_count[1].feature == 'SPLIT_2' + assert imps_count[1].count == 1 + assert imps_count[2].feature == 'SPLIT_3' + assert imps_count[2].count == 1 + +class RedisImpressionsToggleIntegrationTests(object): + """Run impression toggle tests for Redis.""" + + def test_optimized(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = build(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorage(redis_client, True) + segment_storage = RedisSegmentStorage(redis_client) + + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + 
telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) + telemetry_producer = TelemetryStorageProducer(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': RedisImpressionsStorage(redis_client, metadata), + 'events': RedisEventsStorage(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = PipelinedRecorder(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' + assert client.get_treatment('user2', 'SPLIT_2') == 'on' + assert client.get_treatment('user3', 'SPLIT_3') == 'on' + time.sleep(0.2) + + imp_storage = factory._storages['impressions'] + impressions = [] + while True: + impression = redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break + impressions.append(json.loads(impression)) + + assert len(impressions) == 2 + assert impressions[0]['i']['f'] == 'SPLIT_1' + assert impressions[1]['i']['f'] == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + self.clear_cache() + client.destroy() + + def 
test_debug(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = build(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorage(redis_client, True) + segment_storage = RedisSegmentStorage(redis_client) + + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) + telemetry_producer = TelemetryStorageProducer(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': RedisImpressionsStorage(redis_client, metadata), + 'events': RedisEventsStorage(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = PipelinedRecorder(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' 
+ assert client.get_treatment('user2', 'SPLIT_2') == 'on' + assert client.get_treatment('user3', 'SPLIT_3') == 'on' + time.sleep(0.2) + + imp_storage = factory._storages['impressions'] + impressions = [] + while True: + impression = redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break + impressions.append(json.loads(impression)) + + assert len(impressions) == 2 + assert impressions[0]['i']['f'] == 'SPLIT_1' + assert impressions[1]['i']['f'] == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + self.clear_cache() + client.destroy() + + def test_none(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = build(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorage(redis_client, True) + segment_storage = RedisSegmentStorage(redis_client) + + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) + telemetry_producer = TelemetryStorageProducer(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': 
RedisImpressionsStorage(redis_client, metadata), + 'events': RedisEventsStorage(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyNoneMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = PipelinedRecorder(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactory('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + + try: + client = factory.client() + except: + pass + + assert client.get_treatment('user1', 'SPLIT_1') == 'off' + assert client.get_treatment('user2', 'SPLIT_2') == 'on' + assert client.get_treatment('user3', 'SPLIT_3') == 'on' + time.sleep(0.2) + + imp_storage = factory._storages['impressions'] + impressions = [] + while True: + impression = redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break + impressions.append(json.loads(impression)) + + assert len(impressions) == 0 + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_1': {'user1'}, 'SPLIT_2': {'user2'}, 'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 3 + assert imps_count[0].feature == 'SPLIT_1' + assert imps_count[0].count == 1 + assert imps_count[1].feature == 'SPLIT_2' + assert imps_count[1].count == 1 + assert imps_count[2].feature == 'SPLIT_3' + assert imps_count[2].count == 1 + self.clear_cache() + client.destroy() + + def clear_cache(self): + """Clear redis cache.""" + keys_to_delete = [ + "SPLITIO.split.SPLIT_3", + "SPLITIO.splits.till", + "SPLITIO.split.SPLIT_2", + "SPLITIO.split.SPLIT_1", + "SPLITIO.telemetry.latencies" + ] + + redis_client = RedisAdapter(StrictRedis()) + for key in keys_to_delete: + redis_client.delete(key) + 
class InMemoryIntegrationAsyncTests(object): """Inmemory storage-based integration tests.""" @@ -1704,7 +2074,7 @@ async def _setup_method(self): 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer) # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. try: @@ -1870,7 +2240,7 @@ async def _setup_method(self): 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } - impmanager = ImpressionsManager(StrategyOptimizedMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, imp_counter = ImpressionsCounter()) # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. 
@@ -2029,7 +2399,7 @@ async def _setup_method(self): 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = PipelinedRecorderAsync(redis_client.pipeline, impmanager, storages['events'], storages['impressions'], telemetry_redis_storage) self.factory = SplitFactoryAsync('some_api_key', @@ -2243,7 +2613,7 @@ async def _setup_method(self): 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = PipelinedRecorderAsync(redis_client.pipeline, impmanager, storages['events'], storages['impressions'], telemetry_redis_storage) self.factory = SplitFactoryAsync('some_api_key', @@ -2280,21 +2650,21 @@ async def test_localhost_json_e2e(self): self._update_temp_file(splits_json['splitChange1_1']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange1_2']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await 
client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'control' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' @@ -2303,13 +2673,13 @@ async def test_localhost_json_e2e(self): self._update_temp_file(splits_json['splitChange3_1']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange3_2']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_2", None) == 'off' # Tests 4 @@ -2317,21 +2687,21 @@ async def test_localhost_json_e2e(self): self._update_temp_file(splits_json['splitChange4_1']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange4_2']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await client.get_treatment("key", "SPLIT_2", None) == 'off' 
self._update_temp_file(splits_json['splitChange4_3']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'control' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' @@ -2340,13 +2710,13 @@ async def test_localhost_json_e2e(self): self._update_temp_file(splits_json['splitChange5_1']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange5_2']) await self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 @@ -2354,21 +2724,21 @@ async def test_localhost_json_e2e(self): self._update_temp_file(splits_json['splitChange6_1']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' self._update_temp_file(splits_json['splitChange6_2']) await self._synchronize_now() - assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' assert await client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange6_3']) await 
self._synchronize_now() - assert await self.factory.manager().split_names() == ["SPLIT_2"] + assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'control' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' @@ -2465,7 +2835,7 @@ async def _setup_method(self): 'telemetry': telemetry_pluggable_storage } - impmanager = ImpressionsManager(StrategyDebugMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_producer.get_telemetry_evaluation_producer(), @@ -2511,7 +2881,6 @@ async def _setup_method(self): async def test_get_treatment(self): """Test client.get_treatment().""" await self.setup_task -# pytest.set_trace() await _get_treatment_async(self.factory) await self.factory.destroy() @@ -2686,7 +3055,7 @@ async def _setup_method(self): 'telemetry': telemetry_pluggable_storage } - impmanager = ImpressionsManager(StrategyOptimizedMode(), telemetry_runtime_producer) # no listener + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_producer.get_telemetry_evaluation_producer(), @@ -2896,8 +3265,8 @@ async def _setup_method(self): unique_keys_tracker = UniqueKeysTrackerAsync() unique_keys_synchronizer, clear_filter_sync, self.unique_keys_task, \ clear_filter_task, impressions_count_sync, impressions_count_task, \ - imp_strategy = set_classes_async('PLUGGABLE', ImpressionsMode.NONE, self.pluggable_storage_adapter, imp_counter, unique_keys_tracker) - impmanager = ImpressionsManager(imp_strategy, telemetry_runtime_producer) # no listener + imp_strategy, none_strategy = set_classes_async('PLUGGABLE', 
ImpressionsMode.NONE, self.pluggable_storage_adapter, imp_counter, unique_keys_tracker) + impmanager = ImpressionsManager(imp_strategy, none_strategy, telemetry_runtime_producer) # no listener recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, unique_keys_tracker=unique_keys_tracker, imp_counter=imp_counter) @@ -3098,6 +3467,408 @@ async def _teardown_method(self): for key in keys_to_delete: await self.pluggable_storage_adapter.delete(key) +class InMemoryImpressionsToggleIntegrationAsyncTests(object): + """InMemory storage-based impressions toggle integration tests.""" + + @pytest.mark.asyncio + async def test_optimized(self): + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTrackerAsync(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory 
will use the Redis telemetry call, using try catch to ignore the exception. + try: + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user1', 'SPLIT_2') == 'on' + assert await client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = await imp_storage.pop_many(10) + assert len(impressions) == 2 + assert impressions[0].feature_name == 'SPLIT_1' + assert impressions[1].feature_name == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + await factory.destroy() + + @pytest.mark.asyncio + async def test_debug(self): + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 
'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTrackerAsync(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. + try: + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user1', 'SPLIT_2') == 'on' + assert await client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = await imp_storage.pop_many(10) + assert len(impressions) == 2 + assert impressions[0].feature_name == 'SPLIT_1' + assert impressions[1].feature_name == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + await factory.destroy() + + @pytest.mark.asyncio + async def test_none(self): + split_storage = InMemorySplitStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + + await 
split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), + splits.from_raw(splits_json['splitChange1_1']['splits'][1]), + splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + ], [], -1) + + telemetry_storage = await InMemoryTelemetryStorageAsync.create() + telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_evaluation_producer = telemetry_producer.get_telemetry_evaluation_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), + 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), + } + impmanager = ImpressionsManager(StrategyNoneMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = StandardRecorderAsync(impmanager, storages['events'], storages['impressions'], telemetry_evaluation_producer, telemetry_runtime_producer, None, UniqueKeysTrackerAsync(), ImpressionsCounter()) + # Since we are passing None as SDK_Ready event, the factory will use the Redis telemetry call, using try catch to ignore the exception. 
+ try: + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + None, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + except: + pass + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user1', 'SPLIT_2') == 'on' + assert await client.get_treatment('user1', 'SPLIT_3') == 'on' + imp_storage = client._factory._get_storage('impressions') + impressions = await imp_storage.pop_many(10) + assert len(impressions) == 0 + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_1': {'user1'}, 'SPLIT_2': {'user1'}, 'SPLIT_3': {'user1'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 3 + assert imps_count[0].feature == 'SPLIT_1' + assert imps_count[0].count == 1 + assert imps_count[1].feature == 'SPLIT_2' + assert imps_count[1].count == 1 + assert imps_count[2].feature == 'SPLIT_3' + assert imps_count[2].count == 1 + await factory.destroy() + +class RedisImpressionsToggleIntegrationAsyncTests(object): + """Run impression toggle tests for Redis.""" + + @pytest.mark.asyncio + async def test_optimized(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = await build_async(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorageAsync(redis_client, True) + segment_storage = RedisSegmentStorageAsync(redis_client) + + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), 
json.dumps(splits_json['splitChange1_1']['splits'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) + telemetry_producer = TelemetryStorageProducerAsync(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), + 'events': RedisEventsStorageAsync(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyOptimizedMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = PipelinedRecorderAsync(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user2', 'SPLIT_2') == 'on' + assert await client.get_treatment('user3', 'SPLIT_3') == 'on' + await asyncio.sleep(0.2) + + imp_storage = factory._storages['impressions'] + impressions = [] + while True: + impression = await redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break 
+ impressions.append(json.loads(impression)) + + assert len(impressions) == 2 + assert impressions[0]['i']['f'] == 'SPLIT_1' + assert impressions[1]['i']['f'] == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + await self.clear_cache() + await factory.destroy() + + @pytest.mark.asyncio + async def test_debug(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = await build_async(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorageAsync(redis_client, True) + segment_storage = RedisSegmentStorageAsync(redis_client) + + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) + telemetry_producer = TelemetryStorageProducerAsync(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), + 'events': RedisEventsStorageAsync(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) 
# no listener + recorder = PipelinedRecorderAsync(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user2', 'SPLIT_2') == 'on' + assert await client.get_treatment('user3', 'SPLIT_3') == 'on' + await asyncio.sleep(0.2) + + imp_storage = factory._storages['impressions'] + impressions = [] + while True: + impression = await redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break + impressions.append(json.loads(impression)) + + assert len(impressions) == 2 + assert impressions[0]['i']['f'] == 'SPLIT_1' + assert impressions[1]['i']['f'] == 'SPLIT_2' + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 1 + assert imps_count[0].feature == 'SPLIT_3' + assert imps_count[0].count == 1 + await self.clear_cache() + await factory.destroy() + + @pytest.mark.asyncio + async def test_none(self): + """Prepare storages with test data.""" + metadata = SdkMetadata('python-1.2.3', 'some_ip', 'some_name') + redis_client = await build_async(DEFAULT_CONFIG.copy()) + split_storage = RedisSplitStorageAsync(redis_client, True) + segment_storage = RedisSegmentStorageAsync(redis_client) + + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), 
json.dumps(splits_json['splitChange1_1']['splits'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) + + telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) + telemetry_producer = TelemetryStorageProducerAsync(telemetry_redis_storage) + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() + + storages = { + 'splits': split_storage, + 'segments': segment_storage, + 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), + 'events': RedisEventsStorageAsync(redis_client, metadata), + } + impmanager = ImpressionsManager(StrategyNoneMode(), StrategyNoneMode(), telemetry_runtime_producer) # no listener + recorder = PipelinedRecorderAsync(redis_client.pipeline, impmanager, + storages['events'], storages['impressions'], telemetry_redis_storage, unique_keys_tracker=UniqueKeysTracker(), imp_counter=ImpressionsCounter()) + factory = SplitFactoryAsync('some_api_key', + storages, + True, + recorder, + telemetry_producer=telemetry_producer, + telemetry_init_producer=telemetry_producer.get_telemetry_init_producer(), + ) # pylint:disable=attribute-defined-outside-init + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property + + try: + client = factory.client() + except: + pass + + assert await client.get_treatment('user1', 'SPLIT_1') == 'off' + assert await client.get_treatment('user2', 'SPLIT_2') == 'on' + assert await client.get_treatment('user3', 'SPLIT_3') == 'on' + await asyncio.sleep(0.2) + + imp_storage = 
factory._storages['impressions'] + impressions = [] + while True: + impression = await redis_client.lpop(imp_storage.IMPRESSIONS_QUEUE_KEY) + if impression is None: + break + impressions.append(json.loads(impression)) + + assert len(impressions) == 0 + assert client._recorder._unique_keys_tracker._cache == {'SPLIT_1': {'user1'}, 'SPLIT_2': {'user2'}, 'SPLIT_3': {'user3'}} + imps_count = client._recorder._imp_counter.pop_all() + assert len(imps_count) == 3 + assert imps_count[0].feature == 'SPLIT_1' + assert imps_count[0].count == 1 + assert imps_count[1].feature == 'SPLIT_2' + assert imps_count[1].count == 1 + assert imps_count[2].feature == 'SPLIT_3' + assert imps_count[2].count == 1 + await self.clear_cache() + await factory.destroy() + + async def clear_cache(self): + """Clear redis cache.""" + keys_to_delete = [ + "SPLITIO.split.SPLIT_3", + "SPLITIO.splits.till", + "SPLITIO.split.SPLIT_2", + "SPLITIO.split.SPLIT_1", + "SPLITIO.telemetry.latencies" + ] + + redis_client = await build_async(DEFAULT_CONFIG.copy()) + for key in keys_to_delete: + await redis_client.delete(key) + async def _validate_last_impressions_async(client, *to_validate): """Validate the last N impressions are present disregarding the order.""" imp_storage = client._factory._get_storage('impressions') From 2546181742b2bbcf11882cf627721b050cb73146 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Thu, 26 Dec 2024 11:55:00 -0800 Subject: [PATCH 07/12] renamed trackImpressions field and updated tests --- splitio/client/client.py | 6 +- splitio/engine/evaluator.py | 2 +- splitio/engine/impressions/impressions.py | 2 +- splitio/models/impressions.py | 2 +- splitio/models/splits.py | 22 +++--- splitio/recorder/recorder.py | 3 +- tests/client/test_client.py | 43 ++++++----- tests/engine/test_evaluator.py | 2 +- tests/engine/test_impressions.py | 92 +++++++++++------------ tests/integration/__init__.py | 4 +- tests/models/test_splits.py | 8 +- 11 files changed, 97 insertions(+), 89 deletions(-) diff --git 
a/splitio/client/client.py b/splitio/client/client.py index 78f00a34..d4c37fa4 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -23,7 +23,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes 'label': Label.EXCEPTION, 'change_number': None, }, - 'track': True + 'impressions_disabled': False } _NON_READY_EVAL_RESULT = { @@ -33,7 +33,7 @@ class ClientBase(object): # pylint: disable=too-many-instance-attributes 'label': Label.NOT_READY, 'change_number': None }, - 'track': True + 'impressions_disabled': False } def __init__(self, factory, recorder, labels_enabled=True): @@ -126,7 +126,7 @@ def _build_impression(self, key, bucketing, feature, result): change_number=result['impression']['change_number'], bucketing_key=bucketing, time=utctime_ms()), - track=result['track']) + disabled=result['impressions_disabled']) def _build_impressions(self, key, bucketing, results): """Build an impression based on evaluation data & it's result.""" diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index ebae631d..f7a15a32 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -68,7 +68,7 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'label': label, 'change_number': _change_number }, - 'track': feature.trackImpressions if feature else None + 'impressions_disabled': feature.ImpressionsDisabled if feature else None } def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): diff --git a/splitio/engine/impressions/impressions.py b/splitio/engine/impressions/impressions.py index b4545d1e..428fdd13 100644 --- a/splitio/engine/impressions/impressions.py +++ b/splitio/engine/impressions/impressions.py @@ -43,7 +43,7 @@ def process_impressions(self, impressions_decorated): for_counter_all = [] for_unique_keys_tracker_all = [] for impression_decorated, att in impressions_decorated: - if not impression_decorated.track: + if impression_decorated.disabled: for_log, 
for_listener, for_counter, for_unique_keys_tracker = self._none_strategy.process_impressions([(impression_decorated.Impression, att)]) else: for_log, for_listener, for_counter, for_unique_keys_tracker = self._strategy.process_impressions([(impression_decorated.Impression, att)]) diff --git a/splitio/models/impressions.py b/splitio/models/impressions.py index 21daacae..9bdfb3a9 100644 --- a/splitio/models/impressions.py +++ b/splitio/models/impressions.py @@ -20,7 +20,7 @@ 'ImpressionDecorated', [ 'Impression', - 'track' + 'disabled' ] ) diff --git a/splitio/models/splits.py b/splitio/models/splits.py index 170327ab..3291fbc8 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -10,7 +10,7 @@ SplitView = namedtuple( 'SplitView', - ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'trackImpressions'] + ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'ImpressionsDisabled'] ) _DEFAULT_CONDITIONS_TEMPLATE = { @@ -74,7 +74,7 @@ def __init__( # pylint: disable=too-many-arguments traffic_allocation_seed=None, configurations=None, sets=None, - trackImpressions=None + ImpressionsDisabled=None ): """ Class constructor. 
@@ -97,8 +97,8 @@ def __init__( # pylint: disable=too-many-arguments :type traffic_allocation_seed: int :pram sets: list of flag sets :type sets: list - :pram trackImpressions: track impressions flag - :type trackImpressions: boolean + :pram ImpressionsDisabled: track impressions flag + :type ImpressionsDisabled: boolean """ self._name = name self._seed = seed @@ -128,7 +128,7 @@ def __init__( # pylint: disable=too-many-arguments self._configurations = configurations self._sets = set(sets) if sets is not None else set() - self._trackImpressions = trackImpressions if trackImpressions is not None else True + self._ImpressionsDisabled = ImpressionsDisabled if ImpressionsDisabled is not None else False @property def name(self): @@ -191,9 +191,9 @@ def sets(self): return self._sets @property - def trackImpressions(self): - """Return trackImpressions of the split.""" - return self._trackImpressions + def ImpressionsDisabled(self): + """Return ImpressionsDisabled of the split.""" + return self._ImpressionsDisabled def get_configurations_for(self, treatment): """Return the mapping of treatments to configurations.""" @@ -224,7 +224,7 @@ def to_json(self): 'conditions': [c.to_json() for c in self.conditions], 'configurations': self._configurations, 'sets': list(self._sets), - 'trackImpressions': self._trackImpressions + 'ImpressionsDisabled': self._ImpressionsDisabled } def to_split_view(self): @@ -243,7 +243,7 @@ def to_split_view(self): self._configurations if self._configurations is not None else {}, self._default_treatment, list(self._sets) if self._sets is not None else [], - self._trackImpressions + self._ImpressionsDisabled ) def local_kill(self, default_treatment, change_number): @@ -300,5 +300,5 @@ def from_raw(raw_split): traffic_allocation_seed=raw_split.get('trafficAllocationSeed'), configurations=raw_split.get('configurations'), sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [], - trackImpressions=raw_split.get('trackImpressions') if 
raw_split.get('trackImpressions') is not None else True + ImpressionsDisabled=raw_split.get('ImpressionsDisabled') if raw_split.get('ImpressionsDisabled') is not None else False ) diff --git a/splitio/recorder/recorder.py b/splitio/recorder/recorder.py index 4c0ec155..465f79bb 100644 --- a/splitio/recorder/recorder.py +++ b/splitio/recorder/recorder.py @@ -174,8 +174,9 @@ def record_treatment_stats(self, impressions_decorated, latency, operation, meth self._imp_counter.track(for_counter) if len(for_unique_keys_tracker) > 0: [self._unique_keys_tracker.track(item[0], item[1]) for item in for_unique_keys_tracker] - except Exception: # pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except _LOGGER.error('Error recording impressions') + _LOGGER.error(exc) _LOGGER.debug('Error: ', exc_info=True) def record_track_stats(self, event, latency): diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 18c33665..48a0fba2 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -17,6 +17,8 @@ InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, InMemoryTelemetryStorageAsync, InMemoryEventStorageAsync from splitio.models.splits import Split, Status, from_raw from splitio.engine.impressions.impressions import Manager as ImpressionManager +from splitio.engine.impressions.manager import Counter as ImpressionsCounter +from splitio.engine.impressions.unique_keys_tracker import UniqueKeysTracker, UniqueKeysTrackerAsync from splitio.engine.telemetry import TelemetryStorageConsumer, TelemetryStorageProducer, TelemetryStorageProducerAsync from splitio.engine.evaluator import Evaluator from splitio.recorder.recorder import StandardRecorder, StandardRecorderAsync @@ -44,7 +46,9 @@ def test_get_treatment(self, mocker): mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) - recorder = 
StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) + recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer(), + unique_keys_tracker=UniqueKeysTracker(), + imp_counter=ImpressionsCounter()) class TelemetrySubmitterMock(): def synchronize_config(*_): pass @@ -61,7 +65,9 @@ def synchronize_config(*_): telemetry_producer.get_telemetry_init_producer(), TelemetrySubmitterMock(), ) - + ready_property = mocker.PropertyMock() + ready_property.return_value = True + type(factory).ready = ready_property factory.block_until_ready(5) split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) @@ -74,7 +80,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } _logger = mocker.Mock() assert client.get_treatment('some_key', 'SPLIT_2') == 'on' @@ -85,6 +91,7 @@ def synchronize_config(*_): ready_property = mocker.PropertyMock() ready_property.return_value = False type(factory).ready = ready_property + # pytest.set_trace() assert client.get_treatment('some_key', 'SPLIT_2', {'some_attribute': 1}) == 'control' assert impression_storage.pop_many(100) == [Impression('some_key', 'SPLIT_2', 'control', Label.NOT_READY, None, None, 1000)] @@ -143,7 +150,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } _logger = mocker.Mock() client._send_impression_to_listener = mocker.Mock() @@ -218,7 +225,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -296,7 +303,7 @@ def synchronize_config(*_): 'label': 'some_label', 
'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -373,7 +380,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -449,7 +456,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -530,7 +537,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -608,7 +615,7 @@ def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1270,7 +1277,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } _logger = mocker.Mock() assert await client.get_treatment('some_key', 'SPLIT_2') == 'on' @@ -1340,7 +1347,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } _logger = mocker.Mock() client._send_impression_to_listener = mocker.Mock() @@ -1415,7 +1422,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -1493,7 +1500,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, 
@@ -1571,7 +1578,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_2': evaluation, @@ -1648,7 +1655,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1730,7 +1737,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, @@ -1812,7 +1819,7 @@ async def synchronize_config(*_): 'label': 'some_label', 'change_number': 123 }, - 'track': True + 'impressions_disabled': False } client._evaluator.eval_many_with_context.return_value = { 'SPLIT_1': evaluation, diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 89631519..2fc7d032 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -52,7 +52,7 @@ def test_evaluate_treatment_ok(self, mocker): assert result['impression']['change_number'] == 123 assert result['impression']['label'] == 'some_label' assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] - assert result['track'] == mocked_split.trackImpressions + assert result['impressions_disabled'] == mocked_split.ImpressionsDisabled def test_evaluate_treatment_ok_no_config(self, mocker): diff --git a/tests/engine/test_impressions.py b/tests/engine/test_impressions.py index a7b7da68..b9f6a607 100644 --- a/tests/engine/test_impressions.py +++ b/tests/engine/test_impressions.py @@ -112,8 +112,8 @@ def test_standalone_optimized(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - 
(ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert for_unique_keys_tracker == [] @@ -123,7 +123,7 @@ def test_standalone_optimized(self, mocker): # Tracking the same impression a ms later should be empty imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] assert deduped == 1 @@ -131,7 +131,7 @@ def test_standalone_optimized(self, mocker): # Tracking an impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert deduped == 0 @@ -144,8 +144,8 @@ def test_standalone_optimized(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 
'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -158,14 +158,14 @@ def test_standalone_optimized(self, mocker): # Test counting only from the second impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert for_counter == [] assert deduped == 0 assert for_unique_keys_tracker == [] imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert for_counter == [Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1, utc_now-1)] assert deduped == 1 @@ -186,8 +186,8 @@ def test_standalone_debug(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -196,7 +196,7 @@ def test_standalone_debug(self, mocker): # Tracking the same impression a ms later should return the impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - 
(ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3)] assert for_counter == [] @@ -204,7 +204,7 @@ def test_standalone_debug(self, mocker): # Tracking a in impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert for_counter == [] @@ -218,8 +218,8 @@ def test_standalone_debug(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -242,8 +242,8 @@ def test_standalone_none(self, mocker): # no impressions are tracked, only counter and mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 
'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert imps == [] assert for_counter == [ @@ -254,13 +254,13 @@ def test_standalone_none(self, mocker): # Tracking the same impression a ms later should not return the impression and no change on mtk cache imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] # Tracking an impression with a different key, will only increase mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k3', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k3', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [] assert for_unique_keys_tracker == [('k3', 'f1')] @@ -276,8 +276,8 @@ def test_standalone_none(self, mocker): # Track the same impressions but "one hour later", no changes on mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] assert for_counter == [ @@ -301,8 +301,8 @@ def test_standalone_optimized_listener(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - 
(ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -313,7 +313,7 @@ def test_standalone_optimized_listener(self, mocker): # Tracking the same impression a ms later should return empty imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] assert deduped == 1 @@ -322,7 +322,7 @@ def test_standalone_optimized_listener(self, mocker): # Tracking a in impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert deduped == 0 @@ -337,8 +337,8 @@ def test_standalone_optimized_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + 
(ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -356,14 +356,14 @@ def test_standalone_optimized_listener(self, mocker): # Test counting only from the second impression imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert for_counter == [] assert deduped == 0 assert for_unique_keys_tracker == [] imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert for_counter == [ Impression('k3', 'f3', 'on', 'l1', 123, None, utc_now-1, utc_now-1) @@ -387,8 +387,8 @@ def test_standalone_debug_listener(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3)] @@ -398,7 +398,7 @@ def test_standalone_debug_listener(self, mocker): # Tracking the same impression a ms later should 
return the imp imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3)] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, utc_now-3), None)] @@ -407,7 +407,7 @@ def test_standalone_debug_listener(self, mocker): # Tracking a in impression with a different key makes it to the queue imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1)] assert listen == [(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None)] @@ -422,8 +422,8 @@ def test_standalone_debug_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1, old_utc-3), Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2, old_utc-1)] @@ -449,8 +449,8 @@ def test_standalone_none_listener(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should not be tracked imps, deduped, listen, 
for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert imps == [] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), None), @@ -462,7 +462,7 @@ def test_standalone_none_listener(self, mocker): # Tracking the same impression a ms later should return empty, no updates on mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] assert listen == [(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-2, None), None)] @@ -471,7 +471,7 @@ def test_standalone_none_listener(self, mocker): # Tracking a in impression with a different key update mtk imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None) + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None) ]) assert imps == [] assert listen == [(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-1), None)] @@ -486,8 +486,8 @@ def test_standalone_none_listener(self, mocker): # Track the same impressions but "one hour later" imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), True), None), - (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), 
True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), False), None), + (ImpressionDecorated(Impression('k2', 'f1', 'on', 'l1', 123, None, utc_now-2), False), None) ]) assert imps == [] assert for_counter == [Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-1), @@ -517,8 +517,8 @@ def test_impression_toggle_optimized(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert for_unique_keys_tracker == [('k1', 'f1')] @@ -542,8 +542,8 @@ def test_impression_toggle_debug(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert for_unique_keys_tracker == [('k1', 'f1')] @@ -567,8 +567,8 @@ def test_impression_toggle_none(self, mocker): # An impression that hasn't happened in the last hour (pt = None) should be tracked imps, deduped, listen, for_counter, for_unique_keys_tracker = manager.process_impressions([ - (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), False), 
None), - (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), True), None) + (ImpressionDecorated(Impression('k1', 'f1', 'on', 'l1', 123, None, utc_now-3), True), None), + (ImpressionDecorated(Impression('k1', 'f2', 'on', 'l1', 123, None, utc_now-3), False), None) ]) assert for_unique_keys_tracker == [('k1', 'f1'), ('k1', 'f2')] diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index d80d34f7..19fd59ba 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,7 +1,7 @@ split11 = {"splits": [ - {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "trackImpressions": True}, + {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": 
None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "ImpressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, - {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "trackImpressions": False} + {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": 
"ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "ImpressionsDisabled": True} ],"since": -1,"till": 1675443569027} split12 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 167544376728} split13 = {"splits": [ diff --git a/tests/models/test_splits.py b/tests/models/test_splits.py index 66718e71..40976cf9 100644 --- a/tests/models/test_splits.py +++ b/tests/models/test_splits.py @@ -61,7 +61,7 @@ class SplitTests(object): 'on': '{"color": "blue", "size": 13}' }, 'sets': ['set1', 'set2'], - 'trackImpressions': True + 'ImpressionsDisabled': False } def test_from_raw(self): @@ -82,7 +82,7 @@ def test_from_raw(self): assert 
parsed.get_configurations_for('on') == '{"color": "blue", "size": 13}' assert parsed._configurations == {'on': '{"color": "blue", "size": 13}'} assert parsed.sets == {'set1', 'set2'} - assert parsed.trackImpressions == True + assert parsed.ImpressionsDisabled == False def test_get_segment_names(self, mocker): """Test fetching segment names.""" @@ -109,7 +109,7 @@ def test_to_json(self): assert as_json['algo'] == 2 assert len(as_json['conditions']) == 2 assert sorted(as_json['sets']) == ['set1', 'set2'] - assert as_json['trackImpressions'] is True + assert as_json['ImpressionsDisabled'] is False def test_to_split_view(self): """Test SplitView creation.""" @@ -121,7 +121,7 @@ def test_to_split_view(self): assert as_split_view.traffic_type == self.raw['trafficTypeName'] assert set(as_split_view.treatments) == set(['on', 'off']) assert sorted(as_split_view.sets) == sorted(list(self.raw['sets'])) - assert as_split_view.trackImpressions == self.raw['trackImpressions'] + assert as_split_view.ImpressionsDisabled == self.raw['ImpressionsDisabled'] def test_incorrect_matcher(self): """Test incorrect matcher in split model parsing.""" From 222f23252c6820d8fa9cb1ac2f453beddab8988d Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Thu, 26 Dec 2024 12:23:43 -0800 Subject: [PATCH 08/12] polish --- splitio/engine/evaluator.py | 2 +- splitio/models/splits.py | 22 +++++++++++----------- splitio/recorder/recorder.py | 3 +-- tests/engine/test_evaluator.py | 2 +- tests/integration/__init__.py | 4 ++-- tests/models/test_splits.py | 8 ++++---- 6 files changed, 20 insertions(+), 21 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index f7a15a32..d118eb1c 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -68,7 +68,7 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'label': label, 'change_number': _change_number }, - 'impressions_disabled': feature.ImpressionsDisabled if feature else None + 
'impressions_disabled': feature.impressionsDisabled if feature else None } def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): diff --git a/splitio/models/splits.py b/splitio/models/splits.py index 3291fbc8..a1e60774 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -10,7 +10,7 @@ SplitView = namedtuple( 'SplitView', - ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'ImpressionsDisabled'] + ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'impressions_disabled'] ) _DEFAULT_CONDITIONS_TEMPLATE = { @@ -74,7 +74,7 @@ def __init__( # pylint: disable=too-many-arguments traffic_allocation_seed=None, configurations=None, sets=None, - ImpressionsDisabled=None + impressionsDisabled=None ): """ Class constructor. @@ -97,8 +97,8 @@ def __init__( # pylint: disable=too-many-arguments :type traffic_allocation_seed: int :pram sets: list of flag sets :type sets: list - :pram ImpressionsDisabled: track impressions flag - :type ImpressionsDisabled: boolean + :pram impressionsDisabled: track impressions flag + :type impressionsDisabled: boolean """ self._name = name self._seed = seed @@ -128,7 +128,7 @@ def __init__( # pylint: disable=too-many-arguments self._configurations = configurations self._sets = set(sets) if sets is not None else set() - self._ImpressionsDisabled = ImpressionsDisabled if ImpressionsDisabled is not None else False + self._impressionsDisabled = impressionsDisabled if impressionsDisabled is not None else False @property def name(self): @@ -191,9 +191,9 @@ def sets(self): return self._sets @property - def ImpressionsDisabled(self): - """Return ImpressionsDisabled of the split.""" - return self._ImpressionsDisabled + def impressionsDisabled(self): + """Return impressionsDisabled of the split.""" + return self._impressionsDisabled def get_configurations_for(self, treatment): """Return the mapping of treatments to 
configurations.""" @@ -224,7 +224,7 @@ def to_json(self): 'conditions': [c.to_json() for c in self.conditions], 'configurations': self._configurations, 'sets': list(self._sets), - 'ImpressionsDisabled': self._ImpressionsDisabled + 'impressionsDisabled': self._impressionsDisabled } def to_split_view(self): @@ -243,7 +243,7 @@ def to_split_view(self): self._configurations if self._configurations is not None else {}, self._default_treatment, list(self._sets) if self._sets is not None else [], - self._ImpressionsDisabled + self._impressionsDisabled ) def local_kill(self, default_treatment, change_number): @@ -300,5 +300,5 @@ def from_raw(raw_split): traffic_allocation_seed=raw_split.get('trafficAllocationSeed'), configurations=raw_split.get('configurations'), sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [], - ImpressionsDisabled=raw_split.get('ImpressionsDisabled') if raw_split.get('ImpressionsDisabled') is not None else False + impressionsDisabled=raw_split.get('impressionsDisabled') if raw_split.get('impressionsDisabled') is not None else False ) diff --git a/splitio/recorder/recorder.py b/splitio/recorder/recorder.py index 465f79bb..4c0ec155 100644 --- a/splitio/recorder/recorder.py +++ b/splitio/recorder/recorder.py @@ -174,9 +174,8 @@ def record_treatment_stats(self, impressions_decorated, latency, operation, meth self._imp_counter.track(for_counter) if len(for_unique_keys_tracker) > 0: [self._unique_keys_tracker.track(item[0], item[1]) for item in for_unique_keys_tracker] - except Exception as exc: # pylint: disable=broad-except + except Exception: # pylint: disable=broad-except _LOGGER.error('Error recording impressions') - _LOGGER.error(exc) _LOGGER.debug('Error: ', exc_info=True) def record_track_stats(self, event, latency): diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 2fc7d032..4aeab839 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -52,7 +52,7 @@ def 
test_evaluate_treatment_ok(self, mocker): assert result['impression']['change_number'] == 123 assert result['impression']['label'] == 'some_label' assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] - assert result['impressions_disabled'] == mocked_split.ImpressionsDisabled + assert result['impressions_disabled'] == mocked_split.impressionsDisabled def test_evaluate_treatment_ok_no_config(self, mocker): diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index 19fd59ba..ee2475df 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,7 +1,7 @@ split11 = {"splits": [ - {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "ImpressionsDisabled": False}, + {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": 
None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, - {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "ImpressionsDisabled": True} + {"trafficTypeName": "user", "name": 
"SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} ],"since": -1,"till": 1675443569027} split12 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 167544376728} split13 = {"splits": [ diff --git a/tests/models/test_splits.py b/tests/models/test_splits.py index 40976cf9..f456d90c 100644 --- a/tests/models/test_splits.py +++ b/tests/models/test_splits.py @@ -61,7 +61,7 @@ class SplitTests(object): 'on': '{"color": "blue", "size": 13}' }, 'sets': ['set1', 'set2'], - 'ImpressionsDisabled': False + 
'impressionsDisabled': False } def test_from_raw(self): @@ -82,7 +82,7 @@ def test_from_raw(self): assert parsed.get_configurations_for('on') == '{"color": "blue", "size": 13}' assert parsed._configurations == {'on': '{"color": "blue", "size": 13}'} assert parsed.sets == {'set1', 'set2'} - assert parsed.ImpressionsDisabled == False + assert parsed.impressionsDisabled == False def test_get_segment_names(self, mocker): """Test fetching segment names.""" @@ -109,7 +109,7 @@ def test_to_json(self): assert as_json['algo'] == 2 assert len(as_json['conditions']) == 2 assert sorted(as_json['sets']) == ['set1', 'set2'] - assert as_json['ImpressionsDisabled'] is False + assert as_json['impressionsDisabled'] is False def test_to_split_view(self): """Test SplitView creation.""" @@ -121,7 +121,7 @@ def test_to_split_view(self): assert as_split_view.traffic_type == self.raw['trafficTypeName'] assert set(as_split_view.treatments) == set(['on', 'off']) assert sorted(as_split_view.sets) == sorted(list(self.raw['sets'])) - assert as_split_view.ImpressionsDisabled == self.raw['ImpressionsDisabled'] + assert as_split_view.impressions_disabled == self.raw['impressionsDisabled'] def test_incorrect_matcher(self): """Test incorrect matcher in split model parsing.""" From 7977cb86b90bacd5ffb5ac03b94596a5f17ac134 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 1 Jan 2025 03:12:51 +0000 Subject: [PATCH 09/12] Updated License Year --- LICENSE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE.txt b/LICENSE.txt index c022e920..df08de3f 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright © 2024 Split Software, Inc. +Copyright © 2025 Split Software, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
From 2fcffce555cd6287957988422d31d85b93a659f8 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Fri, 10 Jan 2025 10:03:39 -0800 Subject: [PATCH 10/12] polish --- splitio/engine/evaluator.py | 2 +- splitio/models/splits.py | 20 ++++++++++---------- tests/engine/test_evaluator.py | 2 +- tests/models/test_splits.py | 2 +- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index d118eb1c..f913ebba 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -68,7 +68,7 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'label': label, 'change_number': _change_number }, - 'impressions_disabled': feature.impressionsDisabled if feature else None + 'impressions_disabled': feature.impressions_disabled if feature else None } def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): diff --git a/splitio/models/splits.py b/splitio/models/splits.py index a1e60774..92a277c4 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -74,7 +74,7 @@ def __init__( # pylint: disable=too-many-arguments traffic_allocation_seed=None, configurations=None, sets=None, - impressionsDisabled=None + impressions_disabled=None ): """ Class constructor. 
@@ -97,8 +97,8 @@ def __init__( # pylint: disable=too-many-arguments :type traffic_allocation_seed: int :pram sets: list of flag sets :type sets: list - :pram impressionsDisabled: track impressions flag - :type impressionsDisabled: boolean + :param impressions_disabled: track impressions flag + :type impressions_disabled: boolean """ self._name = name self._seed = seed @@ -128,7 +128,7 @@ def __init__( # pylint: disable=too-many-arguments self._configurations = configurations self._sets = set(sets) if sets is not None else set() - self._impressionsDisabled = impressionsDisabled if impressionsDisabled is not None else False + self._impressions_disabled = impressions_disabled if impressions_disabled is not None else False @property def name(self): @@ -191,9 +191,9 @@ def sets(self): return self._sets @property - def impressionsDisabled(self): - """Return impressionsDisabled of the split.""" - return self._impressionsDisabled + def impressions_disabled(self): + """Return impressions_disabled of the split.""" + return self._impressions_disabled def get_configurations_for(self, treatment): """Return the mapping of treatments to configurations.""" @@ -224,7 +224,7 @@ def to_json(self): 'conditions': [c.to_json() for c in self.conditions], 'configurations': self._configurations, 'sets': list(self._sets), - 'impressionsDisabled': self._impressionsDisabled + 'impressionsDisabled': self._impressions_disabled } def to_split_view(self): @@ -243,7 +243,7 @@ def to_split_view(self): self._configurations if self._configurations is not None else {}, self._default_treatment, list(self._sets) if self._sets is not None else [], - self._impressionsDisabled + self._impressions_disabled ) def local_kill(self, default_treatment, change_number): @@ -300,5 +300,5 @@ def from_raw(raw_split): traffic_allocation_seed=raw_split.get('trafficAllocationSeed'), configurations=raw_split.get('configurations'), sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [], -
impressionsDisabled=raw_split.get('impressionsDisabled') if raw_split.get('impressionsDisabled') is not None else False + impressions_disabled=raw_split.get('impressionsDisabled') if raw_split.get('impressionsDisabled') is not None else False ) diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 4aeab839..67c7387d 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -52,7 +52,7 @@ def test_evaluate_treatment_ok(self, mocker): assert result['impression']['change_number'] == 123 assert result['impression']['label'] == 'some_label' assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] - assert result['impressions_disabled'] == mocked_split.impressionsDisabled + assert result['impressions_disabled'] == mocked_split.impressions_disabled def test_evaluate_treatment_ok_no_config(self, mocker): diff --git a/tests/models/test_splits.py b/tests/models/test_splits.py index f456d90c..442a18d0 100644 --- a/tests/models/test_splits.py +++ b/tests/models/test_splits.py @@ -82,7 +82,7 @@ def test_from_raw(self): assert parsed.get_configurations_for('on') == '{"color": "blue", "size": 13}' assert parsed._configurations == {'on': '{"color": "blue", "size": 13}'} assert parsed.sets == {'set1', 'set2'} - assert parsed.impressionsDisabled == False + assert parsed.impressions_disabled == False def test_get_segment_names(self, mocker): """Test fetching segment names.""" From 9c9fe2ffa411aa78c5c01e9f8ec2b21d29f62176 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Tue, 14 Jan 2025 11:59:07 -0800 Subject: [PATCH 11/12] updated version and changes --- CHANGES.txt | 3 +++ splitio/version.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 5b8e8646..8a89dd3e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,6 @@ +10.2.0 (Jan xx, 2025) +- Added support for the new impressions tracking toggle available on feature flags, both respecting the setting and 
including the new field being returned on SplitView type objects. Read more in our docs. + 10.1.0 (Aug 7, 2024) - Added support for Kerberos authentication in Spnego and Proxy Kerberos server instances. diff --git a/splitio/version.py b/splitio/version.py index 953a047f..e8137101 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '10.1.0' \ No newline at end of file +__version__ = '10.2.0' \ No newline at end of file From 488423dee82f2f6bcac045bb38e093ca1aaf1799 Mon Sep 17 00:00:00 2001 From: Bilal Al Date: Fri, 17 Jan 2025 08:46:29 -0800 Subject: [PATCH 12/12] updated changes --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 8a89dd3e..52688577 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -10.2.0 (Jan xx, 2025) +10.2.0 (Jan 17, 2025) - Added support for the new impressions tracking toggle available on feature flags, both respecting the setting and including the new field being returned on SplitView type objects. Read more in our docs. 10.1.0 (Aug 7, 2024)