diff --git a/src/sentry/incidents/grouptype.py b/src/sentry/incidents/grouptype.py index 5b01444025b..a17a843e72b 100644 --- a/src/sentry/incidents/grouptype.py +++ b/src/sentry/incidents/grouptype.py @@ -4,17 +4,12 @@ from sentry.issues.grouptype import GroupCategory, GroupType from sentry.ratelimits.sliding_windows import Quota from sentry.types.group import PriorityLevel -from sentry.workflow_engine.models import DataPacket -from sentry.workflow_engine.processors.detector import DetectorEvaluationResult, DetectorHandler +from sentry.workflow_engine.processors.detector import StatefulDetectorHandler # TODO: This will be a stateful detector when we build that abstraction -class MetricAlertDetectorHandler(DetectorHandler[QuerySubscriptionUpdate]): - def evaluate( - self, data_packet: DataPacket[QuerySubscriptionUpdate] - ) -> list[DetectorEvaluationResult]: - # TODO: Implement - return [] +class MetricAlertDetectorHandler(StatefulDetectorHandler[QuerySubscriptionUpdate]): + pass # Example GroupType and detector handler for metric alerts. We don't create these issues yet, but we'll use something diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py index 1b72815cb78..2f293ac3956 100644 --- a/src/sentry/workflow_engine/models/detector.py +++ b/src/sentry/workflow_engine/models/detector.py @@ -1,5 +1,6 @@ from __future__ import annotations +import builtins import logging from typing import TYPE_CHECKING @@ -9,6 +10,7 @@ from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model from sentry.issues import grouptype +from sentry.issues.grouptype import GroupType from sentry.models.owner_base import OwnerModel if TYPE_CHECKING: @@ -53,9 +55,13 @@ def project_id(self): # XXX: Temporary property until we add `project_id` to the model. return 1 + @property + def group_type(self) -> builtins.type[GroupType] | None: + return grouptype.registry.get_by_slug(self.type) + @property def detector_handler(self) -> DetectorHandler | None: - group_type = grouptype.registry.get_by_slug(self.type) + group_type = self.group_type if not group_type: logger.error( "No registered grouptype for detector", diff --git a/src/sentry/workflow_engine/processors/detector.py b/src/sentry/workflow_engine/processors/detector.py index 4e2e9b06a41..898b59345d0 100644 --- a/src/sentry/workflow_engine/processors/detector.py +++ b/src/sentry/workflow_engine/processors/detector.py @@ -14,6 +14,7 @@ from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.issues.status_change_message import StatusChangeMessage from sentry.models.group import GroupStatus +from sentry.types.group import PriorityLevel from sentry.utils import metrics, redis from sentry.utils.function_cache import cache_func_for_models from sentry.utils.iterators import chunked @@ -45,7 +46,7 @@ class DetectorEvaluationResult: def process_detectors( data_packet: DataPacket, detectors: list[Detector] -) -> list[tuple[Detector, list[DetectorEvaluationResult]]]: +) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]: results = [] for detector in detectors: @@ -55,25 +56,11 @@ def process_detectors( continue detector_results = handler.evaluate(data_packet) - detector_group_keys = set() - - for result in detector_results: - if result.group_key in detector_group_keys: - # This shouldn't happen - log an error and continue on, but we should investigate this. 
- logger.error( - "Duplicate detector state group keys found", - extra={ - "detector_id": detector.id, - "group_key": result.group_key, - }, - ) - continue + for result in detector_results.values(): if result.result is not None: create_issue_occurrence_from_result(result) - detector_group_keys.add(result.group_key) - if detector_results: results.append((detector, detector_results)) @@ -136,7 +123,9 @@ def __init__(self, detector: Detector): self.conditions = [] @abc.abstractmethod - def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]: + def evaluate( + self, data_packet: DataPacket[T] + ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: pass def commit_state_updates(self): @@ -174,6 +163,12 @@ def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[str, int]: """ pass + @abc.abstractmethod + def build_occurrence_and_event_data( + self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel + ) -> tuple[IssueOccurrence, dict[str, Any]]: + pass + def build_fingerprint(self, group_key) -> list[str]: """ Builds a fingerprint to uniquely identify a detected issue @@ -228,7 +223,9 @@ def get_state_data( ) return results - def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]: + def evaluate( + self, data_packet: DataPacket[T] + ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: """ Evaluates a given data packet and returns a dict mapping each group key to its `DetectorEvaluationResult`. There will be one entry for each group key in the packet, unless the @@ -237,13 +234,13 @@ def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult] dedupe_value = self.get_dedupe_value(data_packet) group_values = self.get_group_key_values(data_packet) all_state_data = self.get_state_data(list(group_values.keys())) - results = [] + results = {} for group_key, group_value in group_values.items(): result = self.evaluate_group_key_value( group_key, group_value, all_state_data[group_key], dedupe_value ) if result: - results.append(result) + results[result.group_key] = result return results def evaluate_group_key_value( @@ -289,7 +286,7 @@ def evaluate_group_key_value( is_active = new_status != DetectorPriorityLevel.OK self.enqueue_state_update(group_key, is_active, new_status) event_data = None - result = None + result: StatusChangeMessage | IssueOccurrence if new_status == DetectorPriorityLevel.OK: # If we've determined that we're now ok, we just want to resolve the issue result = StatusChangeMessage( fingerprint=self.build_fingerprint(group_key), project_id=self.detector.project_id, new_status=GroupStatus.RESOLVED, new_substatus=None, ) - - # TODO: Add hook here for generating occurrence + else: + result, event_data = self.build_occurrence_and_event_data( + group_key, value, PriorityLevel(new_status) + ) return DetectorEvaluationResult( group_key=group_key, is_active=is_active, diff --git a/tests/sentry/workflow_engine/processors/test_detector.py b/tests/sentry/workflow_engine/processors/test_detector.py index 7ac44a7b364..d56b02d0714 100644 --- a/tests/sentry/workflow_engine/processors/test_detector.py +++ b/tests/sentry/workflow_engine/processors/test_detector.py @@ -1,10 +1,16 @@ import unittest +from datetime import datetime, timezone +from typing import Any from unittest import mock +from unittest.mock import call from sentry.issues.grouptype import GroupCategory, GroupType +from sentry.issues.issue_occurrence import IssueOccurrence +from sentry.issues.producer import PayloadType from sentry.issues.status_change_message import StatusChangeMessage from 
sentry.testutils.abstract import Abstract -from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.datetime import freeze_time +from sentry.types.group import PriorityLevel from sentry.workflow_engine.models import DataPacket, Detector, DetectorState from sentry.workflow_engine.processors.detector import ( DetectorEvaluationResult, @@ -14,13 +20,34 @@ get_redis_client, process_detectors, ) -from sentry.workflow_engine.types import DetectorPriorityLevel +from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel from tests.sentry.issues.test_grouptype import BaseGroupTypeTest -class TestProcessDetectors(BaseGroupTypeTest): +class MockDetectorStateHandler(StatefulDetectorHandler[dict]): + counter_names = ["test1", "test2"] + + def get_dedupe_value(self, data_packet: DataPacket[dict]) -> int: + return data_packet.packet.get("dedupe", 0) + + def get_group_key_values(self, data_packet: DataPacket[dict]) -> dict[str, int]: + return data_packet.packet.get("group_vals", {}) + + def build_occurrence_and_event_data( + self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel + ) -> tuple[IssueOccurrence, dict[str, Any]]: + return build_mock_occurrence_and_event(self, group_key, value, new_status) + + +class BaseDetectorHandlerTest(BaseGroupTypeTest): + __test__ = Abstract(__module__, __qualname__) + def setUp(self): super().setUp() + self.sm_comp_patcher = mock.patch.object( + StatusChangeMessage, "__eq__", status_change_comparator + ) + self.sm_comp_patcher.__enter__() class NoHandlerGroupType(GroupType): type_id = 1 @@ -29,8 +56,10 @@ class NoHandlerGroupType(GroupType): category = GroupCategory.METRIC_ALERT.value class MockDetectorHandler(DetectorHandler[dict]): - def evaluate(self, data_packet: DataPacket[dict]) -> list[DetectorEvaluationResult]: - return [DetectorEvaluationResult(None, True, DetectorPriorityLevel.HIGH)] + def evaluate( + self, data_packet: DataPacket[dict] + ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: + return {None: DetectorEvaluationResult(None, True, DetectorPriorityLevel.HIGH)} class HandlerGroupType(GroupType): type_id = 2 @@ -39,18 +68,6 @@ class HandlerGroupType(GroupType): category = GroupCategory.METRIC_ALERT.value detector_handler = MockDetectorHandler - class MockDetectorStateHandler(DetectorHandler[dict]): - def evaluate(self, data_packet: DataPacket[dict]) -> list[DetectorEvaluationResult]: - group_keys = data_packet.packet.get("group_keys", [None]) - return [ - DetectorEvaluationResult( - group_key, - True, - DetectorPriorityLevel.HIGH, - ) - for group_key in group_keys - ] - class HandlerStateGroupType(GroupType): type_id = 3 slug = "handler_with_state" @@ -62,9 +79,57 @@ class HandlerStateGroupType(GroupType): self.handler_type = HandlerGroupType self.handler_state_type = HandlerStateGroupType + def tearDown(self): + super().tearDown() + self.sm_comp_patcher.__exit__(None, None, None) + + def create_detector_and_conditions(self, type: str | None = None): + if type is None: + type = "handler_with_state" + detector = self.create_detector( + workflow_condition_group=self.create_data_condition_group(), + type=type, + ) + self.create_data_condition( + condition="gt", + comparison=5, + condition_result=DetectorPriorityLevel.HIGH, + condition_group=detector.workflow_condition_group, + ) + return detector + + def build_handler( + self, detector: Detector | None = None, detector_type=None + ) -> MockDetectorStateHandler: + if detector is None: + detector = 
self.create_detector_and_conditions(detector_type) + return MockDetectorStateHandler(detector) + + def assert_updates(self, handler, group_key, dedupe_value, counter_updates, active, priority): + if dedupe_value is not None: + assert handler.dedupe_updates.get(group_key) == dedupe_value + else: + assert group_key not in handler.dedupe_updates + if counter_updates is not None: + assert handler.counter_updates.get(group_key) == counter_updates + else: + assert group_key not in handler.counter_updates + if active is not None or priority is not None: + assert handler.state_updates.get(group_key) == (active, priority) + else: + assert group_key not in handler.state_updates + + +@freeze_time() +class TestProcessDetectors(BaseDetectorHandlerTest): + def setUp(self): + super().setUp() + def build_data_packet(self, **kwargs): query_id = "1234" - return DataPacket[dict](query_id, {"query_id": query_id, "some": "data", **kwargs}) + return DataPacket[dict]( + query_id, {"query_id": query_id, "group_vals": {"group_1": 6}, **kwargs} + ) def test(self): detector = self.create_detector(type=self.handler_type.slug) @@ -73,64 +138,88 @@ def test(self): assert results == [ ( detector, - [DetectorEvaluationResult(None, True, DetectorPriorityLevel.HIGH)], + {None: DetectorEvaluationResult(None, True, DetectorPriorityLevel.HIGH)}, ) ] - def test_state_results(self): - detector = self.create_detector(type=self.handler_state_type.slug) - data_packet = self.build_data_packet() + @mock.patch("sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka") + def test_state_results(self, mock_produce_occurrence_to_kafka): + detector = self.create_detector_and_conditions(type=self.handler_state_type.slug) + data_packet = DataPacket("1", {"dedupe": 2, "group_vals": {None: 6}}) results = process_detectors(data_packet, [detector]) + occurrence, event_data = build_mock_occurrence_and_event( + detector.detector_handler, None, 6, PriorityLevel.HIGH + ) + result = DetectorEvaluationResult( None, True, DetectorPriorityLevel.HIGH, + occurrence, + event_data, ) assert results == [ ( detector, - [result], + {result.group_key: result}, ) ] + mock_produce_occurrence_to_kafka.assert_called_once_with( + payload_type=PayloadType.OCCURRENCE, + occurrence=occurrence, + status_change=None, + event_data=event_data, + ) - def test_state_results_multi_group(self): - detector = self.create_detector(type=self.handler_state_type.slug) - data_packet = self.build_data_packet(group_keys=["group_1", "group_2"]) + @mock.patch("sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka") + def test_state_results_multi_group(self, mock_produce_occurrence_to_kafka): + detector = self.create_detector_and_conditions(type=self.handler_state_type.slug) + data_packet = DataPacket("1", {"dedupe": 2, "group_vals": {"group_1": 6, "group_2": 10}}) results = process_detectors(data_packet, [detector]) + occurrence, event_data = build_mock_occurrence_and_event( + detector.detector_handler, "group_1", 6, PriorityLevel.HIGH + ) + result_1 = DetectorEvaluationResult( "group_1", True, DetectorPriorityLevel.HIGH, + occurrence, + event_data, + ) + occurrence_2, event_data_2 = build_mock_occurrence_and_event( + detector.detector_handler, "group_2", 6, PriorityLevel.HIGH ) result_2 = DetectorEvaluationResult( "group_2", True, DetectorPriorityLevel.HIGH, + occurrence_2, + event_data_2, ) assert results == [ ( detector, - [result_1, result_2], + {result_1.group_key: result_1, result_2.group_key: result_2}, ) ] - - def 
test_state_results_multi_group_dupe(self): - detector = self.create_detector(type=self.handler_state_type.slug) - data_packet = self.build_data_packet(group_keys=["dupe", "dupe"]) - with mock.patch("sentry.workflow_engine.processors.detector.logger") as mock_logger: - results = process_detectors(data_packet, [detector]) - assert mock_logger.error.call_args[0][0] == "Duplicate detector state group keys found" - result = DetectorEvaluationResult( - "dupe", - True, - DetectorPriorityLevel.HIGH, + mock_produce_occurrence_to_kafka.assert_has_calls( + [ + call( + payload_type=PayloadType.OCCURRENCE, + occurrence=occurrence, + status_change=None, + event_data=event_data, + ), + call( + payload_type=PayloadType.OCCURRENCE, + occurrence=occurrence_2, + status_change=None, + event_data=event_data_2, + ), + ], + any_order=True, ) - assert results == [ - ( - detector, - [result, result], - ) - ] def test_no_issue_type(self): detector = self.create_detector(type="invalid slug") @@ -152,14 +241,38 @@ def test_no_handler(self): assert results == [] -class MockDetectorStateHandler(StatefulDetectorHandler[dict]): - counter_names = ["test1", "test2"] - - def get_dedupe_value(self, data_packet: DataPacket[dict]) -> int: - return data_packet.packet.get("dedupe", 0) - - def get_group_key_values(self, data_packet: DataPacket[dict]) -> dict[str, int]: - return data_packet.packet.get("group_vals", {}) +def build_mock_occurrence_and_event( + handler: StatefulDetectorHandler, + group_key: DetectorGroupKey, + value: int, + new_status: PriorityLevel, +) -> tuple[IssueOccurrence, dict[str, Any]]: + assert handler.detector.group_type is not None + occurrence = IssueOccurrence( + id="eb4b0acffadb4d098d48cb14165ab578", + project_id=handler.detector.project_id, + event_id="43878ab4419f4ab181f6379ac376d5aa", + fingerprint=handler.build_fingerprint(group_key), + issue_title="Some Issue", + subtitle="Some subtitle", + resource_id=None, + evidence_data={}, + evidence_display=[], + type=handler.detector.group_type, + detection_time=datetime.now(timezone.utc), + level="error", + culprit="Some culprit", + initial_issue_priority=new_status.value, + ) + event_data = { + "timestamp": occurrence.detection_time, + "project_id": occurrence.project_id, + "event_id": occurrence.event_id, + "platform": "python", + "received": occurrence.detection_time, + "tags": {}, + } + return occurrence, event_data class TestKeyBuilders(unittest.TestCase): @@ -211,49 +324,7 @@ def status_change_comparator(self: StatusChangeMessage, other: StatusChangeMessa ) -class StatefulDetectorHandlerTestMixin(TestCase): - __test__ = Abstract(__module__, __qualname__) - - def setUp(self): - super().setUp() - self.sm_comp_patcher = mock.patch.object( - StatusChangeMessage, "__eq__", status_change_comparator - ) - self.sm_comp_patcher.__enter__() - - def tearDown(self): - super().tearDown() - self.sm_comp_patcher.__exit__(None, None, None) - - def build_handler(self, detector: Detector | None = None) -> MockDetectorStateHandler: - if detector is None: - detector = self.create_detector( - workflow_condition_group=self.create_data_condition_group() - ) - self.create_data_condition( - condition="gt", - comparison=5, - condition_result=DetectorPriorityLevel.HIGH, - condition_group=detector.workflow_condition_group, - ) - return MockDetectorStateHandler(detector) - - def assert_updates(self, handler, group_key, dedupe_value, counter_updates, active, priority): - if dedupe_value is not None: - assert handler.dedupe_updates.get(group_key) == dedupe_value - else: - assert 
group_key not in handler.dedupe_updates - if counter_updates is not None: - assert handler.counter_updates.get(group_key) == counter_updates - else: - assert group_key not in handler.counter_updates - if active is not None or priority is not None: - assert handler.state_updates.get(group_key) == (active, priority) - else: - assert group_key not in handler.state_updates - - -class TestGetStateData(StatefulDetectorHandlerTestMixin): +class TestGetStateData(BaseDetectorHandlerTest): def test_new(self): handler = self.build_handler() key = "test_key" @@ -305,7 +376,7 @@ def test_multi(self): } -class TestCommitStateUpdateData(StatefulDetectorHandlerTestMixin): +class TestCommitStateUpdateData(BaseDetectorHandlerTest): def test(self): handler = self.build_handler() redis = get_redis_client() @@ -348,35 +419,46 @@ def test(self): assert redis.get(counter_key_2) == "20" -class TestEvaluate(StatefulDetectorHandlerTestMixin): +@freeze_time() +class TestEvaluate(BaseDetectorHandlerTest): def test(self): handler = self.build_handler() - assert handler.evaluate(DataPacket("1", {"dedupe": 1})) == [] - assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 6}})) == [ - DetectorEvaluationResult( + assert handler.evaluate(DataPacket("1", {"dedupe": 1})) == {} + occurrence, event_data = build_mock_occurrence_and_event( + handler, "val1", 6, PriorityLevel.HIGH + ) + assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 6}})) == { + "val1": DetectorEvaluationResult( group_key="val1", is_active=True, priority=DetectorPriorityLevel.HIGH, + result=occurrence, + event_data=event_data, ) - ] + } self.assert_updates(handler, "val1", 2, {}, True, DetectorPriorityLevel.HIGH) def test_above_below_threshold(self): handler = self.build_handler() - assert handler.evaluate(DataPacket("1", {"dedupe": 1, "group_vals": {"val1": 0}})) == [] + assert handler.evaluate(DataPacket("1", {"dedupe": 1, "group_vals": {"val1": 0}})) == {} handler.commit_state_updates() - assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 6}})) == [ - DetectorEvaluationResult( + occurrence, event_data = build_mock_occurrence_and_event( + handler, "val1", 6, PriorityLevel.HIGH + ) + assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 6}})) == { + "val1": DetectorEvaluationResult( group_key="val1", is_active=True, priority=DetectorPriorityLevel.HIGH, + result=occurrence, + event_data=event_data, ) - ] + } handler.commit_state_updates() - assert handler.evaluate(DataPacket("1", {"dedupe": 3, "group_vals": {"val1": 6}})) == [] + assert handler.evaluate(DataPacket("1", {"dedupe": 3, "group_vals": {"val1": 6}})) == {} handler.commit_state_updates() - assert handler.evaluate(DataPacket("1", {"dedupe": 4, "group_vals": {"val1": 0}})) == [ - DetectorEvaluationResult( + assert handler.evaluate(DataPacket("1", {"dedupe": 4, "group_vals": {"val1": 0}})) == { + "val1": DetectorEvaluationResult( group_key="val1", is_active=False, result=StatusChangeMessage( @@ -387,14 +469,14 @@ def test_above_below_threshold(self): ), priority=DetectorPriorityLevel.OK, ) - ] + } def test_no_condition_group(self): detector = self.create_detector() handler = MockDetectorStateHandler(detector) with mock.patch("sentry.workflow_engine.processors.detector.metrics") as mock_metrics: assert ( - handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 100}})) == [] + handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 100}})) == {} ) 
mock_metrics.incr.assert_called_once_with( "workflow_engine.detector.skipping_invalid_condition_group" @@ -404,46 +486,62 @@ def test_no_condition_group(self): def test_results_on_change(self): handler = self.build_handler() result = handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 100}})) - assert result == [ - DetectorEvaluationResult( + occurrence, event_data = build_mock_occurrence_and_event( + handler, "val1", 6, PriorityLevel.HIGH + ) + assert result == { + "val1": DetectorEvaluationResult( group_key="val1", is_active=True, priority=DetectorPriorityLevel.HIGH, + result=occurrence, + event_data=event_data, ) - ] + } self.assert_updates(handler, "val1", 2, {}, True, DetectorPriorityLevel.HIGH) handler.commit_state_updates() # This detector is already active, so no status change occurred. Should be no result - assert handler.evaluate(DataPacket("1", {"dedupe": 3, "group_vals": {"val1": 200}})) == [] + assert handler.evaluate(DataPacket("1", {"dedupe": 3, "group_vals": {"val1": 200}})) == {} def test_dedupe(self): handler = self.build_handler() result = handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 8}})) - assert result == [ - DetectorEvaluationResult( + occurrence, event_data = build_mock_occurrence_and_event( + handler, "val1", 6, PriorityLevel.HIGH + ) + assert result == { + "val1": DetectorEvaluationResult( group_key="val1", is_active=True, priority=DetectorPriorityLevel.HIGH, + result=occurrence, + event_data=event_data, ) - ] + } self.assert_updates(handler, "val1", 2, {}, True, DetectorPriorityLevel.HIGH) handler.commit_state_updates() with mock.patch("sentry.workflow_engine.processors.detector.metrics") as mock_metrics: - assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 0}})) == [] + assert handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"val1": 0}})) == {} mock_metrics.incr.assert_called_once_with( "workflow_engine.detector.skipping_already_processed_update" ) self.assert_updates(handler, "val1", None, None, None, None) -class TestEvaluateGroupKeyValue(StatefulDetectorHandlerTestMixin): +@freeze_time() +class TestEvaluateGroupKeyValue(BaseDetectorHandlerTest): def test_dedupe(self): handler = self.build_handler() with mock.patch("sentry.workflow_engine.processors.detector.metrics") as mock_metrics: + occurrence, event_data = build_mock_occurrence_and_event( + handler, "val1", 6, PriorityLevel.HIGH + ) expected_result = DetectorEvaluationResult( "group_key", True, DetectorPriorityLevel.HIGH, + result=occurrence, + event_data=event_data, ) assert ( handler.evaluate_group_key_value(
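
For readers wiring a concrete detector against the new API, the sketch below shows how the pieces introduced in this diff fit together: get_dedupe_value and get_group_key_values feed StatefulDetectorHandler.evaluate, and build_occurrence_and_event_data is the hook evaluate_group_key_value calls for any non-OK status change. It is a minimal sketch modeled on MockDetectorStateHandler and build_mock_occurrence_and_event from the tests above; ExampleHandler, the packet shape, and the occurrence field values are illustrative assumptions, not part of the change.

# Hypothetical handler, modeled on the test helpers in this diff.
from datetime import datetime, timezone
from typing import Any
from uuid import uuid4

from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.types.group import PriorityLevel
from sentry.workflow_engine.models import DataPacket
from sentry.workflow_engine.processors.detector import StatefulDetectorHandler
from sentry.workflow_engine.types import DetectorGroupKey


class ExampleHandler(StatefulDetectorHandler[dict]):
    # Per-group counters the stateful base class tracks in Redis.
    counter_names = ["example1"]

    def get_dedupe_value(self, data_packet: DataPacket[dict]) -> int:
        # Monotonically increasing value; packets at or below the last
        # processed value are skipped by evaluate().
        return data_packet.packet.get("dedupe", 0)

    def get_group_key_values(self, data_packet: DataPacket[dict]) -> dict[str, int]:
        # One entry per group key; each value is run through the detector's
        # condition group independently.
        return data_packet.packet.get("group_vals", {})

    def build_occurrence_and_event_data(
        self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel
    ) -> tuple[IssueOccurrence, dict[str, Any]]:
        # Called by evaluate_group_key_value() for any non-OK status change;
        # transitions back to OK produce a StatusChangeMessage instead.
        assert self.detector.group_type is not None
        detection_time = datetime.now(timezone.utc)
        occurrence = IssueOccurrence(
            id=uuid4().hex,
            project_id=self.detector.project_id,
            event_id=uuid4().hex,
            # The fingerprint ties the occurrence to this detector/group key pair.
            fingerprint=self.build_fingerprint(group_key),
            issue_title="Example issue",
            subtitle=f"Value {value} crossed the detector threshold",
            resource_id=None,
            evidence_data={},
            evidence_display=[],
            type=self.detector.group_type,
            detection_time=detection_time,
            level="error",
            culprit="example",
            initial_issue_priority=new_status.value,
        )
        event_data = {
            "timestamp": detection_time,
            "project_id": occurrence.project_id,
            "event_id": occurrence.event_id,
            "platform": "python",
            "received": detection_time,
            "tags": {},
        }
        return occurrence, event_data

Because evaluate() now returns a dict keyed by group key rather than a list, duplicate group keys are impossible by construction, which is why process_detectors drops its old duplicate-key logging. Callers look results up directly:

    handler = ExampleHandler(detector)
    results = handler.evaluate(DataPacket("1", {"dedupe": 2, "group_vals": {"group_1": 6}}))
    result = results.get("group_1")  # absent if the update was deduped or the status did not change
    handler.commit_state_updates()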