11 changes: 3 additions & 8 deletions src/sentry/incidents/grouptype.py
@@ -4,17 +4,12 @@
from sentry.issues.grouptype import GroupCategory, GroupType
from sentry.ratelimits.sliding_windows import Quota
from sentry.types.group import PriorityLevel
from sentry.workflow_engine.models import DataPacket
from sentry.workflow_engine.processors.detector import DetectorEvaluationResult, DetectorHandler
from sentry.workflow_engine.processors.detector import StatefulDetectorHandler


# TODO: This will be a stateful detector when we build that abstraction
class MetricAlertDetectorHandler(DetectorHandler[QuerySubscriptionUpdate]):
def evaluate(
self, data_packet: DataPacket[QuerySubscriptionUpdate]
) -> list[DetectorEvaluationResult]:
# TODO: Implement
return []
class MetricAlertDetectorHandler(StatefulDetectorHandler[QuerySubscriptionUpdate]):
pass

Bug: Broken Inheritance: Abstract Methods Missing

MetricAlertDetectorHandler now extends StatefulDetectorHandler but doesn't implement the required abstract methods: counter_names, get_dedupe_value, get_group_key_values, and build_occurrence_and_event_data. This makes the class non-instantiable and will cause a TypeError when attempting to create instances.

# Example GroupType and detector handler for metric alerts. We don't create these issues yet, but we'll use something
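To make the reviewer's comment above concrete, here is a minimal, hypothetical sketch (not part of this PR) of what a concrete MetricAlertDetectorHandler could look like once the abstract surface of StatefulDetectorHandler is filled in. The signatures of get_group_key_values and build_occurrence_and_event_data mirror the abstract methods visible later in this diff; the counter_names property shape, the get_dedupe_value return type, the DataPacket field access, and the import paths for QuerySubscriptionUpdate and DetectorGroupKey are assumptions made purely for illustration.

# Hypothetical sketch only, not part of the PR. Anything marked "assumed" is invented
# for illustration and should be checked against the real StatefulDetectorHandler API.
from typing import Any

from sentry.incidents.utils.types import QuerySubscriptionUpdate  # assumed import path
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.types.group import PriorityLevel
from sentry.workflow_engine.models import DataPacket
from sentry.workflow_engine.processors.detector import (  # DetectorGroupKey location assumed
    DetectorGroupKey,
    StatefulDetectorHandler,
)


class MetricAlertDetectorHandler(StatefulDetectorHandler[QuerySubscriptionUpdate]):
    @property
    def counter_names(self) -> list[str]:
        # No per-priority counters tracked in this sketch.
        return []

    def get_dedupe_value(self, data_packet: DataPacket[QuerySubscriptionUpdate]) -> int:
        # Assumes the packet payload exposes an epoch timestamp usable for dedupe.
        return int(data_packet.packet["timestamp"])

    def get_group_key_values(
        self, data_packet: DataPacket[QuerySubscriptionUpdate]
    ) -> dict[str, int]:
        # Metric alerts evaluate one value per packet; the "values"/"value" keys are assumed.
        return {"": int(data_packet.packet["values"]["value"])}

    def build_occurrence_and_event_data(
        self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel
    ) -> tuple[IssueOccurrence, dict[str, Any]]:
        # Building the actual occurrence payload is out of scope for this sketch.
        raise NotImplementedError
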
8 changes: 7 additions & 1 deletion src/sentry/workflow_engine/models/detector.py
@@ -1,5 +1,6 @@
from __future__ import annotations

import builtins
import logging
from typing import TYPE_CHECKING

@@ -9,6 +10,7 @@
from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model
from sentry.issues import grouptype
from sentry.issues.grouptype import GroupType
from sentry.models.owner_base import OwnerModel

if TYPE_CHECKING:
@@ -53,9 +55,13 @@ def project_id(self):
# XXX: Temporary property until we add `project_id` to the model.
return 1

@property
def group_type(self) -> builtins.type[GroupType] | None:
return grouptype.registry.get_by_slug(self.type)

@property
def detector_handler(self) -> DetectorHandler | None:
group_type = grouptype.registry.get_by_slug(self.type)
group_type = self.group_type
if not group_type:
logger.error(
"No registered grouptype for detector",
45 changes: 22 additions & 23 deletions src/sentry/workflow_engine/processors/detector.py
@@ -14,6 +14,7 @@
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
from sentry.issues.status_change_message import StatusChangeMessage
from sentry.models.group import GroupStatus
from sentry.types.group import PriorityLevel
from sentry.utils import metrics, redis
from sentry.utils.function_cache import cache_func_for_models
from sentry.utils.iterators import chunked
@@ -45,7 +46,7 @@ class DetectorEvaluationResult:

def process_detectors(
data_packet: DataPacket, detectors: list[Detector]
) -> list[tuple[Detector, list[DetectorEvaluationResult]]]:
) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]:
results = []

for detector in detectors:
@@ -55,25 +56,11 @@ def process_detectors(
continue

detector_results = handler.evaluate(data_packet)
detector_group_keys = set()

for result in detector_results:
if result.group_key in detector_group_keys:
# This shouldn't happen - log an error and continue on, but we should investigate this.
logger.error(
"Duplicate detector state group keys found",
extra={
"detector_id": detector.id,
"group_key": result.group_key,
},
)
continue

for result in detector_results.values():
if result.result is not None:
create_issue_occurrence_from_result(result)

detector_group_keys.add(result.group_key)

if detector_results:
results.append((detector, detector_results))

@@ -136,7 +123,9 @@ def __init__(self, detector: Detector):
self.conditions = []

@abc.abstractmethod
def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]:
def evaluate(
self, data_packet: DataPacket[T]
) -> dict[DetectorGroupKey, DetectorEvaluationResult]:
pass

def commit_state_updates(self):
@@ -174,6 +163,12 @@ def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[str, int]:
"""
pass

@abc.abstractmethod
def build_occurrence_and_event_data(
self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel
) -> tuple[IssueOccurrence, dict[str, Any]]:
pass

def build_fingerprint(self, group_key) -> list[str]:
"""
Builds a fingerprint to uniquely identify a detected issue
@@ -228,7 +223,9 @@ def get_state_data(
)
return results

def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]:
def evaluate(
self, data_packet: DataPacket[T]
) -> dict[DetectorGroupKey, DetectorEvaluationResult]:
"""
Evaluates a given data packet and returns a dict of `DetectorGroupKey` to `DetectorEvaluationResult`.
There will be one result for each group key in the packet, unless the
@@ -237,13 +234,13 @@ def evaluate(self, data_packet: DataPacket[T]) -> list[DetectorEvaluationResult]
dedupe_value = self.get_dedupe_value(data_packet)
group_values = self.get_group_key_values(data_packet)
all_state_data = self.get_state_data(list(group_values.keys()))
results = []
results = {}
for group_key, group_value in group_values.items():
result = self.evaluate_group_key_value(
group_key, group_value, all_state_data[group_key], dedupe_value
)
if result:
results.append(result)
results[result.group_key] = result
return results

def evaluate_group_key_value(
@@ -289,7 +286,7 @@ def evaluate_group_key_value(
is_active = new_status != DetectorPriorityLevel.OK
self.enqueue_state_update(group_key, is_active, new_status)
event_data = None
result = None
result: StatusChangeMessage | IssueOccurrence
if new_status == DetectorPriorityLevel.OK:
# If we've determined that we're now ok, we just want to resolve the issue
result = StatusChangeMessage(
@@ -298,8 +295,10 @@
new_status=GroupStatus.RESOLVED,
new_substatus=None,
)

# TODO: Add hook here for generating occurrence
else:
result, event_data = self.build_occurrence_and_event_data(
group_key, value, PriorityLevel(new_status)
)
return DetectorEvaluationResult(
group_key=group_key,
is_active=is_active,
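
Viewed end to end, the processor change replaces the flat list of evaluation results with a mapping keyed by DetectorGroupKey, which is presumably why the duplicate-group-key logging in process_detectors could be dropped. A short, hypothetical consumer, using only names visible in this diff, shows the new result shape; the handle_packet function itself is invented for illustration.

# Hypothetical caller, not part of the PR; illustrates the dict-keyed results.
from sentry.workflow_engine.models import DataPacket
from sentry.workflow_engine.processors.detector import process_detectors


def handle_packet(data_packet: DataPacket, detectors: list) -> None:
    # Each entry pairs a Detector with its results, keyed by DetectorGroupKey.
    for detector, results_by_group_key in process_detectors(data_packet, detectors):
        for group_key, result in results_by_group_key.items():
            if result.result is not None:
                # result.result is an IssueOccurrence or StatusChangeMessage that
                # process_detectors already forwarded via create_issue_occurrence_from_result.
                print(detector.id, group_key, result.is_active)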