Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions src/sentry/explore/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,20 @@ def is_logs_enabled(organization: Organization, actor: User | None = None) -> bo
This replaces individual feature flag checks for consolidated ourlogs features.
"""
return features.has("organizations:ourlogs-enabled", organization, actor=actor)


def is_trace_metrics_enabled(organization: Organization, actor: User | None = None) -> bool:
    """
    Return whether consolidated trace metrics are enabled for the organization.

    Checks the single ``organizations:tracemetrics-enabled`` flag in place of
    individual per-feature flag checks.
    """
    enabled: bool = features.has(
        "organizations:tracemetrics-enabled", organization, actor=actor
    )
    return enabled


def is_trace_metrics_alerts_enabled(organization: Organization, actor: User | None = None) -> bool:
    """
    Check if trace metrics alerts are enabled for the given organization.

    Requires both the base trace metrics feature (delegated to
    ``is_trace_metrics_enabled`` so the base-flag check stays in one place)
    and the dedicated ``organizations:tracemetrics-alerts`` flag.
    """
    return is_trace_metrics_enabled(organization, actor=actor) and features.has(
        "organizations:tracemetrics-alerts", organization, actor=actor
    )
23 changes: 17 additions & 6 deletions src/sentry/incidents/endpoints/serializers/alert_rule.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,14 +268,25 @@ def serialize(
obj.organization,
actor=user,
)
# Temporary: Translate aggregate back here from `tags[sentry:user]` to `user` for the frontend.
aggregate = translate_aggregate_field(
obj.snuba_query.aggregate,
reverse=True,
allow_mri=allow_mri,
allow_eap=obj.snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value,

# Trace metrics have complicated aggregate validation that requires the EAP SearchResolver and do NOT need translation, as they do not have tags in the old format (e.g. tags[sentry:user])
is_trace_metric = (
obj.snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
and obj.snuba_query.event_types
and SnubaQueryEventType.EventType.TRACE_ITEM_METRIC in obj.snuba_query.event_types
)

# Temporary: Translate aggregate back here from `tags[sentry:user]` to `user` for the frontend.
if not is_trace_metric:
aggregate = translate_aggregate_field(
obj.snuba_query.aggregate,
reverse=True,
allow_mri=allow_mri,
allow_eap=obj.snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value,
)
else:
aggregate = obj.snuba_query.aggregate

# Apply transparency: Convert upsampled_count() back to count() for user-facing responses
# This hides the internal upsampling implementation from users
if aggregate == "upsampled_count()":
Expand Down
18 changes: 18 additions & 0 deletions src/sentry/incidents/metric_issue_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from sentry.incidents.logic import enable_disable_subscriptions
from sentry.incidents.models.alert_rule import AlertRuleDetectionType
from sentry.relay.config.metric_extraction import on_demand_metrics_feature_flags
from sentry.search.eap.trace_metrics.validator import validate_trace_metrics_aggregate
from sentry.seer.anomaly_detection.delete_rule import delete_data_in_seer_for_detector
from sentry.seer.anomaly_detection.store_data_workflow_engine import (
send_new_detector_data,
Expand Down Expand Up @@ -194,6 +195,20 @@ class MetricIssueDetectorValidator(BaseDetectorTypeValidator):
)
condition_group = MetricIssueConditionGroupValidator(required=True)

def validate_eap_rule(self, attrs):
    """
    Validate EAP rule data.

    For every data source targeting the EventsAnalyticsPlatform dataset with
    the TRACE_ITEM_METRIC event type, run trace-metrics-specific aggregate
    validation.
    """
    for source in attrs.get("data_sources", []):
        if source.get("dataset") != Dataset.EventsAnalyticsPlatform:
            continue
        if SnubaQueryEventType.EventType.TRACE_ITEM_METRIC not in source.get(
            "event_types", []
        ):
            continue
        validate_trace_metrics_aggregate(source.get("aggregate"))

def validate(self, attrs):
attrs = super().validate(attrs)

Expand All @@ -202,6 +217,9 @@ def validate(self, attrs):
if len(conditions) > 3:
raise serializers.ValidationError("Too many conditions")

if "data_sources" in attrs:
self.validate_eap_rule(attrs)

return attrs

def _validate_transaction_dataset_deprecation(self, dataset: Dataset) -> None:
Expand Down
15 changes: 14 additions & 1 deletion src/sentry/incidents/serializers/alert_rule.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,9 @@
AlertRuleTrigger,
)
from sentry.incidents.utils.subscription_limits import get_max_metric_alert_subscriptions
from sentry.search.eap.trace_metrics.validator import validate_trace_metrics_aggregate
from sentry.snuba.dataset import Dataset
from sentry.snuba.models import QuerySubscription
from sentry.snuba.models import QuerySubscription, SnubaQueryEventType
from sentry.snuba.snuba_query_validator import SnubaQueryValidator
from sentry.workflow_engine.migration_helpers.alert_rule import (
dual_delete_migrated_alert_rule_trigger,
Expand Down Expand Up @@ -140,6 +141,16 @@ def validate_aggregate(self, aggregate):
)
return aggregate

def validate_eap_rule(self, data):
    """
    Validate EAP rule data.

    When the rule's event types include TRACE_ITEM_METRIC, the aggregate is
    checked with the trace-metrics-specific validator; otherwise this is a
    no-op.
    """
    if SnubaQueryEventType.EventType.TRACE_ITEM_METRIC not in data.get("event_types", []):
        return
    validate_trace_metrics_aggregate(data.get("aggregate"))

def validate(self, data):
"""
Performs validation on an alert rule's data.
Expand All @@ -149,6 +160,8 @@ def validate(self, data):
> or < the value depends on threshold type).
"""
data = super().validate(data)
if data.get("dataset") == Dataset.EventsAnalyticsPlatform:
self.validate_eap_rule(data)

triggers = data.get("triggers", [])
if not triggers:
Expand Down
71 changes: 71 additions & 0 deletions src/sentry/search/eap/trace_metrics/validator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import logging
from datetime import datetime, timedelta, timezone

from rest_framework import serializers

from sentry.exceptions import InvalidSearchQuery
from sentry.search.eap.resolver import SearchResolver
from sentry.search.eap.trace_metrics.config import TraceMetricsSearchResolverConfig
from sentry.search.eap.trace_metrics.definitions import TRACE_METRICS_DEFINITIONS
from sentry.search.eap.trace_metrics.types import TraceMetric
from sentry.search.events.types import SnubaParams

logger = logging.getLogger(__name__)


def extract_trace_metric_from_aggregate(aggregate: str) -> TraceMetric | None:
    """
    Extract trace metric information from an aggregate string using SearchResolver.

    Args:
        aggregate: The aggregate function string
    Returns:
        The TraceMetric resolved from the aggregate, or None when the aggregate
        does not carry a specific metric.
    Raises:
        InvalidSearchQuery: If the aggregate is invalid
    """
    # The time window is arbitrary: only the aggregate syntax is resolved here,
    # no query is actually executed.
    end = datetime.now(tz=timezone.utc)
    params = SnubaParams(
        projects=[],
        organization=None,
        start=end - timedelta(hours=1),
        end=end,
    )

    # metric=None is acceptable — configuring the metric through the resolver
    # config is deprecated in favor of encoding it in the aggregate itself.
    config = TraceMetricsSearchResolverConfig(metric=None)
    resolver = SearchResolver(
        params=params,
        config=config,
        definitions=TRACE_METRICS_DEFINITIONS,
    )

    resolved, _ = resolver.resolve_function(aggregate)
    return getattr(resolved, "trace_metric", None)


def validate_trace_metrics_aggregate(aggregate: str) -> None:
    """
    Validate a trace metrics aggregate using the SearchResolver.

    Args:
        aggregate: The aggregate function to validate
    Raises:
        serializers.ValidationError: If the aggregate is invalid. The error
            message includes the underlying resolver error so API clients can
            tell why the aggregate was rejected.
    """
    try:
        trace_metric = extract_trace_metric_from_aggregate(aggregate)
        if trace_metric is None:
            raise InvalidSearchQuery(
                f"Trace metrics aggregate {aggregate} must specify metric name, type, and unit"
            )
    except InvalidSearchQuery as e:
        # logger.exception already records the exception and traceback, so
        # only the offending aggregate needs to be interpolated.
        logger.exception("Invalid trace metrics aggregate: %s", aggregate)
        # Surface the specific failure reason instead of a generic message
        # (previously the detail in `e` was discarded, leaving users unable to
        # see why their aggregate was invalid).
        raise serializers.ValidationError(
            {"aggregate": f"Invalid trace metrics aggregate: {e}"}
        ) from e
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Validation error loses specific error message details

Low Severity

When validate_trace_metrics_aggregate catches an InvalidSearchQuery exception, it discards the specific error message from e and replaces it with a generic message. This includes cases where trace_metric is None (which raises an InvalidSearchQuery with "must specify metric name, type, and unit") and errors from resolve_function. The original error is only logged but not returned to the user, making it difficult to understand why a trace metrics aggregate is invalid.

Fix in Cursor Fix in Web

28 changes: 24 additions & 4 deletions src/sentry/snuba/entity_subscription.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from sentry.models.environment import Environment
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.search.eap.trace_metrics.config import TraceMetricsSearchResolverConfig
from sentry.search.eap.types import SearchResolverConfig
from sentry.search.events.builder.base import BaseQueryBuilder
from sentry.search.events.builder.discover import DiscoverQueryBuilder
Expand All @@ -35,6 +36,7 @@
from sentry.snuba.referrer import Referrer
from sentry.snuba.rpc_dataset_common import RPCBase
from sentry.snuba.spans_rpc import Spans
from sentry.snuba.trace_metrics import TraceMetrics
from sentry.utils import metrics

# TODO: If we want to support security events here we'll need a way to
Expand Down Expand Up @@ -296,6 +298,10 @@ def build_rpc_request(
params = {}

params["project_id"] = project_ids
is_trace_metric = (
self.event_types
and self.event_types[0] == SnubaQueryEventType.EventType.TRACE_ITEM_METRIC
)

query = apply_dataset_query_conditions(self.query_type, query, self.event_types)
if environment:
Expand All @@ -304,6 +310,8 @@ def build_rpc_request(
dataset_module: type[RPCBase]
if self.event_types and self.event_types[0] == SnubaQueryEventType.EventType.TRACE_ITEM_LOG:
dataset_module = OurLogs
elif is_trace_metric:
dataset_module = TraceMetrics
else:
dataset_module = Spans
now = datetime.now(tz=timezone.utc)
Expand All @@ -322,12 +330,24 @@ def build_rpc_request(
model_mode = ExtrapolationMode(self.extrapolation_mode)
proto_extrapolation_mode = MODEL_TO_PROTO_EXTRAPOLATION_MODE.get(model_mode)

search_resolver = dataset_module.get_resolver(
snuba_params,
SearchResolverConfig(
if is_trace_metric:
search_config: SearchResolverConfig = TraceMetricsSearchResolverConfig(
metric=None,
auto_fields=False,
use_aggregate_conditions=True,
disable_aggregate_extrapolation=False,
extrapolation_mode=proto_extrapolation_mode,
stable_timestamp_quantization=False,
)
else:
search_config = SearchResolverConfig(
stable_timestamp_quantization=False,
extrapolation_mode=proto_extrapolation_mode,
),
)

search_resolver = dataset_module.get_resolver(
snuba_params,
search_config,
)

rpc_request, _, _ = dataset_module.get_timeseries_query(
Expand Down
1 change: 1 addition & 0 deletions src/sentry/snuba/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ class EventType(Enum):
TRANSACTION = 2
TRACE_ITEM_SPAN = 3
TRACE_ITEM_LOG = 4
TRACE_ITEM_METRIC = 5

snuba_query = FlexibleForeignKey("sentry.SnubaQuery")
type = models.SmallIntegerField()
Expand Down
48 changes: 34 additions & 14 deletions src/sentry/snuba/snuba_query_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,15 @@
InvalidSearchQuery,
UnsupportedQuerySubscription,
)
from sentry.explore.utils import is_logs_enabled
from sentry.explore.utils import is_logs_enabled, is_trace_metrics_alerts_enabled
from sentry.incidents.logic import (
check_aggregate_column_support,
get_column_from_aggregate,
query_datasets_to_type,
translate_aggregate_field,
)
from sentry.incidents.utils.constants import INCIDENTS_SNUBA_SUBSCRIPTION_TYPE
from sentry.search.eap.trace_metrics.validator import validate_trace_metrics_aggregate
from sentry.snuba.dataset import Dataset
from sentry.snuba.entity_subscription import (
ENTITY_TIME_COLUMNS,
Expand Down Expand Up @@ -68,6 +69,7 @@
SnubaQueryEventType.EventType.TRANSACTION,
SnubaQueryEventType.EventType.TRACE_ITEM_LOG,
SnubaQueryEventType.EventType.TRACE_ITEM_SPAN,
SnubaQueryEventType.EventType.TRACE_ITEM_METRIC,
},
}

Expand Down Expand Up @@ -165,6 +167,13 @@ def validate_event_types(self, value: Sequence[str]) -> list[SnubaQueryEventType
) and any([v for v in validated if v == SnubaQueryEventType.EventType.TRACE_ITEM_LOG]):
raise serializers.ValidationError("You do not have access to the log alerts feature.")

if not is_trace_metrics_alerts_enabled(
self.context["organization"], actor=self.context.get("user", None)
) and any([v for v in validated if v == SnubaQueryEventType.EventType.TRACE_ITEM_METRIC]):
raise serializers.ValidationError(
"You do not have access to the metrics alerts feature."
)

return validated

def validate_extrapolation_mode(self, extrapolation_mode: str) -> ExtrapolationMode | None:
Expand Down Expand Up @@ -207,6 +216,7 @@ def validate(self, data):
def _validate_aggregate(self, data):
dataset = data.setdefault("dataset", Dataset.Events)
aggregate = data.get("aggregate")
event_types = data.get("event_types", [])
allow_mri = features.has(
"organizations:custom-metrics",
self.context["organization"],
Expand All @@ -217,22 +227,32 @@ def _validate_aggregate(self, data):
actor=self.context.get("user", None),
)
allow_eap = dataset == Dataset.EventsAnalyticsPlatform
try:
if not check_aggregate_column_support(
aggregate,
allow_mri=allow_mri,
allow_eap=allow_eap,
):
raise serializers.ValidationError(
{"aggregate": _("Invalid Metric: We do not currently support this field.")}
)
except InvalidSearchQuery as e:
raise serializers.ValidationError({"aggregate": _(f"Invalid Metric: {e}")})

data["aggregate"] = translate_aggregate_field(
aggregate, allow_mri=allow_mri, allow_eap=allow_eap
# Check if this is a trace metrics query
is_trace_metrics = (
dataset == Dataset.EventsAnalyticsPlatform
and event_types
and SnubaQueryEventType.EventType.TRACE_ITEM_METRIC in event_types
)

if is_trace_metrics:
validate_trace_metrics_aggregate(aggregate)
else:
try:
if not check_aggregate_column_support(
aggregate,
allow_mri=allow_mri,
allow_eap=allow_eap,
):
raise serializers.ValidationError(
{"aggregate": _("Invalid Metric: We do not currently support this field.")}
)
except InvalidSearchQuery as e:
raise serializers.ValidationError({"aggregate": _(f"Invalid Metric: {e}")})
data["aggregate"] = translate_aggregate_field(
aggregate, allow_mri=allow_mri, allow_eap=allow_eap
)

def _validate_query(self, data):
dataset = data.setdefault("dataset", Dataset.Events)

Expand Down
Loading
Loading