diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index e0ee3093baa450..713c8e38c4c769 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -192,7 +192,7 @@ sentry-ophio==1.1.3
 sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
-sentry-sdk==2.27.0
+sentry-sdk==2.28.0
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 16bc157de5f8a0..5f1da08ca62017 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -130,7 +130,7 @@ sentry-ophio==1.1.3
 sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
-sentry-sdk==2.27.0
+sentry-sdk==2.28.0
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
diff --git a/src/sentry/api/endpoints/organization_event_details.py b/src/sentry/api/endpoints/organization_event_details.py
index f9045e09c68da1..eb888bb056d6ac 100644
--- a/src/sentry/api/endpoints/organization_event_details.py
+++ b/src/sentry/api/endpoints/organization_event_details.py
@@ -2,7 +2,6 @@
 from datetime import datetime, timedelta
 from typing import Any
 
-import sentry_sdk
 from rest_framework.request import Request
 from rest_framework.response import Response
 from snuba_sdk import Column, Condition, Function, Op
@@ -23,6 +22,7 @@
 from sentry.snuba.dataset import Dataset
 from sentry.snuba.query_sources import QuerySource
 from sentry.snuba.referrer import Referrer
+from sentry.utils.sdk import set_measurement
 
 VALID_AVERAGE_COLUMNS = {"span.self_time", "span.duration"}
 
@@ -39,7 +39,7 @@ def add_comparison_to_event(event, average_columns, request: Request):
             group_to_span_map[group].append(span)
 
     # Nothing to add comparisons to
-    sentry_sdk.set_measurement("query.groups", len(group_to_span_map))
+    set_measurement("query.groups", len(group_to_span_map))
     if len(group_to_span_map) == 0:
         return
 
@@ -77,7 +77,7 @@ def add_comparison_to_event(event, average_columns, request: Request):
             ),
         )
     )
-    sentry_sdk.set_measurement("query.groups_found", len(result["data"]))
+    set_measurement("query.groups_found", len(result["data"]))
     for row in result["data"]:
         group = row["span.group"]
         for span in group_to_span_map[group]:
diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py
index 7935b46bffe737..ab472235cfff77 100644
--- a/src/sentry/api/endpoints/organization_events_trace.py
+++ b/src/sentry/api/endpoints/organization_events_trace.py
@@ -761,7 +761,7 @@ def build_span_query(trace_id: str, spans_params: SnubaParams, query_spans: list
     # Performance improvement, snuba's parser is extremely slow when we're sending thousands of
     # span_ids here, using a `splitByChar` means that snuba will not parse the giant list of spans
     span_minimum = options.get("performance.traces.span_query_minimum_spans")
-    sentry_sdk.set_measurement("trace_view.spans.span_minimum", span_minimum)
+    set_measurement("trace_view.spans.span_minimum", span_minimum)
     sentry_sdk.set_tag("trace_view.split_by_char.optimization", len(query_spans) > span_minimum)
     if len(query_spans) > span_minimum:
         # TODO: because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt
@@ -813,14 +813,14 @@ def augment_transactions_with_spans(
             projects.add(error["project.id"])
     ts_params = find_timestamp_params(transactions)
     time_buffer = options.get("performance.traces.span_query_timebuffer_hours")
-    sentry_sdk.set_measurement("trace_view.spans.time_buffer", time_buffer)
+    set_measurement("trace_view.spans.time_buffer", time_buffer)
     if ts_params["min"]:
         params.start = ts_params["min"] - timedelta(hours=time_buffer)
     if ts_params["max"]:
         params.end = ts_params["max"] + timedelta(hours=time_buffer)
 
     if ts_params["max"] and ts_params["min"]:
-        sentry_sdk.set_measurement(
+        set_measurement(
             "trace_view.trace_duration", (ts_params["max"] - ts_params["min"]).total_seconds()
         )
         sentry_sdk.set_tag("trace_view.missing_timestamp_constraints", False)
@@ -899,7 +899,7 @@ def augment_transactions_with_spans(
                 total_chunks = 3
             else:
                 total_chunks = 4
-            sentry_sdk.set_measurement("trace_view.span_query.total_chunks", total_chunks)
+            set_measurement("trace_view.span_query.total_chunks", total_chunks)
             chunks = chunked(list_spans, (len(list_spans) // total_chunks) + 1)
             queries = [build_span_query(trace_id, spans_params, chunk) for chunk in chunks]
             results = bulk_snuba_queries(
diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py
index f58aa1498e2a1e..56362c975cacc2 100644
--- a/src/sentry/api/utils.py
+++ b/src/sentry/api/utils.py
@@ -44,7 +44,7 @@
 from sentry.silo.base import SiloMode
 from sentry.types.region import get_local_region
 from sentry.utils.dates import parse_stats_period
-from sentry.utils.sdk import capture_exception, merge_context_into_scope
+from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_measurement
 from sentry.utils.snuba import (
     DatasetSelectionError,
     QueryConnectionFailed,
@@ -454,7 +454,7 @@ def update_snuba_params_with_timestamp(
         # While possible, the majority of traces shouldn't take more than a week
         # Starting with 3d for now, but potentially something we can increase if this becomes a problem
         time_buffer = options.get("performance.traces.transaction_query_timebuffer_days")
-        sentry_sdk.set_measurement("trace_view.transactions.time_buffer", time_buffer)
+        set_measurement("trace_view.transactions.time_buffer", time_buffer)
         example_start = example_timestamp - timedelta(days=time_buffer)
         example_end = example_timestamp + timedelta(days=time_buffer)
         # If timestamp is being passed it should always overwrite the statsperiod or start & end
diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py
index ee3612079a952c..32f3ce0853db87 100644
--- a/src/sentry/ingest/transaction_clusterer/rules.py
+++ b/src/sentry/ingest/transaction_clusterer/rules.py
@@ -9,6 +9,7 @@
 from sentry.ingest.transaction_clusterer.rule_validator import RuleValidator
 from sentry.models.project import Project
 from sentry.utils import metrics
+from sentry.utils.sdk import set_measurement
 
 from .base import ReplacementRule
 
@@ -145,7 +146,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet:
             sorted_rules = [rule for rule in sorted_rules if rule[1] >= last_seen_deadline]
 
         if self.MERGE_MAX_RULES < len(rules):
-            sentry_sdk.set_measurement("discarded_rules", len(rules) - self.MERGE_MAX_RULES)
+            set_measurement("discarded_rules", len(rules) - self.MERGE_MAX_RULES)
             sentry_sdk.Scope.get_isolation_scope().set_context(
                 "clustering_rules_max",
                 {
diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py
index f6e32329b9135f..c4b4ff9d36a10e 100644
--- a/src/sentry/models/release.py
+++ b/src/sentry/models/release.py
@@ -45,6 +45,7 @@
 from sentry.utils.db import atomic_transaction
 from sentry.utils.hashlib import hash_values, md5_text
 from sentry.utils.numbers import validate_bigint
+from sentry.utils.sdk import set_measurement
 
 logger = logging.getLogger(__name__)
 
@@ -644,7 +645,7 @@ def set_commits(self, commit_list):
 
         This will clear any existing commit log and replace it with the given commits.
         """
-        sentry_sdk.set_measurement("release.set_commits", len(commit_list))
+        set_measurement("release.set_commits", len(commit_list))
         from sentry.models.releases.set_commits import set_commits
 
         set_commits(self, commit_list)
diff --git a/src/sentry/profiles/utils.py b/src/sentry/profiles/utils.py
index 5e30d7568e8e38..50daa02e0d8767 100644
--- a/src/sentry/profiles/utils.py
+++ b/src/sentry/profiles/utils.py
@@ -114,7 +114,7 @@ def get_from_profiling_service(
     with sentry_sdk.start_span(op="json.dumps"):
         data = json.dumps(json_data).encode("utf-8")
 
-    set_measurement("payload.size", len(data), unit="byte")
+    set_measurement("payload.size", len(data))
     if metric:
         metric_name, metric_tags = metric
         metrics.distribution(metric_name, len(data), tags=metric_tags)
diff --git a/src/sentry/snuba/rpc_dataset_common.py b/src/sentry/snuba/rpc_dataset_common.py
index a0f72f1df24ea7..8356eea764151d 100644
--- a/src/sentry/snuba/rpc_dataset_common.py
+++ b/src/sentry/snuba/rpc_dataset_common.py
@@ -31,6 +31,7 @@
 from sentry.search.events.fields import get_function_alias
 from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaData, SnubaParams
 from sentry.utils import json, snuba_rpc
+from sentry.utils.sdk import set_measurement
 from sentry.utils.snuba import process_value
 
 logger = logging.getLogger("sentry.snuba.spans_rpc")
@@ -313,9 +314,7 @@ def process_table_response(
                     assert len(column_value.results) == len(column_value.reliabilities), Exception(
                         "Length of rpc results do not match length of rpc reliabilities"
                     )
-                sentry_sdk.set_measurement(
-                    f"SearchResolver.result_size.{attribute}", len(column_value.results)
-                )
+                set_measurement(f"SearchResolver.result_size.{attribute}", len(column_value.results))
                 while len(final_data) < len(column_value.results):
                     final_data.append({})
 
@@ -333,7 +332,7 @@ def process_table_response(
                     final_confidence[index][attribute] = CONFIDENCES.get(
                         column_value.reliabilities[index], None
                     )
-        sentry_sdk.set_measurement("SearchResolver.result_size.final_data", len(final_data))
+        set_measurement("SearchResolver.result_size.final_data", len(final_data))
 
         if debug:
             final_meta["query"] = json.loads(MessageToJson(table_request.rpc_request))
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 278cffd88af1f9..df3a127d6bdd32 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -695,11 +695,11 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str:
     )
 
 
-def set_measurement(measurement_name, value, unit=None):
+def set_measurement(measurement_name, value):
     try:
         transaction = sentry_sdk.Scope.get_current_scope().transaction
         if transaction is not None:
-            transaction.set_measurement(measurement_name, value, unit)
+            transaction.set_data(measurement_name, value)
     except Exception:
         pass
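
Note on the helper change: sentry-sdk 2.28.0 deprecates the transaction-level `set_measurement()` API in favor of `set_data()`, which appears to be the motivation for routing all call sites through the internal `sentry.utils.sdk.set_measurement` wrapper and dropping its `unit` parameter. A minimal sketch of the resulting behavior (the transaction name and values below are illustrative only, not part of the diff):

```python
import sentry_sdk

from sentry.utils.sdk import set_measurement

# Values recorded through the wrapper now land on the transaction as plain
# data attributes via Transaction.set_data(); units are no longer tracked.
with sentry_sdk.start_transaction(op="task", name="example-task"):
    # Before this change: set_measurement("payload.size", 1024, unit="byte")
    set_measurement("payload.size", 1024)

# Outside a transaction the wrapper is a silent no-op: the current scope has
# no transaction, and any SDK error is swallowed by the bare try/except.
set_measurement("payload.size", 2048)
```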