From f5010eee623957e6eb52e403c8e3710759f117d6 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 20 May 2025 10:43:36 +0200 Subject: [PATCH 01/66] Update requirements --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 8 +++++++- requirements-frozen.txt | 9 ++++++++- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 519060224af0a7..6b9b93a79936bc 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -71,7 +71,7 @@ sentry-ophio>=1.1.3 sentry-protos==0.2.0 sentry-redis-tools>=0.5.0 sentry-relay>=0.9.9 -sentry-sdk[http2]>=2.25.1 +sentry-sdk[http2]==3.0.0a1 slack-sdk>=3.27.2 snuba-sdk>=3.0.43 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 8937819e48ee8f..3b392a4a59ddc7 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -37,6 +37,7 @@ cryptography==44.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 +deprecated==1.2.18 devservices==1.1.5 distlib==0.3.8 distro==1.8.0 @@ -82,6 +83,7 @@ httpx==0.25.2 hyperframe==6.1.0 identify==2.6.1 idna==3.7 +importlib-metadata==8.6.1 inflection==0.5.1 iniconfig==1.1.1 iso3166==2.1.1 @@ -114,6 +116,9 @@ openapi-core==0.18.2 openapi-pydantic==0.4.0 openapi-schema-validator==0.6.2 openapi-spec-validator==0.7.1 +opentelemetry-api==1.33.1 +opentelemetry-sdk==1.33.1 +opentelemetry-semantic-conventions==0.54b1 orjson==3.10.10 outcome==1.2.0 packaging==24.1 @@ -192,7 +197,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 -sentry-sdk==2.27.0 +sentry-sdk==3.0.0a1 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 @@ -248,6 +253,7 @@ wheel==0.38.4 wrapt==1.17.0 wsproto==1.1.0 xmlsec==1.3.14 +zipp==3.21.0 zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 6e832e6bb52037..c79c2a8e28d112 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -32,6 +32,7 @@ cryptography==44.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 +deprecated==1.2.18 distro==1.8.0 django==5.2.1 django-crispy-forms==1.14.0 @@ -67,6 +68,7 @@ httpcore==1.0.2 httpx==0.25.2 hyperframe==6.1.0 idna==3.7 +importlib-metadata==8.6.1 inflection==0.5.1 iso3166==2.1.1 isodate==0.6.1 @@ -82,6 +84,9 @@ mmh3==4.0.0 msgpack==1.1.0 oauthlib==3.1.0 openai==1.3.5 +opentelemetry-api==1.33.1 +opentelemetry-sdk==1.33.1 +opentelemetry-semantic-conventions==0.54b1 orjson==3.10.10 packaging==24.1 parsimonious==0.10.0 @@ -130,7 +135,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 -sentry-sdk==2.27.0 +sentry-sdk==3.0.0a1 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 @@ -156,7 +161,9 @@ urllib3==2.2.2 vine==5.1.0 vroomrs==0.1.4 wcwidth==0.2.10 +wrapt==1.17.2 xmlsec==1.3.14 +zipp==3.21.0 zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: From 109670e3d07a0718e06f7ae12f97fa8e3a6555c1 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 10:13:56 +0200 Subject: [PATCH 02/66] Remove propagate_hub because it is not needed because its the default behavior now --- src/sentry/utils/sdk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 278cffd88af1f9..f74062164c5d7a 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -486,7 +486,7 @@ def flush( LoggingIntegration(event_level=None, 
sentry_logs_level=logging.INFO), RustInfoIntegration(), RedisIntegration(), - ThreadingIntegration(propagate_hub=True), + ThreadingIntegration(), ], **sdk_options, ) From d1c81fb8607f63ccf8046dcf51f588768d1b1e9b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 10:14:22 +0200 Subject: [PATCH 03/66] Removed metrics_noop because there are no metrics anymore and thus not needed --- src/sentry/metrics/minimetrics.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/sentry/metrics/minimetrics.py b/src/sentry/metrics/minimetrics.py index 446b878d002bbb..e83ed2673ab41d 100644 --- a/src/sentry/metrics/minimetrics.py +++ b/src/sentry/metrics/minimetrics.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta, timezone import sentry_sdk -from sentry_sdk.metrics import metrics_noop from sentry_sdk.tracing import Span from sentry.metrics.base import MetricsBackend, Tags @@ -14,7 +13,6 @@ def _attach_tags(span: Span, tags: Tags | None) -> None: span.set_data(tag_key, tag_value) -@metrics_noop def _set_metric_on_span(key: str, value: float | int, op: str, tags: Tags | None = None) -> None: span_or_tx = sentry_sdk.get_current_span() if span_or_tx is None: From 2a204f41ed475fab24f19c7e4879ccc0e60382a3 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 11:41:27 +0200 Subject: [PATCH 04/66] Make tests run locally until pytest-sentry is 3.0 compatible --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index ff0410eb556ab3..9dbf8aea58269a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,10 @@ filterwarnings = [ # pytest has not yet implemented the replacement for this yet "ignore:The --looponfail command line argument.*", + + # TODO-anton: just for testing locally, remove before merging + "ignore::DeprecationWarning:setuptools._distutils.version", + "ignore::DeprecationWarning:redis.connection" ] looponfailroots = ["src", "tests"] From f3d56a1e640420046dd082fb952588133fd05487 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 11:48:15 +0200 Subject: [PATCH 05/66] Replace .set_data with .set_attribute --- src/sentry/api/base.py | 4 +++- src/sentry/api/bases/organization.py | 4 ++-- .../api/endpoints/group_current_release.py | 4 ++-- .../endpoints/organization_events_facets.py | 2 +- .../organization_events_facets_performance.py | 8 ++++---- .../api/endpoints/organization_events_meta.py | 2 +- .../api/endpoints/organization_events_stats.py | 2 +- .../organization_events_timeseries.py | 2 +- ...ation_on_demand_metrics_estimation_stats.py | 2 +- src/sentry/api/endpoints/organization_users.py | 4 ++-- src/sentry/api/serializers/base.py | 2 +- src/sentry/api/serializers/models/project.py | 2 +- src/sentry/auth/access.py | 8 ++++---- src/sentry/debug_files/upload.py | 2 +- .../rules/helpers/latest_releases.py | 4 ++-- src/sentry/features/manager.py | 8 ++++---- src/sentry/hybridcloud/models/outbox.py | 4 ++-- .../ingest/transaction_clusterer/tasks.py | 2 +- .../endpoints/organization_issues_count.py | 2 +- src/sentry/issues/grouptype.py | 2 +- src/sentry/metrics/minimetrics.py | 4 ++-- src/sentry/models/project.py | 4 ++-- src/sentry/relay/config/metric_extraction.py | 2 +- src/sentry/reprocessing2.py | 2 +- src/sentry/search/events/builder/base.py | 2 +- src/sentry/search/snuba/executors.py | 8 ++++---- src/sentry/snuba/discover.py | 4 ++-- src/sentry/snuba/errors.py | 2 +- src/sentry/snuba/functions.py | 2 +- src/sentry/snuba/metrics/fields/histogram.py | 4 ++-- 
.../snuba/query_subscriptions/consumer.py | 18 +++++++++--------- src/sentry/snuba/spans_indexed.py | 2 +- src/sentry/snuba/tasks.py | 4 ++-- src/sentry/stacktraces/processing.py | 10 +++++----- src/sentry/tagstore/snuba/backend.py | 12 ++++++------ src/sentry/tasks/store.py | 4 ++-- src/sentry/tasks/symbolication.py | 2 +- src/sentry/taskworker/registry.py | 6 +++--- src/sentry/taskworker/workerchild.py | 12 ++++++------ src/sentry/tempest/tasks.py | 4 ++-- src/sentry/utils/db.py | 2 +- src/sentry/utils/pagination_factory.py | 2 +- src/sentry/utils/sdk.py | 2 +- src/sentry/utils/snuba.py | 2 +- src/sentry/utils/snuba_rpc.py | 2 +- 45 files changed, 95 insertions(+), 93 deletions(-) diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 18d1e1c00ac94c..cc4cae9ec6c600 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -441,7 +441,9 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response: op="base.dispatch.sleep", name=type(self).__name__, ) as span: - span.set_data("SENTRY_API_RESPONSE_DELAY", settings.SENTRY_API_RESPONSE_DELAY) + span.set_attribute( + "SENTRY_API_RESPONSE_DELAY", settings.SENTRY_API_RESPONSE_DELAY + ) time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0 - duration) # Only enforced in dev environment diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index 0b891cabb7a95f..f2d4594ddc098d 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -405,7 +405,7 @@ def get_projects( with sentry_sdk.start_span(op="fetch_organization_projects") as span: projects = list(qs) - span.set_data("Project Count", len(projects)) + span.set_attribute("Project Count", len(projects)) filter_by_membership = not bool(ids) and not bool(slugs) filtered_projects = self._filter_projects_by_permissions( @@ -431,7 +431,7 @@ def _filter_projects_by_permissions( include_all_accessible: bool = False, ) -> list[Project]: with sentry_sdk.start_span(op="apply_project_permissions") as span: - span.set_data("Project Count", len(projects)) + span.set_attribute("Project Count", len(projects)) if force_global_perms: span.set_tag("mode", "force_global_perms") return projects diff --git a/src/sentry/api/endpoints/group_current_release.py b/src/sentry/api/endpoints/group_current_release.py index e8c93360f08d83..ad2d89b3d3e546 100644 --- a/src/sentry/api/endpoints/group_current_release.py +++ b/src/sentry/api/endpoints/group_current_release.py @@ -65,8 +65,8 @@ def get(self, request: Request, group) -> Response: environments = get_environments(request, group.project.organization) with sentry_sdk.start_span(op="CurrentReleaseEndpoint.get.current_release") as span: - span.set_data("Environment Count", len(environments)) - span.set_data( + span.set_attribute("Environment Count", len(environments)) + span.set_attribute( "Raw Parameters", { "group.id": group.id, diff --git a/src/sentry/api/endpoints/organization_events_facets.py b/src/sentry/api/endpoints/organization_events_facets.py index 42637597b0633e..d1ea0444144e15 100644 --- a/src/sentry/api/endpoints/organization_events_facets.py +++ b/src/sentry/api/endpoints/organization_events_facets.py @@ -56,7 +56,7 @@ def data_fn(offset, limit): ) with sentry_sdk.start_span(op="discover.endpoint", name="populate_results") as span: - span.set_data("facet_count", len(facets or [])) + span.set_attribute("facet_count", len(facets or [])) resp: dict[str, _KeyTopValues] resp = defaultdict(lambda: {"key": "", "topValues": []}) for row in facets: diff --git 
a/src/sentry/api/endpoints/organization_events_facets_performance.py b/src/sentry/api/endpoints/organization_events_facets_performance.py index d2ed58692709eb..c1476d9c189ad5 100644 --- a/src/sentry/api/endpoints/organization_events_facets_performance.py +++ b/src/sentry/api/endpoints/organization_events_facets_performance.py @@ -262,7 +262,7 @@ def query_tag_data( Returns None if query was not successful which causes the endpoint to return early """ with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span: - span.set_data("query", filter_query) + span.set_attribute("query", filter_query) tag_query = DiscoverQueryBuilder( dataset=Dataset.Discover, params={}, @@ -391,7 +391,7 @@ def query_facet_performance( tag_key_limit = limit if tag_key else 1 with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span: - span.set_data("query", filter_query) + span.set_attribute("query", filter_query) tag_query = DiscoverQueryBuilder( dataset=Dataset.Discover, params={}, @@ -417,8 +417,8 @@ def query_facet_performance( ) with sentry_sdk.start_span(op="discover.discover", name="facets.aggregate_tags"): - span.set_data("sample_rate", sample_rate) - span.set_data("target_sample", target_sample) + span.set_attribute("sample_rate", sample_rate) + span.set_attribute("target_sample", target_sample) aggregate_comparison = transaction_aggregate * 1.005 if transaction_aggregate else 0 aggregate_column = Function("avg", [translated_aggregate_column], "aggregate") tag_query.where.append(excluded_tags) diff --git a/src/sentry/api/endpoints/organization_events_meta.py b/src/sentry/api/endpoints/organization_events_meta.py index cb11565982ec63..7943fea8d8adde 100644 --- a/src/sentry/api/endpoints/organization_events_meta.py +++ b/src/sentry/api/endpoints/organization_events_meta.py @@ -155,7 +155,7 @@ def get(self, request: Request, organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="serialize_results") as span: results = list(results_cursor) - span.set_data("result_length", len(results)) + span.set_attribute("result_length", len(results)) context = serialize( results, request.user, diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 0c0aaae1365901..f85bb2c8b4b1a8 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -190,7 +190,7 @@ def get(self, request: Request, organization: Organization) -> Response: query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - span.set_data("organization", organization) + span.set_attribute("organization", organization) top_events = 0 diff --git a/src/sentry/api/endpoints/organization_events_timeseries.py b/src/sentry/api/endpoints/organization_events_timeseries.py index f5b36a2c6ed9d7..6cc50a96c01a3b 100644 --- a/src/sentry/api/endpoints/organization_events_timeseries.py +++ b/src/sentry/api/endpoints/organization_events_timeseries.py @@ -152,7 +152,7 @@ def get_comparison_delta(self, request: Request) -> timedelta | None: def get(self, request: Request, organization: Organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - span.set_data("organization", organization) + span.set_attribute("organization", organization) top_events = self.get_top_events(request) comparison_delta = self.get_comparison_delta(request) 
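The hunks in this patch are all the same mechanical rewrite: sentry-sdk 2.x's span.set_data() becomes span.set_attribute() in 3.0.0a1. A minimal sketch of the pattern outside the diff context (the function, op, and attribute names below are illustrative, not taken from any one hunk):

    import sentry_sdk

    def serialize_results(results: list[dict]) -> list[dict]:
        with sentry_sdk.start_span(op="discover.endpoint", name="serialize_results") as span:
            # sentry-sdk 2.x spelling, removed throughout this patch:
            #     span.set_data("result_count", len(results))
            # sentry-sdk 3.0.0a1 spelling:
            span.set_attribute("result_count", len(results))
        return results
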
diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py index cb2a9310a375e6..6250942232a019 100644 --- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py +++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py @@ -67,7 +67,7 @@ def get(self, request: Request, organization: Organization) -> Response: return Response({"detail": "missing required parameter yAxis"}, status=400) with sentry_sdk.start_span(op="discover.metrics.endpoint", name="get_full_metrics") as span: - span.set_data("organization", organization) + span.set_attribute("organization", organization) try: # the discover stats diff --git a/src/sentry/api/endpoints/organization_users.py b/src/sentry/api/endpoints/organization_users.py index c37e81d046f539..29f351feb6dcfc 100644 --- a/src/sentry/api/endpoints/organization_users.py +++ b/src/sentry/api/endpoints/organization_users.py @@ -47,8 +47,8 @@ def get(self, request: Request, organization) -> Response: organization_members = list(qs) - span.set_data("Project Count", len(projects)) - span.set_data("Member Count", len(organization_members)) + span.set_attribute("Project Count", len(projects)) + span.set_attribute("Member Count", len(organization_members)) return Response( serialize( diff --git a/src/sentry/api/serializers/base.py b/src/sentry/api/serializers/base.py index 6242e26c073868..3195cbe9726e6c 100644 --- a/src/sentry/api/serializers/base.py +++ b/src/sentry/api/serializers/base.py @@ -65,7 +65,7 @@ def serialize( else: return objects with sentry_sdk.start_span(op="serialize", name=type(serializer).__name__) as span: - span.set_data("Object Count", len(objects)) + span.set_attribute("Object Count", len(objects)) with sentry_sdk.start_span(op="serialize.get_attrs", name=type(serializer).__name__): attrs = serializer.get_attrs( diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index ff3f534a8bd506..248c992593f96f 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -339,7 +339,7 @@ def get_attrs( ) -> dict[Project, dict[str, Any]]: def measure_span(op_tag): span = sentry_sdk.start_span(op=f"serialize.get_attrs.project.{op_tag}") - span.set_data("Object Count", len(item_list)) + span.set_attribute("Object Count", len(item_list)) return span with measure_span("preamble"): diff --git a/src/sentry/auth/access.py b/src/sentry/auth/access.py index fd8a9e19e01335..3f50d95b683274 100644 --- a/src/sentry/auth/access.py +++ b/src/sentry/auth/access.py @@ -273,8 +273,8 @@ def project_ids_with_team_membership(self) -> frozenset[int]: .distinct() .values_list("id", flat=True) ) - span.set_data("Project Count", len(projects)) - span.set_data("Team Count", len(teams)) + span.set_attribute("Project Count", len(projects)) + span.set_attribute("Team Count", len(teams)) return projects @@ -353,7 +353,7 @@ def has_any_project_scope(self, project: Project, scopes: Collection[str]) -> bo ] span.set_tag("organization", self._member.organization.id) span.set_tag("organization.slug", self._member.organization.slug) - span.set_data("membership_count", len(memberships)) + span.set_attribute("membership_count", len(memberships)) for membership in memberships: team_scopes = membership.get_scopes() @@ -564,7 +564,7 @@ def has_any_project_scope(self, project: Project, scopes: Collection[str]) -> bo span.set_tag( 
"organization.slug", self.rpc_user_organization_context.organization.slug ) - span.set_data("membership_count", len(orgmember_teams)) + span.set_attribute("membership_count", len(orgmember_teams)) for member_team in orgmember_teams: if not member_team.role: diff --git a/src/sentry/debug_files/upload.py b/src/sentry/debug_files/upload.py index cb5fde4bc84425..b28bba7d0ba939 100644 --- a/src/sentry/debug_files/upload.py +++ b/src/sentry/debug_files/upload.py @@ -10,7 +10,7 @@ def find_missing_chunks(organization_id: int, chunks: set[str]): """Returns a list of chunks which are missing for an org.""" with sentry_sdk.start_span(op="find_missing_chunks") as span: span.set_tag("organization_id", organization_id) - span.set_data("chunks_size", len(chunks)) + span.set_attribute("chunks_size", len(chunks)) if not chunks: return [] diff --git a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py index c4e72b6c27a65e..d60892defe8716 100644 --- a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py +++ b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py @@ -385,8 +385,8 @@ def on_release_boosted() -> None: "dynamic_sampling.observe_release_status", "(release, environment) pair observed and boosted", ) - span.set_data("release", release.id) - span.set_data("environment", environment) + span.set_attribute("release", release.id) + span.set_attribute("environment", environment) schedule_invalidate_project_config( project_id=project.id, diff --git a/src/sentry/features/manager.py b/src/sentry/features/manager.py index 15f98d10998f96..baee3e505818cb 100644 --- a/src/sentry/features/manager.py +++ b/src/sentry/features/manager.py @@ -114,9 +114,9 @@ def has_for_batch( name=f"{type(handler).__name__} ({name})", ) as span: batch_size = len(remaining) - span.set_data("Batch Size", batch_size) - span.set_data("Feature Name", name) - span.set_data("Handler Type", type(handler).__name__) + span.set_attribute("Batch Size", batch_size) + span.set_attribute("Feature Name", name) + span.set_attribute("Handler Type", type(handler).__name__) batch = FeatureCheckBatch(self, name, organization, remaining, actor) handler_result = handler.has_for_batch(batch) @@ -124,7 +124,7 @@ def has_for_batch( if flag is not None: remaining.remove(obj) result[obj] = flag - span.set_data("Flags Found", batch_size - len(remaining)) + span.set_attribute("Flags Found", batch_size - len(remaining)) default_flag = settings.SENTRY_FEATURES.get(name, False) for obj in remaining: diff --git a/src/sentry/hybridcloud/models/outbox.py b/src/sentry/hybridcloud/models/outbox.py index 55d481d66a8b1e..8eefdd352ff270 100644 --- a/src/sentry/hybridcloud/models/outbox.py +++ b/src/sentry/hybridcloud/models/outbox.py @@ -289,8 +289,8 @@ def process_coalesced( def _set_span_data_for_coalesced_message(self, span: Span, message: OutboxBase) -> None: tag_for_outbox = OutboxScope.get_tag_name(message.shard_scope) span.set_tag(tag_for_outbox, message.shard_identifier) - span.set_data("outbox_id", message.id) - span.set_data("outbox_shard_id", message.shard_identifier) + span.set_attribute("outbox_id", message.id) + span.set_attribute("outbox_shard_id", message.shard_identifier) span.set_tag("outbox_category", OutboxCategory(message.category).name) span.set_tag("outbox_scope", OutboxScope(message.shard_scope).name) diff --git a/src/sentry/ingest/transaction_clusterer/tasks.py b/src/sentry/ingest/transaction_clusterer/tasks.py index d61cbcd997abbf..9cfac20a8d86a7 100644 --- 
a/src/sentry/ingest/transaction_clusterer/tasks.py +++ b/src/sentry/ingest/transaction_clusterer/tasks.py @@ -89,7 +89,7 @@ def cluster_projects(project_ids: Sequence[int]) -> None: try: for project in projects: with sentry_sdk.start_span(op="txcluster_project") as span: - span.set_data("project_id", project.id) + span.set_attribute("project_id", project.id) tx_names = list(redis.get_transaction_names(project)) new_rules = [] if len(tx_names) >= MERGE_THRESHOLD: diff --git a/src/sentry/issues/endpoints/organization_issues_count.py b/src/sentry/issues/endpoints/organization_issues_count.py index 8bd60902176c17..40126fbef70a69 100644 --- a/src/sentry/issues/endpoints/organization_issues_count.py +++ b/src/sentry/issues/endpoints/organization_issues_count.py @@ -63,7 +63,7 @@ def _count( query_kwargs["actor"] = request.user with start_span(op="start_search") as span: - span.set_data("query_kwargs", query_kwargs) + span.set_attribute("query_kwargs", query_kwargs) result = search.backend.query(**query_kwargs) return result.hits diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index afb4b414cd66de..bb5341cf7f6ff9 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -126,7 +126,7 @@ def get_visible( span.set_tag("has_batch_features", batch_features is not None) span.set_tag("released", released) span.set_tag("enabled", enabled) - span.set_data("feature_to_grouptype", feature_to_grouptype) + span.set_attribute("feature_to_grouptype", feature_to_grouptype) return released + enabled def get_all_group_type_ids(self) -> set[int]: diff --git a/src/sentry/metrics/minimetrics.py b/src/sentry/metrics/minimetrics.py index e83ed2673ab41d..6eab805c72cbb3 100644 --- a/src/sentry/metrics/minimetrics.py +++ b/src/sentry/metrics/minimetrics.py @@ -10,7 +10,7 @@ def _attach_tags(span: Span, tags: Tags | None) -> None: if tags: for tag_key, tag_value in tags.items(): - span.set_data(tag_key, tag_value) + span.set_attribute(tag_key, tag_value) def _set_metric_on_span(key: str, value: float | int, op: str, tags: Tags | None = None) -> None: @@ -18,7 +18,7 @@ def _set_metric_on_span(key: str, value: float | int, op: str, tags: Tags | None if span_or_tx is None: return - span_or_tx.set_data(key, value) + span_or_tx.set_attribute(key, value) _attach_tags(span_or_tx, tags) diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py index 81bb959f67029c..0e9a43a323a416 100644 --- a/src/sentry/models/project.py +++ b/src/sentry/models/project.py @@ -368,8 +368,8 @@ def next_short_id(self, delta: int = 1) -> int: sentry_sdk.start_span(op="project.next_short_id") as span, metrics.timer("project.next_short_id"), ): - span.set_data("project_id", self.id) - span.set_data("project_slug", self.slug) + span.set_attribute("project_id", self.id) + span.set_attribute("project_slug", self.slug) return Counter.increment(self, delta) def _save_project(self, *args, **kwargs): diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index c17886d080a3ff..9a094fe7005d10 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -793,7 +793,7 @@ def _convert_aggregate_and_query_to_metrics( } with sentry_sdk.start_span(op="converting_aggregate_and_query") as span: - span.set_data("widget_query_args", {"query": query, "aggregate": aggregate}) + span.set_attribute("widget_query_args", {"query": query, "aggregate": aggregate}) # Create as many specs as we support for 
spec_version in OnDemandMetricSpecVersioning.get_spec_versions(): try: diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index ba3a99f5f29efc..88b0bbe2046821 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -220,7 +220,7 @@ def reprocess_event(project_id: int, event_id: str, start_time: float) -> None: for attachment_id, attachment in enumerate(attachments): with sentry_sdk.start_span(op="reprocess_event._copy_attachment_into_cache") as span: - span.set_data("attachment_id", attachment.id) + span.set_attribute("attachment_id", attachment.id) attachment_objects.append( _copy_attachment_into_cache( attachment_id=attachment_id, diff --git a/src/sentry/search/events/builder/base.py b/src/sentry/search/events/builder/base.py index 829a1225abcfdc..186fb6ea5f8033 100644 --- a/src/sentry/search/events/builder/base.py +++ b/src/sentry/search/events/builder/base.py @@ -1562,7 +1562,7 @@ def run_query( def process_results(self, results: Any) -> EventsResponse: with sentry_sdk.start_span(op="QueryBuilder", name="process_results") as span: - span.set_data("result_count", len(results.get("data", []))) + span.set_attribute("result_count", len(results.get("data", []))) translated_columns = self.alias_to_typed_tag_map if self.builder_config.transform_alias_to_input_format: translated_columns.update( diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py index 4ab58f147c8f43..f33abd00e91a05 100644 --- a/src/sentry/search/snuba/executors.py +++ b/src/sentry/search/snuba/executors.py @@ -885,8 +885,8 @@ def query( group_ids = list( group_queryset.using_replica().values_list("id", flat=True)[: max_candidates + 1] ) - span.set_data("Max Candidates", max_candidates) - span.set_data("Result Size", len(group_ids)) + span.set_attribute("Max Candidates", max_candidates) + span.set_attribute("Result Size", len(group_ids)) metrics.distribution("snuba.search.num_candidates", len(group_ids)) too_many_candidates = False if not group_ids: @@ -1702,8 +1702,8 @@ def query( : max_candidates + 1 ] ) - span.set_data("Max Candidates", max_candidates) - span.set_data("Result Size", len(group_ids_to_pass_to_snuba)) + span.set_attribute("Max Candidates", max_candidates) + span.set_attribute("Result Size", len(group_ids_to_pass_to_snuba)) if too_many_candidates := (len(group_ids_to_pass_to_snuba) > max_candidates): metrics.incr( diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index d83480c276db80..1e0fdf7ca9a83f 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -560,7 +560,7 @@ def top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span: - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: Mapping[int, str | None] = {} @@ -734,7 +734,7 @@ def get_facets( individual_tags.append(tag) with sentry_sdk.start_span(op="discover.discover", name="facets.individual_tags") as span: - span.set_data("tag_count", len(individual_tags)) + span.set_attribute("tag_count", len(individual_tags)) for tag_name in individual_tags: tag = f"tags[{tag_name}]" tag_value_builder = DiscoverQueryBuilder( diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py index 50c6868226199e..d63f6d7df07f78 100644 --- a/src/sentry/snuba/errors.py +++ b/src/sentry/snuba/errors.py @@ -322,7 +322,7 @@ def 
top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="discover.errors", name="top_events.transform_results") as span: - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: Mapping[int, str | None] = {} diff --git a/src/sentry/snuba/functions.py b/src/sentry/snuba/functions.py index 43fbf6279a530e..85cfd0c0c7ad3a 100644 --- a/src/sentry/snuba/functions.py +++ b/src/sentry/snuba/functions.py @@ -238,7 +238,7 @@ def format_top_events_timeseries_results( with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span: result = query_builder.strip_alias_prefix(result) - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) processed_result = query_builder.process_results(result) if result_key_order is None: diff --git a/src/sentry/snuba/metrics/fields/histogram.py b/src/sentry/snuba/metrics/fields/histogram.py index b5a3be0b8eab3b..5418a89067b212 100644 --- a/src/sentry/snuba/metrics/fields/histogram.py +++ b/src/sentry/snuba/metrics/fields/histogram.py @@ -52,8 +52,8 @@ def rebucket_histogram( with sentry_sdk.start_span( op="sentry.snuba.metrics.fields.histogram.rebucket_histogram" ) as span: - span.set_data("len_data", len(data)) - span.set_data("len_rv", len(rv)) + span.set_attribute("len_data", len(data)) + span.set_attribute("len_rv", len(rv)) # XXX: quadratic function assert len(data) < 300 diff --git a/src/sentry/snuba/query_subscriptions/consumer.py b/src/sentry/snuba/query_subscriptions/consumer.py index f05291ede2262a..6302c659981347 100644 --- a/src/sentry/snuba/query_subscriptions/consumer.py +++ b/src/sentry/snuba/query_subscriptions/consumer.py @@ -163,15 +163,15 @@ def handle_message( tags={"dataset": dataset}, ), ): - span.set_data("payload", contents) - span.set_data("subscription_dataset", subscription.snuba_query.dataset) - span.set_data("subscription_query", subscription.snuba_query.query) - span.set_data("subscription_aggregation", subscription.snuba_query.aggregate) - span.set_data("subscription_time_window", subscription.snuba_query.time_window) - span.set_data("subscription_resolution", subscription.snuba_query.resolution) - span.set_data("message_offset", message_offset) - span.set_data("message_partition", message_partition) - span.set_data("message_value", message_value) + span.set_attribute("payload", contents) + span.set_attribute("subscription_dataset", subscription.snuba_query.dataset) + span.set_attribute("subscription_query", subscription.snuba_query.query) + span.set_attribute("subscription_aggregation", subscription.snuba_query.aggregate) + span.set_attribute("subscription_time_window", subscription.snuba_query.time_window) + span.set_attribute("subscription_resolution", subscription.snuba_query.resolution) + span.set_attribute("message_offset", message_offset) + span.set_attribute("message_partition", message_partition) + span.set_attribute("message_value", message_value) callback(contents, subscription) diff --git a/src/sentry/snuba/spans_indexed.py b/src/sentry/snuba/spans_indexed.py index 5cbab6d25172cf..d47c1372fb8e45 100644 --- a/src/sentry/snuba/spans_indexed.py +++ b/src/sentry/snuba/spans_indexed.py @@ -245,7 +245,7 @@ def top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="spans_indexed", name="top_events.transform_results") as span: - span.set_data("result_count", len(result.get("data", []))) + 
span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: dict[int, str | None] = {} diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py index 16c87f07a54858..804c9b8fd90d14 100644 --- a/src/sentry/snuba/tasks.py +++ b/src/sentry/snuba/tasks.py @@ -359,8 +359,8 @@ def subscription_checker(**kwargs): date_updated__lt=timezone.now() - SUBSCRIPTION_STATUS_MAX_AGE, ): with sentry_sdk.start_span(op="repair_subscription") as span: - span.set_data("subscription_id", subscription.id) - span.set_data("status", subscription.status) + span.set_attribute("subscription_id", subscription.id) + span.set_attribute("status", subscription.status) count += 1 if subscription.status == QuerySubscription.Status.CREATING.value: create_subscription_in_snuba.delay(query_subscription_id=subscription.id) diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py index efb2368a6212c6..20bd892decc189 100644 --- a/src/sentry/stacktraces/processing.py +++ b/src/sentry/stacktraces/processing.py @@ -620,10 +620,10 @@ def process_stacktraces( with sentry_sdk.start_span( op="stacktraces.processing.process_stacktraces.preprocess_step" ) as span: - span.set_data("processor", processor.__class__.__name__) + span.set_attribute("processor", processor.__class__.__name__) if processor.preprocess_step(processing_task): changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) # Process all stacktraces for stacktrace_info, processable_frames in processing_task.iter_processable_stacktraces(): @@ -633,10 +633,10 @@ def process_stacktraces( with sentry_sdk.start_span( op="stacktraces.processing.process_stacktraces.process_exception" ) as span: - span.set_data("processor", processor.__class__.__name__) + span.set_attribute("processor", processor.__class__.__name__) if processor.process_exception(stacktrace_info.container): changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) # If the stacktrace is empty we skip it for processing if not stacktrace_info.stacktrace: @@ -650,7 +650,7 @@ def process_stacktraces( if new_frames is not None: stacktrace_info.stacktrace["frames"] = new_frames changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) if ( set_raw_stacktrace and new_raw_frames is not None diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index 11a00972820058..6f38059b8e5231 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -335,14 +335,14 @@ def __get_tag_keys_for_projects( ) as span: result = cache.get(cache_key, None) - span.set_data("cache.key", [cache_key]) + span.set_attribute("cache.key", [cache_key]) if result is not None: - span.set_data("cache.hit", True) - span.set_data("cache.item_size", len(str(result))) + span.set_attribute("cache.hit", True) + span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.hit") else: - span.set_data("cache.hit", False) + span.set_attribute("cache.hit", False) metrics.incr("testing.tagstore.cache_tag_key.miss") if result is None: @@ -364,8 +364,8 @@ def __get_tag_keys_for_projects( op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" ) as span: cache.set(cache_key, result, 300) - span.set_data("cache.key", [cache_key]) - span.set_data("cache.item_size", len(str(result))) + span.set_attribute("cache.key", [cache_key]) + 
span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) ctor: _KeyCallable[TagKey, Never] | _KeyCallable[GroupTagKey, Never] diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index d688edcb4ce45c..b10580f7610356 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -386,8 +386,8 @@ def _continue_to_save_event() -> None: # Default event processors. for plugin in plugins.all(version=2): with sentry_sdk.start_span(op="task.store.process_event.preprocessors") as span: - span.set_data("plugin", plugin.slug) - span.set_data("from_symbolicate", from_symbolicate) + span.set_attribute("plugin", plugin.slug) + span.set_attribute("from_symbolicate", from_symbolicate) processors = safe_execute(plugin.get_event_preprocessors, data=data) for processor in processors or (): try: diff --git a/src/sentry/tasks/symbolication.py b/src/sentry/tasks/symbolication.py index 68c4d0dd4001b9..8f688549977a01 100644 --- a/src/sentry/tasks/symbolication.py +++ b/src/sentry/tasks/symbolication.py @@ -185,7 +185,7 @@ def on_symbolicator_request(): ): try: symbolicated_data = symbolication_function(symbolicator, data) - span.set_data("symbolicated_data", bool(symbolicated_data)) + span.set_attribute("symbolicated_data", bool(symbolicated_data)) if symbolicated_data: data = symbolicated_data diff --git a/src/sentry/taskworker/registry.py b/src/sentry/taskworker/registry.py index b78aedb239c42f..d970791db814d5 100644 --- a/src/sentry/taskworker/registry.py +++ b/src/sentry/taskworker/registry.py @@ -151,9 +151,9 @@ def send_task(self, activation: TaskActivation, wait_for_delivery: bool = False) origin="taskworker", ) as span: # TODO(taskworker) add monitor headers - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) - span.set_data(SPANDATA.MESSAGING_SYSTEM, "taskworker") + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) + span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") produce_future = self._producer(topic).produce( ArroyoTopic(name=topic.value), diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index f5e66a0ac24099..7d8722e7a64bd5 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -291,7 +291,7 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - track_memory_usage("taskworker.worker.memory_change"), sentry_sdk.start_transaction(transaction), ): - transaction.set_data( + transaction.set_attribute( "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} ) task_added_time = activation.received_at.ToDatetime().timestamp() @@ -302,13 +302,13 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - name=activation.taskname, origin="taskworker", ) as span: - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) - span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) - span.set_data( + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, 
activation.retry_state.attempts ) - span.set_data(SPANDATA.MESSAGING_SYSTEM, "taskworker") + span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") try: task_func(*args, **kwargs) diff --git a/src/sentry/tempest/tasks.py b/src/sentry/tempest/tasks.py index 9bbe07984c84d3..ab70770c994794 100644 --- a/src/sentry/tempest/tasks.py +++ b/src/sentry/tempest/tasks.py @@ -193,7 +193,7 @@ def fetch_latest_id_from_tempest( span = sentry_sdk.get_current_span() if span is not None: - span.set_data("response_text", response.text) + span.set_attribute("response_text", response.text) return response @@ -231,6 +231,6 @@ def fetch_items_from_tempest( span = sentry_sdk.get_current_span() if span is not None: - span.set_data("response_text", response.text) + span.set_attribute("response_text", response.text) return response diff --git a/src/sentry/utils/db.py b/src/sentry/utils/db.py index e6035f46b5acfe..2b8977556edea3 100644 --- a/src/sentry/utils/db.py +++ b/src/sentry/utils/db.py @@ -59,7 +59,7 @@ def setup_once(): def _enter(self): self._sentry_sdk_span = sentry_sdk.start_span(op="transaction.atomic") - self._sentry_sdk_span.set_data("using", self.using) + self._sentry_sdk_span.set_attribute("using", self.using) self._sentry_sdk_span.__enter__() return original_enter(self) diff --git a/src/sentry/utils/pagination_factory.py b/src/sentry/utils/pagination_factory.py index 7c59c0a96546bf..1a105c3c6e7023 100644 --- a/src/sentry/utils/pagination_factory.py +++ b/src/sentry/utils/pagination_factory.py @@ -67,6 +67,6 @@ def get_paginator( def annotate_span_with_pagination_args(span: Span, per_page: int) -> None: - span.set_data("Limit", per_page) + span.set_attribute("Limit", per_page) sentry_sdk.set_tag("query.per_page", per_page) sentry_sdk.set_tag("query.per_page.grouped", format_grouped_length(per_page, [1, 10, 50, 100])) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 36da3e42ee6320..7898b56710c522 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -707,7 +707,7 @@ def set_measurement(measurement_name, value, unit=None): def set_span_data(data_name, value): span = sentry_sdk.get_current_span() if span is not None: - span.set_data(data_name, value) + span.set_attribute(data_name, value) def merge_context_into_scope( diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index d112d1e3902668..2c02f290ec1637 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1208,7 +1208,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: allocation_policy_prefix = "allocation_policy." 
bytes_scanned = body.get("profile", {}).get("progress_bytes", None) if bytes_scanned is not None: - span.set_data(f"{allocation_policy_prefix}.bytes_scanned", bytes_scanned) + span.set_attribute(f"{allocation_policy_prefix}.bytes_scanned", bytes_scanned) if _is_rejected_query(body): quota_allowance_summary = body["quota_allowance"]["summary"] for k, v in quota_allowance_summary.items(): diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index 42c92e4400f74f..7f6d2609adc787 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -266,7 +266,7 @@ def _make_rpc_request( with sentry_sdk.start_span(op="snuba_rpc.run", name=req.__class__.__name__) as span: if referrer: span.set_tag("snuba.referrer", referrer) - span.set_data("snuba.query", req) + span.set_attribute("snuba.query", req) try: http_resp = _snuba_pool.urlopen( "POST", From fcc75acad711ee03bc5832d2d85f8242f875b1bc Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 13:09:35 +0200 Subject: [PATCH 06/66] changed add_attachment --- Makefile | 2 +- src/sentry/consumers/validate_schema.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index ca09d4e297e76c..a730185673796e 100644 --- a/Makefile +++ b/Makefile @@ -130,7 +130,7 @@ COV_ARGS = --cov-report="xml:.artifacts/python.coverage.xml" test-python-ci: @echo "--> Running CI Python tests" python3 -b -m pytest \ - tests \ + tests/relay_integration/test_sdk.py \ --ignore tests/acceptance \ --ignore tests/apidocs \ --ignore tests/js \ diff --git a/src/sentry/consumers/validate_schema.py b/src/sentry/consumers/validate_schema.py index e5138b690b9c1d..0f2b21caad3e6c 100644 --- a/src/sentry/consumers/validate_schema.py +++ b/src/sentry/consumers/validate_schema.py @@ -46,8 +46,8 @@ def submit(self, message: Message[KafkaPayload]) -> None: now = time.time() if self.__last_record_time is None or self.__last_record_time + 1.0 < now: with sentry_sdk.isolation_scope() as scope: - scope.add_attachment(bytes=message.payload.value, filename="message.txt") scope.set_tag("topic", self.__topic) + sentry_sdk.add_attachment(bytes=message.payload.value, filename="message.txt") if self.__codec is None: logger.warning("No validator configured for topic") From b75ec43590ae2b4a7209ec5290634850a1b0e4b5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 13:14:57 +0200 Subject: [PATCH 07/66] Changed start_transaction to start_span and custom_sampling_context to attributes --- src/sentry/ingest/consumer/processors.py | 2 +- src/sentry/issues/occurrence_consumer.py | 4 ++-- .../consumers/incident_occurrences_consumer.py | 2 +- .../monitors/consumers/monitor_consumer.py | 4 ++-- src/sentry/processing/backpressure/monitor.py | 2 +- src/sentry/relay/config/__init__.py | 2 +- .../consumers/result_consumer.py | 2 +- src/sentry/replays/consumers/recording.py | 8 ++++---- src/sentry/replays/usecases/ingest/__init__.py | 4 ++-- src/sentry/runner/commands/cleanup.py | 2 +- src/sentry/runner/commands/devserver.py | 2 +- .../api/endpoints/sentry_app_components.py | 2 +- .../consumers/indexer/processing.py | 4 ++-- src/sentry/snuba/query_subscriptions/run.py | 4 ++-- src/sentry/taskworker/workerchild.py | 2 +- tests/sentry/metrics/test_minimetrics.py | 16 ++++++++-------- tests/sentry/taskworker/test_task.py | 4 ++-- 17 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index 490708ab31c5e9..27735d3f6929fd 100644 --- 
a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -59,7 +59,7 @@ def inner(*args, **kwargs): "custom_sampling_context", {"sample_rate": sample_rate}, ) - with sentry_sdk.start_transaction(**span_kwargs): + with sentry_sdk.start_span(**span_kwargs): return f(*args, **kwargs) return inner diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index db49bdb2989bb0..1999f69d3dd56a 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -394,7 +394,7 @@ def _process_message( :raises InvalidEventPayloadError: when the message is invalid :raises EventLookupError: when the provided event_id in the message couldn't be found. """ - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="_process_message", name="issues.occurrence_consumer", ) as txn: @@ -463,7 +463,7 @@ def process_occurrence_batch( # Number of groups we've collected to be processed in parallel metrics.gauge("occurrence_consumer.checkin.parallel_batch_groups", len(occcurrence_mapping)) # Submit occurrences & status changes for processing - with sentry_sdk.start_transaction(op="process_batch", name="occurrence.occurrence_consumer"): + with sentry_sdk.start_span(op="process_batch", name="occurrence.occurrence_consumer"): futures = [ worker.submit(process_occurrence_group, group) for group in occcurrence_mapping.values() ] diff --git a/src/sentry/monitors/consumers/incident_occurrences_consumer.py b/src/sentry/monitors/consumers/incident_occurrences_consumer.py index 73e546b57da8c5..e20f5a63356ec7 100644 --- a/src/sentry/monitors/consumers/incident_occurrences_consumer.py +++ b/src/sentry/monitors/consumers/incident_occurrences_consumer.py @@ -126,7 +126,7 @@ def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorChec def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="_process_incident_occurrence", name="monitors.incident_occurrence_consumer", ) as txn: diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index a7a2f3462c77a6..1ed7ff6d41e7cb 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -934,7 +934,7 @@ def process_checkin(item: CheckinItem) -> None: Process an individual check-in """ try: - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="_process_checkin", name="monitors.monitor_consumer", ) as txn: @@ -1004,7 +1004,7 @@ def process_batch( metrics.gauge("monitors.checkin.parallel_batch_groups", len(checkin_mapping)) # Submit check-in groups for processing - with sentry_sdk.start_transaction(op="process_batch", name="monitors.monitor_consumer"): + with sentry_sdk.start_span(op="process_batch", name="monitors.monitor_consumer"): futures = [ executor.submit(process_checkin_group, group) for group in checkin_mapping.values() ] diff --git a/src/sentry/processing/backpressure/monitor.py b/src/sentry/processing/backpressure/monitor.py index f6a9e5e386908c..39f0bed5ef90c3 100644 --- a/src/sentry/processing/backpressure/monitor.py +++ b/src/sentry/processing/backpressure/monitor.py @@ -128,7 +128,7 @@ def start_service_monitoring() -> None: time.sleep(options.get("backpressure.monitoring.interval")) continue - with sentry_sdk.start_transaction(name="backpressure.monitoring", sampled=True): + with 
sentry_sdk.start_span(name="backpressure.monitoring", sampled=True): # first, check each base service and record its health unhealthy_services = check_service_health(services) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index f872fd4484701f..aceb3c5406cde6 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -294,7 +294,7 @@ def get_project_config( with sentry_sdk.isolation_scope() as scope: scope.set_tag("project", project.id) with ( - sentry_sdk.start_transaction(name="get_project_config"), + sentry_sdk.start_span(name="get_project_config"), metrics.timer("relay.config.get_project_config.duration"), ): return _get_project_config(project, project_keys=project_keys) diff --git a/src/sentry/remote_subscriptions/consumers/result_consumer.py b/src/sentry/remote_subscriptions/consumers/result_consumer.py index 04098a9dfa8a2a..0feb4a72b3aff4 100644 --- a/src/sentry/remote_subscriptions/consumers/result_consumer.py +++ b/src/sentry/remote_subscriptions/consumers/result_consumer.py @@ -272,7 +272,7 @@ def process_batch(self, message: Message[ValuesBatch[KafkaPayload]]): partitioned_values = self.partition_message_batch(message) # Submit groups for processing - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="process_batch", name=f"monitors.{self.identifier}.result_consumer" ): futures = [ diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index 02e270d894bf62..f376f888ddc3d1 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -62,10 +62,10 @@ def create_with_partitions( def process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage | FilteredPayload: - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="replays.consumer.recording_buffered.process_message", op="replays.consumer.recording_buffered.process_message", - custom_sampling_context={ + attributes={ "sample_rate": getattr(settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0) }, ): @@ -81,10 +81,10 @@ def process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage def commit_message(message: Message[ProcessedRecordingMessage]) -> None: isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() with sentry_sdk.scope.use_isolation_scope(isolation_scope): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="replays.consumer.recording_buffered.commit_message", op="replays.consumer.recording_buffered.commit_message", - custom_sampling_context={ + attributes={ "sample_rate": getattr( settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0 ) diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index 1e44af9acde5e6..9a1b8ec6ba694b 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -95,10 +95,10 @@ def ingest_recording(message_bytes: bytes) -> None: isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() with sentry_sdk.scope.use_isolation_scope(isolation_scope): - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="replays.consumer.process_recording", op="replays.consumer", - custom_sampling_context={ + attributes={ "sample_rate": getattr( settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0 ) diff --git a/src/sentry/runner/commands/cleanup.py b/src/sentry/runner/commands/cleanup.py index 
7e4bc6ce989b94..353b18e3c9fa81 100644 --- a/src/sentry/runner/commands/cleanup.py +++ b/src/sentry/runner/commands/cleanup.py @@ -178,7 +178,7 @@ def cleanup( transaction = None # Making sure we're not running in local dev to prevent a local error if not os.environ.get("SENTRY_DEVENV_HOME"): - transaction = sentry_sdk.start_transaction(op="cleanup", name="cleanup") + transaction = sentry_sdk.start_span(op="cleanup", name="cleanup") transaction.__enter__() transaction.set_tag("router", router) transaction.set_tag("model", model) diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index 5f5f23427df00a..ffed19f3e3cdf1 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -178,7 +178,7 @@ def devserver( dsn=os.environ.get("SENTRY_DEVSERVICES_DSN", ""), traces_sample_rate=1.0, ) - with sentry_sdk.start_transaction(op="command", name="sentry.devserver"): + with sentry_sdk.start_span(op="command", name="sentry.devserver"): passed_options = { p.name: ctx.params[p.name] for p in ctx.command.params diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py index efef233bbee2ec..e0b9ae7052acb2 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py @@ -61,7 +61,7 @@ def get( components = [] errors = {} - with sentry_sdk.start_transaction(name="sentry.api.sentry_app_components.get"): + with sentry_sdk.start_span(name="sentry.api.sentry_app_components.get"): with sentry_sdk.start_span(op="sentry-app-components.get_installs"): installs = SentryAppInstallation.objects.get_installed_for_organization( organization.id diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py index 43b04ad8a5cb25..2bd435d773942f 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/processing.py +++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py @@ -78,9 +78,9 @@ def process_messages(self, outer_message: Message[MessageBatch]) -> IndexerOutpu settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE * settings.SENTRY_BACKEND_APM_SAMPLING ) - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="sentry.sentry_metrics.consumers.indexer.processing.process_messages", - custom_sampling_context={"sample_rate": sample_rate}, + attributes={"sample_rate": sample_rate}, ): return self._process_messages_impl(outer_message) diff --git a/src/sentry/snuba/query_subscriptions/run.py b/src/sentry/snuba/query_subscriptions/run.py index fe3e95fc82ea63..db6d33c42df6c0 100644 --- a/src/sentry/snuba/query_subscriptions/run.py +++ b/src/sentry/snuba/query_subscriptions/run.py @@ -74,10 +74,10 @@ def process_message( from sentry.utils import metrics with ( - sentry_sdk.start_transaction( + sentry_sdk.start_span( op="handle_message", name="query_subscription_consumer_process_message", - custom_sampling_context={"sample_rate": options.get("subscriptions-query.sample-rate")}, + attributes={"sample_rate": options.get("subscriptions-query.sample-rate")}, ), metrics.timer("snuba_query_subscriber.handle_message", tags={"dataset": dataset.value}), ): diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index 7d8722e7a64bd5..dd150625717b88 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -289,7 +289,7 @@ def 
_execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - ) with ( track_memory_usage("taskworker.worker.memory_change"), - sentry_sdk.start_transaction(transaction), + sentry_sdk.start_span(transaction), ): transaction.set_attribute( "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} diff --git a/tests/sentry/metrics/test_minimetrics.py b/tests/sentry/metrics/test_minimetrics.py index 4091b7f4d464ac..a348126b86489d 100644 --- a/tests/sentry/metrics/test_minimetrics.py +++ b/tests/sentry/metrics/test_minimetrics.py @@ -67,7 +67,7 @@ def backend(): def test_incr(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo") @@ -79,7 +79,7 @@ def test_incr(backend, scope): def test_incr_with_tag(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo", tags={"x": "y"}) @@ -92,7 +92,7 @@ def test_incr_with_tag(backend, scope): def test_incr_multi(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo", tags={"x": "y"}) backend.incr(key="foo", tags={"x": "z"}) @@ -106,7 +106,7 @@ def test_incr_multi(backend, scope): def test_gauge(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.gauge(key="foo", value=0) backend.gauge(key="foo", value=42.0) @@ -119,7 +119,7 @@ def test_gauge(backend, scope): def test_distribution(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.distribution(key="foo", value=0) backend.distribution(key="foo", value=42.0) @@ -132,7 +132,7 @@ def test_distribution(backend, scope): def test_timing(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.timing(key="foo", value=42.1, tags={"x": "y"}) @@ -150,7 +150,7 @@ def test_timing(backend, scope): def test_timing_duplicate(backend, scope): - with scope.start_transaction(): + with scope.start_span(): # We often manually track a span + a timer with same name. In this case # we want no additional span. 
with scope.start_span(op="test"): @@ -188,7 +188,7 @@ def __getattr__(self, name): # make sure the backend feeds back to itself with mock.patch("sentry.utils.metrics.backend", new=TrackingCompositeBackend()) as backend: - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="sentrytest.composite", tags={"x": "bar"}) full_flush(scope) diff --git a/tests/sentry/taskworker/test_task.py b/tests/sentry/taskworker/test_task.py index e435771be0e213..626f9c6176801e 100644 --- a/tests/sentry/taskworker/test_task.py +++ b/tests/sentry/taskworker/test_task.py @@ -258,7 +258,7 @@ def test_create_activation_tracing(task_namespace: TaskNamespace) -> None: def with_parameters(one: str, two: int, org_id: int) -> None: raise NotImplementedError - with sentry_sdk.start_transaction(op="test.task"): + with sentry_sdk.start_span(op="test.task"): activation = with_parameters.create_activation(["one", 22], {"org_id": 99}) headers = activation.headers @@ -271,7 +271,7 @@ def test_create_activation_tracing_headers(task_namespace: TaskNamespace) -> Non def with_parameters(one: str, two: int, org_id: int) -> None: raise NotImplementedError - with sentry_sdk.start_transaction(op="test.task"): + with sentry_sdk.start_span(op="test.task"): activation = with_parameters.create_activation( ["one", 22], {"org_id": 99}, {"key": "value"} ) From 420b0e8b20b286e8cbcf3de523967ad0e27ec9e8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 13:21:09 +0200 Subject: [PATCH 08/66] Changed continue_trace to use context manager --- src/sentry/taskworker/workerchild.py | 68 ++++++++++++++-------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index dd150625717b88..9f03c69f1d5ac8 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -281,41 +281,41 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - kwargs = parameters.get("kwargs", {}) headers = {k: v for k, v in activation.headers.items()} - transaction = sentry_sdk.continue_trace( - environ_or_headers=headers, - op="queue.task.taskworker", - name=f"{activation.namespace}:{activation.taskname}", - origin="taskworker", - ) - with ( - track_memory_usage("taskworker.worker.memory_change"), - sentry_sdk.start_span(transaction), - ): - transaction.set_attribute( - "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} - ) - task_added_time = activation.received_at.ToDatetime().timestamp() - latency = time.time() - task_added_time - - with sentry_sdk.start_span( - op=OP.QUEUE_PROCESS, - name=activation.taskname, - origin="taskworker", - ) as span: - span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) - span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) - span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) - span.set_attribute( - SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, activation.retry_state.attempts + with sentry_sdk.continue_trace(headers): + with ( + track_memory_usage("taskworker.worker.memory_change"), + sentry_sdk.start_span( + environ_or_headers=headers, + op="queue.task.taskworker", + name=f"{activation.namespace}:{activation.taskname}", + origin="taskworker", + ) as root_span, + ): + root_span.set_attribute( + "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} ) - span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") - - try: - 
task_func(*args, **kwargs) - transaction.set_status(SPANSTATUS.OK) - except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) - raise + task_added_time = activation.received_at.ToDatetime().timestamp() + latency = time.time() - task_added_time + + with sentry_sdk.start_span( + op=OP.QUEUE_PROCESS, + name=activation.taskname, + origin="taskworker", + ) as span: + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, activation.retry_state.attempts + ) + span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") + + try: + task_func(*args, **kwargs) + root_span.set_status(SPANSTATUS.OK) + except Exception: + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + raise def record_task_execution( activation: TaskActivation, From 4896797dfaae79829b85dd2fb7b77c2eaa7a10a0 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 13:29:31 +0200 Subject: [PATCH 09/66] Removed set_measurements helper function because it is not used in codebase and also gone in SDK 3.0 --- src/sentry/utils/sdk.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 7898b56710c522..d02b25a7028b07 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -695,15 +695,6 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str: ) -def set_measurement(measurement_name, value, unit=None): - try: - transaction = sentry_sdk.Scope.get_current_scope().transaction - if transaction is not None: - transaction.set_measurement(measurement_name, value, unit) - except Exception: - pass - - def set_span_data(data_name, value): span = sentry_sdk.get_current_span() if span is not None: @@ -746,6 +737,5 @@ def merge_context_into_scope( "patch_transport_for_instrumentation", "isolation_scope", "set_current_event_project", - "set_measurement", "traces_sampler", ) From 47401ba4ea7ff938e2a7bb9a54f6d99f3cb34224 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 13:49:09 +0200 Subject: [PATCH 10/66] oops --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a730185673796e..ca09d4e297e76c 100644 --- a/Makefile +++ b/Makefile @@ -130,7 +130,7 @@ COV_ARGS = --cov-report="xml:.artifacts/python.coverage.xml" test-python-ci: @echo "--> Running CI Python tests" python3 -b -m pytest \ - tests/relay_integration/test_sdk.py \ + tests \ --ignore tests/acceptance \ --ignore tests/apidocs \ --ignore tests/js \ From ea9f4463e7ce317ef8e0d0fa1b5332e504ad7837 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 14:08:36 +0200 Subject: [PATCH 11/66] marked places where dict-attributes are --- src/sentry/api/endpoints/group_current_release.py | 1 + src/sentry/api/endpoints/organization_events_stats.py | 1 + src/sentry/api/endpoints/organization_events_timeseries.py | 1 + .../organization_on_demand_metrics_estimation_stats.py | 1 + src/sentry/issues/grouptype.py | 1 + src/sentry/snuba/query_subscriptions/consumer.py | 1 + src/sentry/tagstore/snuba/backend.py | 2 ++ 7 files changed, 8 insertions(+) diff --git a/src/sentry/api/endpoints/group_current_release.py b/src/sentry/api/endpoints/group_current_release.py index ad2d89b3d3e546..b48886fcd36d2b 100644 --- a/src/sentry/api/endpoints/group_current_release.py +++ 
b/src/sentry/api/endpoints/group_current_release.py @@ -66,6 +66,7 @@ def get(self, request: Request, group) -> Response: with sentry_sdk.start_span(op="CurrentReleaseEndpoint.get.current_release") as span: span.set_attribute("Environment Count", len(environments)) + # TODO-anton: split dict into multiple attributes span.set_attribute( "Raw Parameters", { diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index f85bb2c8b4b1a8..db1dab6a06f5ca 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -190,6 +190,7 @@ def get(self, request: Request, organization: Organization) -> Response: query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: + # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) top_events = 0 diff --git a/src/sentry/api/endpoints/organization_events_timeseries.py b/src/sentry/api/endpoints/organization_events_timeseries.py index 6cc50a96c01a3b..5d2d54ee5162f0 100644 --- a/src/sentry/api/endpoints/organization_events_timeseries.py +++ b/src/sentry/api/endpoints/organization_events_timeseries.py @@ -152,6 +152,7 @@ def get_comparison_delta(self, request: Request) -> timedelta | None: def get(self, request: Request, organization: Organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: + # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) top_events = self.get_top_events(request) diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py index 6250942232a019..b16751fc006fc1 100644 --- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py +++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py @@ -67,6 +67,7 @@ def get(self, request: Request, organization: Organization) -> Response: return Response({"detail": "missing required parameter yAxis"}, status=400) with sentry_sdk.start_span(op="discover.metrics.endpoint", name="get_full_metrics") as span: + # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) try: diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index bb5341cf7f6ff9..60e3acefa4674e 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -126,6 +126,7 @@ def get_visible( span.set_tag("has_batch_features", batch_features is not None) span.set_tag("released", released) span.set_tag("enabled", enabled) + # TODO-anton: split dict into multiple attributes span.set_attribute("feature_to_grouptype", feature_to_grouptype) return released + enabled diff --git a/src/sentry/snuba/query_subscriptions/consumer.py b/src/sentry/snuba/query_subscriptions/consumer.py index 6302c659981347..4fa7525881a077 100644 --- a/src/sentry/snuba/query_subscriptions/consumer.py +++ b/src/sentry/snuba/query_subscriptions/consumer.py @@ -163,6 +163,7 @@ def handle_message( tags={"dataset": dataset}, ), ): + # TODO-anton: split dict into multiple attributes span.set_attribute("payload", contents) span.set_attribute("subscription_dataset", subscription.snuba_query.dataset) span.set_attribute("subscription_query", subscription.snuba_query.query) diff --git 
a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index 6f38059b8e5231..be9def0ece12ee 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -335,6 +335,7 @@ def __get_tag_keys_for_projects( ) as span: result = cache.get(cache_key, None) + # TODO-anton: are array attributes really supported? span.set_attribute("cache.key", [cache_key]) if result is not None: @@ -364,6 +365,7 @@ def __get_tag_keys_for_projects( op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" ) as span: cache.set(cache_key, result, 300) + # TODO-anton: are array attributes really supported? span.set_attribute("cache.key", [cache_key]) span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) From 58062aac6abf7ba79a92ab1e273facac567905d3 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 14:20:44 +0200 Subject: [PATCH 12/66] Replaced with --- src/sentry/cache/base.py | 6 +++--- src/sentry/utils/snuba.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sentry/cache/base.py b/src/sentry/cache/base.py index bbbf28ca961316..b608d09c51b5f6 100644 --- a/src/sentry/cache/base.py +++ b/src/sentry/cache/base.py @@ -46,9 +46,9 @@ def _mark_transaction(self, op): if not self.is_default_cache: return - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() # Do not set this tag if we're in the global scope (which roughly - # equates to having a transaction). - if scope.transaction: + # equates to having a root span). + if scope.root_span: sentry_sdk.set_tag(f"{op}_default_cache", "true") sentry_sdk.set_tag("used_default_cache", "true") diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 2c02f290ec1637..2aec815d5551ed 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1091,9 +1091,9 @@ def _apply_cache_and_build_results( use_cache: bool | None = False, ) -> ResultSet: parent_api: str = "" - scope = sentry_sdk.Scope.get_current_scope() - if scope.transaction: - parent_api = scope.transaction.name + scope = sentry_sdk.get_current_scope() + if scope.root_span: + parent_api = scope.root_span.name # Store the original position of the query so that we can maintain the order snuba_requests_list: list[tuple[int, SnubaRequest]] = [] From 9ac4468beac69f96a693ff43f835c9ad1b5f81e9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 14:46:11 +0200 Subject: [PATCH 13/66] one more comment --- src/sentry/taskworker/workerchild.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index 9f03c69f1d5ac8..d2847ff2e51896 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -291,6 +291,7 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - origin="taskworker", ) as root_span, ): + # TODO-anton: split dict into multiple attributes root_span.set_attribute( "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} ) From 20c7098e273eed5f5eba817332360ee9f9b07ae1 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 21 May 2025 16:03:29 +0200 Subject: [PATCH 14/66] more comments --- src/sentry/issues/endpoints/organization_issues_count.py | 1 + src/sentry/relay/config/metric_extraction.py | 1 + 2 files changed, 2 insertions(+) diff --git a/src/sentry/issues/endpoints/organization_issues_count.py 
b/src/sentry/issues/endpoints/organization_issues_count.py index 40126fbef70a69..3f833ab4de18a0 100644 --- a/src/sentry/issues/endpoints/organization_issues_count.py +++ b/src/sentry/issues/endpoints/organization_issues_count.py @@ -63,6 +63,7 @@ def _count( query_kwargs["actor"] = request.user with start_span(op="start_search") as span: + # TODO-anton: split dict into multiple attributes span.set_attribute("query_kwargs", query_kwargs) result = search.backend.query(**query_kwargs) return result.hits diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index 9a094fe7005d10..4158c933b2720e 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -793,6 +793,7 @@ def _convert_aggregate_and_query_to_metrics( } with sentry_sdk.start_span(op="converting_aggregate_and_query") as span: + # TODO-anton: split dict into multiple attributes span.set_attribute("widget_query_args", {"query": query, "aggregate": aggregate}) # Create as many specs as we support for spec_version in OnDemandMetricSpecVersioning.get_spec_versions(): From 22bfae5b87d7d118b0d40dfa2054e7838db8895f Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 12:31:33 +0200 Subject: [PATCH 15/66] Merge branch 'master' into antonpirker/code-changes-for-sdk-3.0.0 From d72b7c388d9139b204ae3d5e1892b15c7f74d1db Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 12:32:08 +0200 Subject: [PATCH 16/66] bump pytest-sentry --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index f73849148f1f64..f18ba808ca4e82 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,7 +16,7 @@ pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 pytest-rerunfailures>=15 -pytest-sentry>=0.3.0,<0.4.0 +pytest-sentry>=0.4.0 pytest-workaround-12888 pytest-xdist>=3 responses>=0.23.1 From cbc0ab54c518862c4d593ce072c2c0faa94d40a4 Mon Sep 17 00:00:00 2001 From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 10:33:25 +0000 Subject: [PATCH 17/66] :snowflake: re-freeze requirements --- requirements-dev-frozen.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 616c88f0569747..ed1964613291dd 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -156,7 +156,7 @@ pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 pytest-rerunfailures==15.0 -pytest-sentry==0.3.0 +pytest-sentry==0.4.0 pytest-workaround-12888==1.0.0 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 From 6a67a425603e5fd505a04a4716dc34aeb942580a Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 13:06:03 +0200 Subject: [PATCH 18/66] trigger ci From 6be5d4f61053e43ea86a89bebe5f71559aca89d4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 13:24:37 +0200 Subject: [PATCH 19/66] update pytest-sentry --- requirements-dev-frozen.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index ed1964613291dd..841930c11e449a 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -156,7 +156,7 @@ pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 pytest-rerunfailures==15.0 -pytest-sentry==0.4.0 +pytest-sentry==0.4.1 pytest-workaround-12888==1.0.0 pytest-xdist==3.0.2 
python-dateutil==2.9.0.post0 From fbb57947ceb29578a8bae08073d2a29376789227 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 14:00:07 +0200 Subject: [PATCH 20/66] Replaced .transaction with .root_span --- src/sentry/scim/endpoints/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index b40562e373f7ec..54e9ddaa725aca 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -25,9 +25,9 @@ class SCIMApiError(APIException): def __init__(self, detail, status_code=400): - transaction = sentry_sdk.Scope.get_current_scope().transaction - if transaction is not None: - transaction.set_tag("http.status_code", status_code) + root_span = sentry_sdk.get_current_scope().root_span + if root_span is not None: + root_span.set_tag("http.status_code", status_code) super().__init__({"schemas": [SCIM_API_ERROR], "detail": detail}) self.status_code = status_code From 262d7ec2d3337a6fb958cccc67964b6530cb1fe0 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 23 May 2025 14:01:11 +0200 Subject: [PATCH 21/66] Always use top level api to get scope --- src/sentry/data_export/tasks.py | 2 +- src/sentry/hybridcloud/rpc/service.py | 2 +- src/sentry/ingest/transaction_clusterer/rules.py | 2 +- .../source_code_management/commit_context.py | 2 +- .../source_code_management/repository.py | 2 +- src/sentry/integrations/utils/scope.py | 2 +- .../web/organization_integration_setup.py | 2 +- .../middleware/integrations/classifications.py | 2 +- src/sentry/replays/consumers/recording.py | 4 ++-- src/sentry/replays/usecases/ingest/__init__.py | 4 ++-- src/sentry/reprocessing2.py | 2 +- src/sentry/scim/endpoints/members.py | 2 +- .../api/endpoints/sentry_app_authorizations.py | 2 +- src/sentry/shared_integrations/client/base.py | 2 +- src/sentry/testutils/pytest/sentry.py | 4 ++-- src/sentry/utils/concurrent.py | 8 ++++---- src/sentry/utils/sdk.py | 2 +- src/sentry/utils/snuba.py | 12 ++++++------ src/sentry/utils/snuba_rpc.py | 14 ++++++-------- tests/relay_integration/test_sdk.py | 10 +++++----- .../integrations/api/bases/test_integration.py | 2 +- tests/sentry/metrics/test_minimetrics.py | 2 +- tests/sentry/utils/test_sdk.py | 4 ++-- 23 files changed, 44 insertions(+), 46 deletions(-) diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py index d070749e84f87a..65657ee09b6e96 100644 --- a/src/sentry/data_export/tasks.py +++ b/src/sentry/data_export/tasks.py @@ -381,7 +381,7 @@ def merge_export_blobs(data_export_id, **kwargs): def _set_data_on_scope(data_export): - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if data_export.user_id: user = dict(id=data_export.user_id) scope.set_user(user) diff --git a/src/sentry/hybridcloud/rpc/service.py b/src/sentry/hybridcloud/rpc/service.py index d630c0ca9862ea..b117ccdebdf84e 100644 --- a/src/sentry/hybridcloud/rpc/service.py +++ b/src/sentry/hybridcloud/rpc/service.py @@ -596,7 +596,7 @@ def _remote_exception(self, message: str) -> RpcRemoteException: def _raise_from_response_status_error(self, response: requests.Response) -> NoReturn: rpc_method = f"{self.service_name}.{self.method_name}" - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("rpc_method", rpc_method) scope.set_tag("rpc_status_code", response.status_code) diff --git a/src/sentry/ingest/transaction_clusterer/rules.py 
b/src/sentry/ingest/transaction_clusterer/rules.py index 901694971dd7da..4d08193dcd2824 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -147,7 +147,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet: if self.MERGE_MAX_RULES < len(rules): set_span_data("discarded_rules", len(rules) - self.MERGE_MAX_RULES) - sentry_sdk.Scope.get_isolation_scope().set_context( + sentry_sdk.get_isolation_scope().set_context( "clustering_rules_max", { "num_existing_rules": len(rules), diff --git a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py index b0ac2f586aa7c0..0aefd81f55ad4a 100644 --- a/src/sentry/integrations/source_code_management/commit_context.py +++ b/src/sentry/integrations/source_code_management/commit_context.py @@ -266,7 +266,7 @@ def queue_pr_comment_task_if_needed( ), extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("queue_comment_check.merge_commit_sha", commit.key) scope.set_tag("queue_comment_check.organization_id", commit.organization_id) diff --git a/src/sentry/integrations/source_code_management/repository.py b/src/sentry/integrations/source_code_management/repository.py index 91d97e36d5deb8..386b289d0cc021 100644 --- a/src/sentry/integrations/source_code_management/repository.py +++ b/src/sentry/integrations/source_code_management/repository.py @@ -178,7 +178,7 @@ def get_stacktrace_link( "organization_id": repo.organization_id, } ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("stacktrace_link.tried_version", False) def encode_url(url: str) -> str: diff --git a/src/sentry/integrations/utils/scope.py b/src/sentry/integrations/utils/scope.py index 9a99e3349deaac..efccbe0f2fb44d 100644 --- a/src/sentry/integrations/utils/scope.py +++ b/src/sentry/integrations/utils/scope.py @@ -24,7 +24,7 @@ def clear_tags_and_context() -> None: """Clear certain tags and context since it should not be set.""" reset_values = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() for tag in ["organization", "organization.slug"]: if tag in scope._tags: diff --git a/src/sentry/integrations/web/organization_integration_setup.py b/src/sentry/integrations/web/organization_integration_setup.py index fc207e76199eee..2b583a22ae2b2e 100644 --- a/src/sentry/integrations/web/organization_integration_setup.py +++ b/src/sentry/integrations/web/organization_integration_setup.py @@ -21,7 +21,7 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False def handle(self, request: HttpRequest, organization, provider_id) -> HttpResponseBase: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_transaction_name(f"integration.{provider_id}", source=TransactionSource.VIEW) pipeline = IntegrationPipeline( diff --git a/src/sentry/middleware/integrations/classifications.py b/src/sentry/middleware/integrations/classifications.py index cf1dbf119f7f69..5200e0b492ec70 100644 --- a/src/sentry/middleware/integrations/classifications.py +++ b/src/sentry/middleware/integrations/classifications.py @@ -124,7 +124,7 @@ def get_response(self, request: HttpRequest) -> HttpResponseBase: parser_class = self.integration_parsers.get(provider) if not parser_class: - scope = 
sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("provider", provider) scope.set_tag("path", request.path) sentry_sdk.capture_exception( diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index f376f888ddc3d1..bb611eecfa52e1 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -79,8 +79,8 @@ def process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage def commit_message(message: Message[ProcessedRecordingMessage]) -> None: - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() - with sentry_sdk.scope.use_isolation_scope(isolation_scope): + isolation_scope = sentry_sdk.get_isolation_scope().fork() + with sentry_sdk.use_isolation_scope(isolation_scope): with sentry_sdk.start_span( name="replays.consumer.recording_buffered.commit_message", op="replays.consumer.recording_buffered.commit_message", diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index 9a1b8ec6ba694b..d1ebacbfd6f85c 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -92,9 +92,9 @@ class RecordingIngestMessage: def ingest_recording(message_bytes: bytes) -> None: """Ingest non-chunked recording messages.""" - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() + isolation_scope = sentry_sdk.get_isolation_scope().fork() - with sentry_sdk.scope.use_isolation_scope(isolation_scope): + with sentry_sdk.use_isolation_scope(isolation_scope): with sentry_sdk.start_span( name="replays.consumer.process_recording", op="replays.consumer", diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index 88b0bbe2046821..73635481fa1606 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -372,7 +372,7 @@ def buffered_delete_old_primary_hash( old_primary_hashes.add(old_primary_hash) reprocessing_store.add_hash(project_id, group_id, old_primary_hash) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("project_id", project_id) scope.set_tag("old_group_id", group_id) scope.set_tag("old_primary_hash", old_primary_hash) diff --git a/src/sentry/scim/endpoints/members.py b/src/sentry/scim/endpoints/members.py index 147c68a9db8160..5cf88a7c4c6740 100644 --- a/src/sentry/scim/endpoints/members.py +++ b/src/sentry/scim/endpoints/members.py @@ -530,7 +530,7 @@ def post(self, request: Request, organization) -> Response: """ update_role = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if "sentryOrgRole" in request.data and request.data["sentryOrgRole"]: role = request.data["sentryOrgRole"].lower() diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py index 00126c0d5dcf07..a9cefb385e6856 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py @@ -39,7 +39,7 @@ class SentryAppAuthorizationsEndpoint(SentryAppAuthorizationsBaseEndpoint): } def post(self, request: Request, installation) -> Response: - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("organization", installation.organization_id) scope.set_tag("sentry_app_id", installation.sentry_app.id) diff --git 
a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py index 11f0a6cc32d701..e8560f8cf06102 100644 --- a/src/sentry/shared_integrations/client/base.py +++ b/src/sentry/shared_integrations/client/base.py @@ -231,7 +231,7 @@ def _request( ) if self.integration_type: - sentry_sdk.Scope.get_isolation_scope().set_tag(self.integration_type, self.name) + sentry_sdk.get_isolation_scope().set_tag(self.integration_type, self.name) request = Request( method=method.upper(), diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index 09fc1412fab558..fde3c33130819e 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -286,7 +286,7 @@ def pytest_configure(config: pytest.Config) -> None: from sentry.runner.initializer import initialize_app initialize_app({"settings": settings, "options": None}) - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) register_extensions() from sentry.utils.redis import clusters @@ -362,7 +362,7 @@ def pytest_runtest_teardown(item: pytest.Item) -> None: ProjectOption.objects.clear_local_cache() UserOption.objects.clear_local_cache() - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) def _shuffle(items: list[pytest.Item], r: random.Random) -> None: diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 82e1b706d2fa47..4f3ebab76b6077 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py @@ -202,8 +202,8 @@ def __worker(self): while True: priority, item = queue.get(True) thread_isolation_scope, thread_current_scope, function, future = item - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): if not future.set_running_or_notify_cancel(): continue try: @@ -247,8 +247,8 @@ def submit[ task = PriorityTask( priority, ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), callable, future, ), diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 2d3f7b9e1118c0..6cad880a1aa445 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -580,7 +580,7 @@ def check_current_scope_transaction( Note: Ignores scope `transaction` values with `source = "custom"`, indicating a value which has been set maunually. 
""" - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() transaction_from_request = get_transaction_name_from_request(request) if ( diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 2aec815d5551ed..463ec50afcc893 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1161,8 +1161,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: _snuba_query, [ ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), snuba_request, ) for snuba_request in snuba_requests_list @@ -1174,8 +1174,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: query_results = [ _snuba_query( ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), snuba_requests_list[0], ) ) @@ -1285,8 +1285,8 @@ def _snuba_query( # Eventually we can get rid of this wrapper, but for now it's cleaner to unwrap # the params here than in the calling function. (bc of thread .map) thread_isolation_scope, thread_current_scope, snuba_request = params - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): headers = snuba_request.headers request = snuba_request.request try: diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index 7f6d2609adc787..eda933f969b55f 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -122,8 +122,8 @@ def _make_rpc_requests( # Sets the thread parameters once so we're not doing it in the map repeatedly partial_request = partial( _make_rpc_request, - thread_isolation_scope=sentry_sdk.Scope.get_isolation_scope(), - thread_current_scope=sentry_sdk.Scope.get_current_scope(), + thread_isolation_scope=sentry_sdk.get_isolation_scope(), + thread_current_scope=sentry_sdk.get_current_scope(), ) response = [ result @@ -248,21 +248,19 @@ def _make_rpc_request( thread_current_scope: sentry_sdk.Scope | None = None, ) -> BaseHTTPResponse: thread_isolation_scope = ( - sentry_sdk.Scope.get_isolation_scope() + sentry_sdk.get_isolation_scope() if thread_isolation_scope is None else thread_isolation_scope ) thread_current_scope = ( - sentry_sdk.Scope.get_current_scope() - if thread_current_scope is None - else thread_current_scope + sentry_sdk.get_current_scope() if thread_current_scope is None else thread_current_scope ) if SNUBA_INFO: from google.protobuf.json_format import MessageToJson log_snuba_info(f"{referrer}.body:\n{MessageToJson(req)}") # type: ignore[arg-type] - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): with sentry_sdk.start_span(op="snuba_rpc.run", name=req.__class__.__name__) as span: if referrer: span.set_tag("snuba.referrer", referrer) diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index d9853f27447b21..ee8bac7c907956 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -46,8 +46,8 @@ def inner(*args, **kwargs): assert event_id is not None sentry_sdk.flush() - with 
sentry_sdk.scope.use_scope(current_scope): - with sentry_sdk.scope.use_isolation_scope(isolation_scope): + with sentry_sdk.use_scope(current_scope): + with sentry_sdk.use_isolation_scope(isolation_scope): return wait_for_ingest_consumer( lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) ) @@ -108,7 +108,7 @@ def test_bind_organization_context(default_organization): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id assert scope._tags["organization.slug"] == default_organization.slug assert scope._contexts["organization"] == { @@ -130,7 +130,7 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization.test"] == "1" @@ -146,5 +146,5 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id diff --git a/tests/sentry/integrations/api/bases/test_integration.py b/tests/sentry/integrations/api/bases/test_integration.py index 362cc430fa90dd..451dc6437caf15 100644 --- a/tests/sentry/integrations/api/bases/test_integration.py +++ b/tests/sentry/integrations/api/bases/test_integration.py @@ -15,7 +15,7 @@ class IntegrationEndpointTest(TestCase): # Since both `IntegrationEndpoint.handle_exception_with_details` and `Endpoint.handle_exception_with_details` potentially # run, and they both call their own module's copy of `capture_exception`, in order to prove that # neither one is not called, we assert on the underlying method from the SDK - @patch("sentry_sdk.Scope.capture_exception") + @patch("sentry_sdk.capture_exception") def test_handle_rest_framework_exception( self, mock_capture_exception: MagicMock, mock_stderror_write: MagicMock ): diff --git a/tests/sentry/metrics/test_minimetrics.py b/tests/sentry/metrics/test_minimetrics.py index a348126b86489d..f7739b703c70b3 100644 --- a/tests/sentry/metrics/test_minimetrics.py +++ b/tests/sentry/metrics/test_minimetrics.py @@ -51,7 +51,7 @@ def scope(): traces_sample_rate=1.0, ), ) - with sentry_sdk.scope.use_scope(scope): + with sentry_sdk.use_scope(scope): yield scope diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 1609caa4ed86d4..592547b863d681 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -216,7 +216,7 @@ def test_scope_has_correct_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/dogs/{name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch is None @@ -225,7 +225,7 @@ def test_scope_has_wrong_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/tricks/{trick_name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with 
patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch == { "scope_transaction": "/tricks/{trick_name}/", From bb55c2cde3eac43b167b225a4c95ad51617a0ac2 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 10:55:25 +0200 Subject: [PATCH 22/66] use sample_rate in attributes instead of custom_sampling_context --- src/sentry/ingest/consumer/processors.py | 7 ++----- src/sentry/utils/sdk.py | 5 +++++ 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index 27735d3f6929fd..ac8cc765396984 100644 --- a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -54,11 +54,8 @@ def inner(*args, **kwargs): default=getattr(settings, "SENTRY_INGEST_CONSUMER_APM_SAMPLING", 0), ) ) - # New behavior is to add a custom `sample_rate` that is picked up by `traces_sampler` - span_kwargs.setdefault( - "custom_sampling_context", - {"sample_rate": sample_rate}, - ) + # New behavior is to add a custom `sentry.sample_rate` that is picked up by `traces_sampler` + span_kwargs.setdefault("attributes", {}).setdefault("sentry.sample_rate", sample_rate) with sentry_sdk.start_span(**span_kwargs): return f(*args, **kwargs) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 6cad880a1aa445..cbf17fa0dce76a 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -183,6 +183,11 @@ def traces_sampler(sampling_context): if wsgi_path and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] + # make it fail to I know what tests use this. + custom_sample_rate = sampling_context.get("attributes", {}).get("sentry.sample_rate") + if custom_sample_rate is not None: + raise Exception("some error") + # Apply sample_rate from custom_sampling_context custom_sample_rate = sampling_context.get("sample_rate") if custom_sample_rate is not None: From 767ff19af26421aedaa979c57c1bb77edba24111 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 12:33:28 +0200 Subject: [PATCH 23/66] trigger ci From d0713ae9e28414d039f04cf8d13dfef24d89ac59 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 13:00:40 +0200 Subject: [PATCH 24/66] Replaced .containing_transaction with .root_span --- .../performance_issues/performance_detection.py | 4 ++-- tests/sentry/event_manager/test_event_manager.py | 12 ++++++------ .../experiments/test_m_n_plus_one_db_detector.py | 2 +- .../test_m_n_plus_one_db_detector.py | 2 +- .../performance_issues/test_performance_detection.py | 4 ++-- .../web/frontend/test_newest_performance_issue.py | 4 ++-- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index d7f4c6daa4e7a0..f3026a4d99749a 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ b/src/sentry/utils/performance_issues/performance_detection.py @@ -503,8 +503,8 @@ def report_metrics_for_detectors( sdk_name = get_sdk_name(event) try: - # Setting a tag isn't critical, the transaction doesn't exist sometimes, if it's called outside prod code (eg. load-mocks / tests) - set_tag = sdk_span.containing_transaction.set_tag + # Setting a tag isn't critical, the root span doesn't exist sometimes, if it's called outside prod code (eg. 
load-mocks / tests) + set_tag = sdk_span.root_span.set_tag except AttributeError: set_tag = lambda *args: None diff --git a/tests/sentry/event_manager/test_event_manager.py b/tests/sentry/event_manager/test_event_manager.py index 21a3697745a24f..d40275079366c9 100644 --- a/tests/sentry/event_manager/test_event_manager.py +++ b/tests/sentry/event_manager/test_event_manager.py @@ -2425,7 +2425,7 @@ def test_category_match_group(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2519,7 +2519,7 @@ def test_perf_issue_creation(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_update(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2560,7 +2560,7 @@ def test_perf_issue_update(self) -> None: @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_error_issue_no_associate_perf_event(self) -> None: """Test that you can't associate a performance event with an error issue""" - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2581,7 +2581,7 @@ def test_error_issue_no_associate_perf_event(self) -> None: @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_no_associate_error_event(self) -> None: """Test that you can't associate an error event with a performance issue""" - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): manager = EventManager(make_event()) manager.normalize() event = manager.save(self.project.id) @@ -2601,7 +2601,7 @@ def test_perf_issue_no_associate_error_event(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation_ignored(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=2, @@ -2612,7 +2612,7 @@ def test_perf_issue_creation_ignored(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation_over_ignored_threshold(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event_1 = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3 ) diff --git 
a/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py b/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py index 6c87d64415bc87..12f2acc978e179 100644 --- a/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py +++ b/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py @@ -213,7 +213,7 @@ def test_m_n_plus_one_detector_enabled(self): event = get_event("m-n-plus-one-db/m-n-plus-one-graphql") sdk_span_mock = Mock() _detect_performance_problems(event, sdk_span_mock, self.create_project()) - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + sdk_span_mock.root_span.set_tag.assert_has_calls( [ # Current + Experimental Detector call("_pi_all_issue_count", 2), diff --git a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py index 7668cfb0e7ad1b..734dfaddcfac01 100644 --- a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py +++ b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py @@ -127,7 +127,7 @@ def test_m_n_plus_one_detector_enabled(self): event = get_event("m-n-plus-one-db/m-n-plus-one-graphql") sdk_span_mock = Mock() _detect_performance_problems(event, sdk_span_mock, self.create_project()) - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + sdk_span_mock.root_span.set_tag.assert_has_calls( [ call("_pi_all_issue_count", 1), call("_pi_sdk_name", "sentry.javascript.node"), diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py index 7b6b07ab49c518..ce31c2b4dcf1ce 100644 --- a/tests/sentry/utils/performance_issues/test_performance_detection.py +++ b/tests/sentry/utils/performance_issues/test_performance_detection.py @@ -414,8 +414,8 @@ def test_detects_performance_issues_in_n_plus_one_query(self): perf_problems = _detect_performance_problems(n_plus_one_event, sdk_span_mock, self.project) - assert sdk_span_mock.containing_transaction.set_tag.call_count == 6 - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + assert sdk_span_mock.root_span.set_tag.call_count == 6 + sdk_span_mock.root_span.set_tag.assert_has_calls( [ call( "_pi_all_issue_count", diff --git a/tests/sentry/web/frontend/test_newest_performance_issue.py b/tests/sentry/web/frontend/test_newest_performance_issue.py index ff56059ad67ce8..71a59f48536eb0 100644 --- a/tests/sentry/web/frontend/test_newest_performance_issue.py +++ b/tests/sentry/web/frontend/test_newest_performance_issue.py @@ -46,7 +46,7 @@ def setUp(self): self.login_as(self.user) def test_simple(self): - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): latest_event_time = time() older_event_time = latest_event_time - 300 @@ -80,7 +80,7 @@ def test_simple(self): @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) @with_feature("system:multi-region") def test_simple_customer_domains(self): - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): latest_event_time = time() older_event_time = latest_event_time - 300 From 539ba803cd6a5475d9816e8ec0537dd572d989c4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 13:36:11 +0200 Subject: [PATCH 25/66] Removed unused option --- 
src/sentry/taskworker/workerchild.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index d2847ff2e51896..7b4b6842f93283 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -285,7 +285,7 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - with ( track_memory_usage("taskworker.worker.memory_change"), sentry_sdk.start_span( - environ_or_headers=headers, op="queue.task.taskworker", name=f"{activation.namespace}:{activation.taskname}", origin="taskworker",
From 77db522a24236d10edbcdcf08885178c6c535110 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 14:03:58 +0200 Subject: [PATCH 26/66] set num_of_spans in event.data not event.measurements --- src/sentry/utils/sdk.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index cbf17fa0dce76a..121a1cbf53d6ed 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -244,12 +244,9 @@ def before_send_transaction(event: Event, _: Hint) -> Event | None: num_of_spans = len(event["spans"]) event["tags"]["spans_over_limit"] = str(num_of_spans >= 1000) - if not event["measurements"]: - event["measurements"] = {} - event["measurements"]["num_of_spans"] = { - "value": num_of_spans, - "unit": None, - } + + event.setdefault("data", {}).setdefault("num_of_spans", num_of_spans) + return event
From 8ac865c487461ed7e673d4ffc6c3a7d6f4fde5bb Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 14:54:05 +0200 Subject: [PATCH 27/66] fixed linting --- src/sentry/integrations/web/organization_integration_setup.py | 2 +- src/sentry/utils/snuba.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/integrations/web/organization_integration_setup.py b/src/sentry/integrations/web/organization_integration_setup.py index 2b583a22ae2b2e..7a25e9ae25a91e 100644 --- a/src/sentry/integrations/web/organization_integration_setup.py +++ b/src/sentry/integrations/web/organization_integration_setup.py @@ -3,7 +3,7 @@ import sentry_sdk from django.http import Http404, HttpRequest from django.http.response import HttpResponseBase -from sentry_sdk.tracing import TransactionSource +from sentry_sdk.consts import TransactionSource from sentry import features from sentry.features.exceptions import FeatureNotRegistered diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 463ec50afcc893..fc5ed54270ccb6 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1092,7 +1092,7 @@ def _apply_cache_and_build_results( ) -> ResultSet: parent_api: str = "" scope = sentry_sdk.get_current_scope() - if scope.root_span: + if scope.root_span and scope.root_span.name: parent_api = scope.root_span.name # Store the original position of the query so that we can maintain the order
From c9aed13b8639dbd75f32f1a3d6f7b89e107d1fea Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 15:29:56 +0200 Subject: [PATCH 28/66] Ignore measurements mypy check (because measurements have already been removed from sentry sdk) --- src/sentry/utils/sdk.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 121a1cbf53d6ed..2af4419531e48b 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -245,8 +245,15 @@ def before_send_transaction(event: Event, _: Hint) -> Event | None: event["tags"]["spans_over_limit"] = str(num_of_spans >= 1000) - event.setdefault("data", {}).setdefault("num_of_spans", num_of_spans) - + # `measurements` are deprecated and have already been removed from the Python SDK. + # We ignore those lines in the mypy check because the SDKs Event type does not have `measurements` anymore. + # (on ingest and in the product measurements are still there so it is fine to set them for the time being) + if not event["measurements"]: # type: ignore[typeddict-item] + event["measurements"] = {} # type: ignore[typeddict-unknown-key] + event["measurements"]["num_of_spans"] = { # type: ignore[typeddict-item] + "value": num_of_spans, + "unit": None, + } return event
From 2f8772754946c3c26334ec1ab1882d292b952fbe Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 15:30:29 +0200 Subject: [PATCH 29/66] replaced sentry_sdk.init() with sentry_sdk.new_scope() and set_client() --- src/sentry/runner/main.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index 9fdf90600d08ca..b4c7b242af38a6 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -130,12 +130,14 @@ def main() -> None: # We do this here because `configure_structlog` executes later logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) logger = logging.getLogger(__name__) - try: func(**kwargs) except Exception as e: # This reports errors sentry-devservices - with sentry_sdk.init(dsn=os.environ["SENTRY_DEVSERVICES_DSN"]): + with sentry_sdk.new_scope() as scope: + scope.set_client( + sentry_sdk.Client(dsn=os.environ["SENTRY_DEVSERVICES_DSN"], debug=True) + ) if os.environ.get("USER"): sentry_sdk.set_user({"username": os.environ.get("USER")}) sentry_sdk.capture_exception(e)
From eda30c8501678de79627a45c0d6a310cbc1fe047 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 26 May 2025 15:32:23 +0200 Subject: [PATCH 30/66] formatting --- src/sentry/runner/main.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index b4c7b242af38a6..68351532163c2f 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -135,9 +135,7 @@ def main() -> None: except Exception as e: # This reports errors sentry-devservices with sentry_sdk.new_scope() as scope: - scope.set_client( - sentry_sdk.Client(dsn=os.environ["SENTRY_DEVSERVICES_DSN"], debug=True) - ) + scope.set_client(sentry_sdk.Client(dsn=os.environ["SENTRY_DEVSERVICES_DSN"])) if os.environ.get("USER"): sentry_sdk.set_user({"username": os.environ.get("USER")}) sentry_sdk.capture_exception(e)
From 811295a685480e8f4b9d4e56fd5010fc10afe331 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 28 May 2025 09:30:32 +0200 Subject: [PATCH 31/66] does this work?
--- tests/sentry/sentry_apps/api/bases/test_sentryapps.py | 4 +++- .../endpoints/test_sentry_app_installation_external_issues.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py index e8479f62ee97c8..8f037b8716f04b 100644 --- a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py +++ b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py @@ -1,3 +1,5 @@ +from unittest import mock + import pytest from django.contrib.auth.models import AnonymousUser from django.test.utils import override_settings @@ -267,5 +269,5 @@ def test_handle_sentry_app_sentry_error(self): assert response.status_code == 500 assert response.data == error.to_public_dict() assert response.data == { - "detail": f"An issue occured during the integration platform process. Sentry error ID: {None}" + "detail": f"An issue occured during the integration platform process. Sentry error ID: {mock.ANY}" } diff --git a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py index 617aa163c6324a..8a7cefb53946b0 100644 --- a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py +++ b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py @@ -1,3 +1,4 @@ +from unittest import mock from unittest.mock import patch from django.urls import reverse @@ -118,6 +119,6 @@ def test_external_issue_creation_fails_with_db_error(self, mock_update_or_create assert response.status_code == 500 assert response.data == { - "detail": f"An issue occured during the integration platform process. Sentry error ID: {None}" + "detail": f"An issue occured during the integration platform process. Sentry error ID: {mock.ANY}" } mock_update_or_create.assert_called_once() From 3196756c8e51095d19d3f13ac4d10fcfa001e3d0 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 28 May 2025 09:33:52 +0200 Subject: [PATCH 32/66] restart ci From 6fe96cdce1cf276f89448a339d448c8bd7d20916 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 28 May 2025 10:02:46 +0200 Subject: [PATCH 33/66] Updated assert --- tests/sentry/sentry_apps/api/bases/test_sentryapps.py | 8 +++----- .../test_sentry_app_installation_external_issues.py | 7 +++---- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py index 8f037b8716f04b..14241a88081c20 100644 --- a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py +++ b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py @@ -1,5 +1,3 @@ -from unittest import mock - import pytest from django.contrib.auth.models import AnonymousUser from django.test.utils import override_settings @@ -268,6 +266,6 @@ def test_handle_sentry_app_sentry_error(self): assert response.status_code == 500 assert response.data == error.to_public_dict() - assert response.data == { - "detail": f"An issue occured during the integration platform process. Sentry error ID: {mock.ANY}" - } + assert response.data["detail"].startswith( + "An issue occured during the integration platform process. 
Sentry error ID: " + ) diff --git a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py index 8a7cefb53946b0..5030a256ee34eb 100644 --- a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py +++ b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_installation_external_issues.py @@ -1,4 +1,3 @@ -from unittest import mock from unittest.mock import patch from django.urls import reverse @@ -118,7 +117,7 @@ def test_external_issue_creation_fails_with_db_error(self, mock_update_or_create ) assert response.status_code == 500 - assert response.data == { - "detail": f"An issue occured during the integration platform process. Sentry error ID: {mock.ANY}" - } + assert response.data["detail"].startswith( + "An issue occured during the integration platform process. Sentry error ID:" + ) mock_update_or_create.assert_called_once() From b257ce3df90bf42944fa16ba7c8bfa6726836a1f Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 09:23:30 +0200 Subject: [PATCH 34/66] trigger ci From 3d225147e605a67358e5d41c9a26a492db3b98d9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 10:01:40 +0200 Subject: [PATCH 35/66] Renamed set_span_data to set_span_attribute to reflect new api --- src/sentry/api/bases/organization.py | 4 ++-- .../api/endpoints/organization_event_details.py | 6 +++--- .../api/endpoints/organization_events_trace.py | 14 +++++++------- .../organization_profiling_functions.py | 8 +++++--- src/sentry/api/endpoints/organization_tags.py | 4 ++-- src/sentry/api/endpoints/organization_traces.py | 6 +++--- src/sentry/api/utils.py | 4 ++-- src/sentry/event_manager.py | 6 +++--- src/sentry/ingest/transaction_clusterer/rules.py | 4 ++-- src/sentry/models/release.py | 4 ++-- src/sentry/profiles/task.py | 16 ++++++++-------- src/sentry/profiles/utils.py | 4 ++-- src/sentry/search/events/fields.py | 8 ++++---- .../consumers/indexer/processing.py | 8 ++++---- src/sentry/snuba/rpc_dataset_common.py | 6 +++--- src/sentry/utils/sdk.py | 2 +- 16 files changed, 53 insertions(+), 51 deletions(-) diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index f5d43b88a117a3..e55b769e0d66bf 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -37,7 +37,7 @@ from sentry.utils import auth from sentry.utils.hashlib import hash_values from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import bind_organization_context, set_span_data +from sentry.utils.sdk import bind_organization_context, set_span_attribute class NoProjects(Exception): @@ -569,7 +569,7 @@ def get_filter_params( len_projects = len(projects) sentry_sdk.set_tag("query.num_projects", len_projects) sentry_sdk.set_tag("query.num_projects.grouped", format_grouped_length(len_projects)) - set_span_data("query.num_projects", len_projects) + set_span_attribute("query.num_projects", len_projects) params: FilterParams = { "start": start, diff --git a/src/sentry/api/endpoints/organization_event_details.py b/src/sentry/api/endpoints/organization_event_details.py index 456c64ddf52a3a..89ddbf72c78300 100644 --- a/src/sentry/api/endpoints/organization_event_details.py +++ b/src/sentry/api/endpoints/organization_event_details.py @@ -22,7 +22,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer -from 
sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute VALID_AVERAGE_COLUMNS = {"span.self_time", "span.duration"} @@ -39,7 +39,7 @@ def add_comparison_to_event(event, average_columns, request: Request): group_to_span_map[group].append(span) # Nothing to add comparisons to - set_span_data("query.groups", len(group_to_span_map)) + set_span_attribute("query.groups", len(group_to_span_map)) if len(group_to_span_map) == 0: return @@ -77,7 +77,7 @@ def add_comparison_to_event(event, average_columns, request: Request): ), ) ) - set_span_data("query.groups_found", len(result["data"])) + set_span_attribute("query.groups_found", len(result["data"])) for row in result["data"]: group = row["span.group"] for span in group_to_span_map[group]: diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index fb6c5bfdc5bfb7..47d0a41c2fca4d 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -36,7 +36,7 @@ from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked from sentry.utils.numbers import base32_encode, format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import bulk_snuba_queries from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id, is_span_id @@ -761,7 +761,7 @@ def build_span_query(trace_id: str, spans_params: SnubaParams, query_spans: list # Performance improvement, snuba's parser is extremely slow when we're sending thousands of # span_ids here, using a `splitByChar` means that snuba will not parse the giant list of spans span_minimum = options.get("performance.traces.span_query_minimum_spans") - set_span_data("trace_view.spans.span_minimum", span_minimum) + set_span_attribute("trace_view.spans.span_minimum", span_minimum) sentry_sdk.set_tag("trace_view.split_by_char.optimization", len(query_spans) > span_minimum) if len(query_spans) > span_minimum: # TODO: because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt @@ -813,14 +813,14 @@ def augment_transactions_with_spans( projects.add(error["project.id"]) ts_params = find_timestamp_params(transactions) time_buffer = options.get("performance.traces.span_query_timebuffer_hours") - set_span_data("trace_view.spans.time_buffer", time_buffer) + set_span_attribute("trace_view.spans.time_buffer", time_buffer) if ts_params["min"]: params.start = ts_params["min"] - timedelta(hours=time_buffer) if ts_params["max"]: params.end = ts_params["max"] + timedelta(hours=time_buffer) if ts_params["max"] and ts_params["min"]: - set_span_data( + set_span_attribute( "trace_view.trace_duration", (ts_params["max"] - ts_params["min"]).total_seconds() ) sentry_sdk.set_tag("trace_view.missing_timestamp_constraints", False) @@ -899,7 +899,7 @@ def augment_transactions_with_spans( total_chunks = 3 else: total_chunks = 4 - set_span_data("trace_view.span_query.total_chunks", total_chunks) + set_span_attribute("trace_view.span_query.total_chunks", total_chunks) chunks = chunked(list_spans, (len(list_spans) // total_chunks) + 1) queries = [build_span_query(trace_id, spans_params, chunk) for chunk in chunks] results = bulk_snuba_queries( @@ -1046,7 +1046,7 @@ def record_analytics( sentry_sdk.set_tag( "trace_view.transactions.grouped", format_grouped_length(len_transactions) ) - set_span_data("trace_view.transactions", len_transactions) + 
set_span_attribute("trace_view.transactions", len_transactions) projects: set[int] = set() for transaction in transactions: @@ -1055,7 +1055,7 @@ def record_analytics( len_projects = len(projects) sentry_sdk.set_tag("trace_view.projects", len_projects) sentry_sdk.set_tag("trace_view.projects.grouped", format_grouped_length(len_projects)) - set_span_data("trace_view.projects", len_projects) + set_span_attribute("trace_view.projects", len_projects) def get(self, request: Request, organization: Organization, trace_id: str) -> HttpResponse: if not request.user.is_authenticated: diff --git a/src/sentry/api/endpoints/organization_profiling_functions.py b/src/sentry/api/endpoints/organization_profiling_functions.py index e6a0659f991041..768f0710f2e5cc 100644 --- a/src/sentry/api/endpoints/organization_profiling_functions.py +++ b/src/sentry/api/endpoints/organization_profiling_functions.py @@ -24,7 +24,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.utils.dates import parse_stats_period, validate_interval -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import bulk_snuba_queries TOP_FUNCTIONS_LIMIT = 50 @@ -202,7 +202,7 @@ def get_trends_data(stats_data) -> list[BreakpointData]: trending_functions = get_trends_data(stats_data) all_trending_functions_count = len(trending_functions) - set_span_data("profiling.top_functions", all_trending_functions_count) + set_span_attribute("profiling.top_functions", all_trending_functions_count) # Profiling functions have a resolution of ~10ms. To increase the confidence # of the results, the caller can specify a min threshold for the trend difference. @@ -215,7 +215,9 @@ def get_trends_data(stats_data) -> list[BreakpointData]: ] filtered_trending_functions_count = all_trending_functions_count - len(trending_functions) - set_span_data("profiling.top_functions.below_threshold", filtered_trending_functions_count) + set_span_attribute( + "profiling.top_functions.below_threshold", filtered_trending_functions_count + ) # Make sure to sort the results so that it's in order of largest change # to smallest change (ASC/DESC depends on the trend type) diff --git a/src/sentry/api/endpoints/organization_tags.py b/src/sentry/api/endpoints/organization_tags.py index 4fd7c20c293840..8b26a5a9e7f4ff 100644 --- a/src/sentry/api/endpoints/organization_tags.py +++ b/src/sentry/api/endpoints/organization_tags.py @@ -15,7 +15,7 @@ from sentry.api.utils import clamp_date_range, handle_query_errors from sentry.snuba.dataset import Dataset from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute @region_silo_endpoint @@ -83,6 +83,6 @@ def get(self, request: Request, organization) -> Response: format_grouped_length(len(results), [1, 10, 50, 100]), ) sentry_sdk.set_tag("dataset_queried", dataset.value) - set_span_data("custom_tags.count", len(results)) + set_span_attribute("custom_tags.count", len(results)) return Response(serialize(results, request.user)) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 3acc5e84504fd1..72daab5c96a61a 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -49,7 +49,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.utils.numbers import clip -from sentry.utils.sdk import 
set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import bulk_snuba_queries_with_referrers from sentry.utils.snuba_rpc import get_traces_rpc @@ -1417,7 +1417,7 @@ def process_user_queries( where, _ = resolve_conditions(user_query) queries[user_query] = where - set_span_data("user_queries_count", len(queries)) + set_span_attribute("user_queries_count", len(queries)) sentry_sdk.set_context("user_queries", {"raw_queries": user_queries}) return queries @@ -1452,7 +1452,7 @@ def process_rpc_user_queries( if where is not None: queries[user_query] = where - set_span_data("user_queries_count", len(queries)) + set_span_attribute("user_queries_count", len(queries)) sentry_sdk.set_context("user_queries", {"raw_queries": user_queries}) return queries diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index c87ed5d8a0e808..85974be350f0e4 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -44,7 +44,7 @@ from sentry.silo.base import SiloMode from sentry.types.region import get_local_region from sentry.utils.dates import parse_stats_period -from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_data +from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_attribute from sentry.utils.snuba import ( DatasetSelectionError, QueryConnectionFailed, @@ -454,7 +454,7 @@ def update_snuba_params_with_timestamp( # While possible, the majority of traces shouldn't take more than a week # Starting with 3d for now, but potentially something we can increase if this becomes a problem time_buffer = options.get("performance.traces.transaction_query_timebuffer_days") - set_span_data("trace_view.transactions.time_buffer", time_buffer) + set_span_attribute("trace_view.transactions.time_buffer", time_buffer) example_start = example_timestamp - timedelta(days=time_buffer) example_end = example_timestamp + timedelta(days=time_buffer) # If timestamp is being passed it should always overwrite the statsperiod or start & end diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 57566964eecc60..06885524098fe6 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -137,7 +137,7 @@ from sentry.utils.performance_issues.performance_detection import detect_performance_problems from sentry.utils.performance_issues.performance_problem import PerformanceProblem from sentry.utils.safe import get_path, safe_execute, setdefault_path, trim -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.tag_normalization import normalized_sdk_tag_from_event from .utils.event_tracker import TransactionStageStatus, track_sampled_event @@ -2589,8 +2589,8 @@ def save_transaction_events( ) except KeyError: continue - set_span_data("jobs", len(jobs)) - set_span_data("projects", len(projects)) + set_span_attribute("jobs", len(jobs)) + set_span_attribute("projects", len(projects)) # NOTE: Keep this list synchronized with sentry/spans/consumers/process_segments/message.py diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py index 4d08193dcd2824..64da8ea464c6f0 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -9,7 +9,7 @@ from sentry.ingest.transaction_clusterer.rule_validator import RuleValidator from sentry.models.project import Project from sentry.utils import metrics -from sentry.utils.sdk import set_span_data 
+from sentry.utils.sdk import set_span_attribute from .base import ReplacementRule @@ -146,7 +146,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet: sorted_rules = [rule for rule in sorted_rules if rule[1] >= last_seen_deadline] if self.MERGE_MAX_RULES < len(rules): - set_span_data("discarded_rules", len(rules) - self.MERGE_MAX_RULES) + set_span_attribute("discarded_rules", len(rules) - self.MERGE_MAX_RULES) sentry_sdk.get_isolation_scope().set_context( "clustering_rules_max", { diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py index 342f3a75d3f27f..2b07efc2312d42 100644 --- a/src/sentry/models/release.py +++ b/src/sentry/models/release.py @@ -45,7 +45,7 @@ from sentry.utils.db import atomic_transaction from sentry.utils.hashlib import hash_values, md5_text from sentry.utils.numbers import validate_bigint -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute logger = logging.getLogger(__name__) @@ -645,7 +645,7 @@ def set_commits(self, commit_list): This will clear any existing commit log and replace it with the given commits. """ - set_span_data("release.set_commits", len(commit_list)) + set_span_attribute("release.set_commits", len(commit_list)) from sentry.models.releases.set_commits import set_commits diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index aff9f4cb86a89a..eb5efded97095b 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -55,7 +55,7 @@ from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition from sentry.utils.locking import UnableToAcquireLock from sentry.utils.outcomes import Outcome, track_outcome -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute REVERSE_DEVICE_CLASS = {next(iter(tags)): label for label, tags in DEVICE_CLASS.items()} @@ -238,9 +238,9 @@ def process_profile_task( if "version" in profile: version = profile["version"] sentry_sdk.set_tag("format", f"sample_v{version}") - set_span_data("profile.samples", len(profile["profile"]["samples"])) - set_span_data("profile.stacks", len(profile["profile"]["stacks"])) - set_span_data("profile.frames", len(profile["profile"]["frames"])) + set_span_attribute("profile.samples", len(profile["profile"]["samples"])) + set_span_attribute("profile.stacks", len(profile["profile"]["stacks"])) + set_span_attribute("profile.frames", len(profile["profile"]["frames"])) elif "profiler_id" in profile and profile["platform"] == "android": sentry_sdk.set_tag("format", "android_chunk") else: @@ -266,9 +266,9 @@ def process_profile_task( _set_frames_platform(profile) if "version" in profile: - set_span_data("profile.samples.processed", len(profile["profile"]["samples"])) - set_span_data("profile.stacks.processed", len(profile["profile"]["stacks"])) - set_span_data("profile.frames.processed", len(profile["profile"]["frames"])) + set_span_attribute("profile.samples.processed", len(profile["profile"]["samples"])) + set_span_attribute("profile.stacks.processed", len(profile["profile"]["stacks"])) + set_span_attribute("profile.frames.processed", len(profile["profile"]["frames"])) if options.get("profiling.stack_trace_rules.enabled"): try: @@ -386,7 +386,7 @@ def _symbolicate_profile(profile: Profile, project: Project) -> bool: raw_modules, raw_stacktraces, frames_sent = _prepare_frames_from_profile( profile, platform ) - set_span_data( + set_span_attribute( f"profile.frames.sent.{platform}", len(frames_sent), ) diff --git 
a/src/sentry/profiles/utils.py b/src/sentry/profiles/utils.py index 0287f9453e4592..eda1b486e0407c 100644 --- a/src/sentry/profiles/utils.py +++ b/src/sentry/profiles/utils.py @@ -15,7 +15,7 @@ from sentry.grouping.enhancer import Enhancements, keep_profiling_rules from sentry.net.http import connection_from_url from sentry.utils import json, metrics -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute Profile = MutableMapping[str, Any] CallTrees = Mapping[str, list[Any]] @@ -113,7 +113,7 @@ def get_from_profiling_service( ) with sentry_sdk.start_span(op="json.dumps"): data = json.dumps(json_data).encode("utf-8") - set_span_data("payload.size", len(data)) + set_span_attribute("payload.size", len(data)) if metric: metric_name, metric_tags = metric metrics.distribution(metric_name, len(data), tags=metric_tags) diff --git a/src/sentry/search/events/fields.py b/src/sentry/search/events/fields.py index 2467bc20fddafb..c5de7772581f01 100644 --- a/src/sentry/search/events/fields.py +++ b/src/sentry/search/events/fields.py @@ -47,7 +47,7 @@ from sentry.search.events.types import NormalizedArg, ParamsType from sentry.search.utils import InvalidQuery, parse_duration from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import ( SESSIONS_SNUBA_MAP, get_json_type, @@ -135,7 +135,7 @@ def project_threshold_config_expression( "project_threshold.count.grouped", format_grouped_length(num_project_thresholds, [10, 100, 250, 500]), ) - set_span_data("project_threshold.count", num_project_thresholds) + set_span_attribute("project_threshold.count", num_project_thresholds) num_transaction_thresholds = transaction_threshold_configs.count() sentry_sdk.set_tag("txn_threshold.count", num_transaction_thresholds) @@ -143,7 +143,7 @@ def project_threshold_config_expression( "txn_threshold.count.grouped", format_grouped_length(num_transaction_thresholds, [10, 100, 250, 500]), ) - set_span_data("txn_threshold.count", num_transaction_thresholds) + set_span_attribute("txn_threshold.count", num_transaction_thresholds) if num_project_thresholds + num_transaction_thresholds == 0: return ["tuple", [f"'{DEFAULT_PROJECT_THRESHOLD_METRIC}'", DEFAULT_PROJECT_THRESHOLD]] @@ -283,7 +283,7 @@ def team_key_transaction_expression(organization_id, team_ids, project_ids): sentry_sdk.set_tag( "team_key_txns.count.grouped", format_grouped_length(count, [10, 100, 250, 500]) ) - set_span_data("team_key_txns.count", count) + set_span_attribute("team_key_txns.count", count) # There are no team key transactions marked, so hard code false into the query. 
if count == 0: diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py index 2bd435d773942f..4b64b147ae4def 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/processing.py +++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py @@ -24,7 +24,7 @@ from sentry.sentry_metrics.indexer.mock import MockIndexer from sentry.sentry_metrics.indexer.postgres.postgres_v2 import PostgresIndexer from sentry.utils import metrics -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute logger = logging.getLogger(__name__) @@ -124,11 +124,11 @@ def _process_messages_impl( tags_validator=self.__get_tags_validator(), schema_validator=self.__get_schema_validator(), ) - set_span_data("indexer_batch.payloads.len", len(batch.parsed_payloads_by_meta)) + set_span_attribute("indexer_batch.payloads.len", len(batch.parsed_payloads_by_meta)) extracted_strings = batch.extract_strings() - set_span_data("org_strings.len", len(extracted_strings)) + set_span_attribute("org_strings.len", len(extracted_strings)) with metrics.timer("metrics_consumer.bulk_record"), sentry_sdk.start_span(op="bulk_record"): record_result = self._indexer.bulk_record(extracted_strings) @@ -138,6 +138,6 @@ def _process_messages_impl( results = batch.reconstruct_messages(mapping, bulk_record_meta) - set_span_data("new_messages.len", len(results.data)) + set_span_attribute("new_messages.len", len(results.data)) return results diff --git a/src/sentry/snuba/rpc_dataset_common.py b/src/sentry/snuba/rpc_dataset_common.py index c5649c69f5468e..39b3091516b251 100644 --- a/src/sentry/snuba/rpc_dataset_common.py +++ b/src/sentry/snuba/rpc_dataset_common.py @@ -37,7 +37,7 @@ from sentry.search.events.fields import get_function_alias from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaData, SnubaParams from sentry.utils import json, snuba_rpc -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import process_value logger = logging.getLogger("sentry.snuba.spans_rpc") @@ -385,7 +385,7 @@ def process_table_response( assert len(column_value.results) == len(column_value.reliabilities), Exception( "Length of rpc results do not match length of rpc reliabilities" ) - set_span_data(f"SearchResolver.result_size.{attribute}", len(column_value.results)) + set_span_attribute(f"SearchResolver.result_size.{attribute}", len(column_value.results)) while len(final_data) < len(column_value.results): final_data.append({}) @@ -403,7 +403,7 @@ def process_table_response( final_confidence[index][attribute] = CONFIDENCES.get( column_value.reliabilities[index], None ) - set_span_data("SearchResolver.result_size.final_data", len(final_data)) + set_span_attribute("SearchResolver.result_size.final_data", len(final_data)) if debug: final_meta["query"] = json.loads(MessageToJson(table_request.rpc_request)) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 1393160d7b442c..aceed0ee191b8d 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -704,7 +704,7 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str: ) -def set_span_data(data_name, value): +def set_span_attribute(data_name, value): span = sentry_sdk.get_current_span() if span is not None: span.set_attribute(data_name, value) From 6fb01f053df522871db4b7db863d69714538f461 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 10:03:52 +0200 Subject: [PATCH 36/66] 
Reverted attribute name change. this is safer --- src/sentry/ingest/consumer/processors.py | 4 ++-- src/sentry/utils/sdk.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index ac8cc765396984..023fc7a5e5bbc0 100644 --- a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -54,8 +54,8 @@ def inner(*args, **kwargs): default=getattr(settings, "SENTRY_INGEST_CONSUMER_APM_SAMPLING", 0), ) ) - # New behavior is to add a custom `sentry.sample_rate` that is picked up by `traces_sampler` - span_kwargs.setdefault("attributes", {}).setdefault("sentry.sample_rate", sample_rate) + # New behavior is to add a custom `sample_rate` that is picked up by `traces_sampler` + span_kwargs.setdefault("attributes", {}).setdefault("sample_rate", sample_rate) with sentry_sdk.start_span(**span_kwargs): return f(*args, **kwargs) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index aceed0ee191b8d..ac52985671f8b4 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -182,7 +182,7 @@ def traces_sampler(sampling_context): return SAMPLED_ROUTES[wsgi_path] # make it fail to I know what tests use this. - custom_sample_rate = sampling_context.get("attributes", {}).get("sentry.sample_rate") + custom_sample_rate = sampling_context.get("attributes", {}).get("sample_rate") if custom_sample_rate is not None: raise Exception("some error") From ca31b3ed40e8e9104f72eae95c1577be2bf5c25b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 10:30:16 +0200 Subject: [PATCH 37/66] testing something --- src/sentry/utils/sdk.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index ac52985671f8b4..1d5e2a8ab18857 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -246,6 +246,7 @@ def before_send_transaction(event: Event, _: Hint) -> Event | None: # `measurements` are deprecated and have already been removed from the Python SDK. # We ignore those lines in the mypy check because the SDKs Event type does not have `measurements` anymore. # (on ingest and in the product measurements are still there so it is fine to set them for the time being) + assert 1 == 2, "measurements are deprecated and have already been removed from the Python SDK." 
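# Sketch of how the `sample_rate` attribute set in the ingest consumer change
# above is meant to be consumed by `traces_sampler`. The attribute pass-through
# assumes the SDK 3.0 alpha behavior shown in this patch series, and the
# fallback rate is illustrative only.
def traces_sampler(sampling_context):
    custom_sample_rate = sampling_context.get("attributes", {}).get("sample_rate")
    if custom_sample_rate is not None:
        return float(custom_sample_rate)
    return 0.01  # illustrative default rate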
if not event["measurements"]: # type: ignore[typeddict-item] event["measurements"] = {} # type: ignore[typeddict-unknown-key] event["measurements"]["num_of_spans"] = { # type: ignore[typeddict-item] From b07b5063f966ee3d586652885561e0825edf420d Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 10:33:18 +0200 Subject: [PATCH 38/66] testing something --- src/sentry/utils/sdk.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 1d5e2a8ab18857..c751e239b0f773 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -177,6 +177,7 @@ def get_project_key(): def traces_sampler(sampling_context): + assert 1 == 2, "you found a test that uses the traces_sampler" wsgi_path = sampling_context.get("wsgi_environ", {}).get("PATH_INFO") if wsgi_path and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] From 7645de617fc79bb51867645146f84d349e7bacde Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 13:31:29 +0200 Subject: [PATCH 39/66] test --- src/sentry/utils/sdk.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index c751e239b0f773..683bd7943dca84 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -182,11 +182,6 @@ def traces_sampler(sampling_context): if wsgi_path and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] - # make it fail to I know what tests use this. - custom_sample_rate = sampling_context.get("attributes", {}).get("sample_rate") - if custom_sample_rate is not None: - raise Exception("some error") - # Apply sample_rate from custom_sampling_context custom_sample_rate = sampling_context.get("sample_rate") if custom_sample_rate is not None: From 19a8e15fd7a0c83f8386053ebeae609daa7b4267 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 2 Jun 2025 13:38:34 +0200 Subject: [PATCH 40/66] trigger ci From 6140c917156f5ffd1bcdebf5e61689d6dbd3f057 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 3 Jun 2025 12:03:49 +0200 Subject: [PATCH 41/66] cleanup --- src/sentry/utils/sdk.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 683bd7943dca84..d12cfd418b8461 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -177,7 +177,6 @@ def get_project_key(): def traces_sampler(sampling_context): - assert 1 == 2, "you found a test that uses the traces_sampler" wsgi_path = sampling_context.get("wsgi_environ", {}).get("PATH_INFO") if wsgi_path and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] @@ -242,7 +241,6 @@ def before_send_transaction(event: Event, _: Hint) -> Event | None: # `measurements` are deprecated and have already been removed from the Python SDK. # We ignore those lines in the mypy check because the SDKs Event type does not have `measurements` anymore. # (on ingest and in the product measurements are still there so it is fine to set them for the time being) - assert 1 == 2, "measurements are deprecated and have already been removed from the Python SDK." if not event["measurements"]: # type: ignore[typeddict-item] event["measurements"] = {} # type: ignore[typeddict-unknown-key] event["measurements"]["num_of_spans"] = { # type: ignore[typeddict-item] From d74d1daece9b089f8ad0d0cd469a76686608c305 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Wed, 4 Jun 2025 18:23:35 +0200 Subject: [PATCH 42/66] test: Remove redundant assertions These assertions are redundant. 
We already assert that `response.data` has the correct contents in all three tests. If we want to check `error.to_public_dict()`'s value, we should do so in a separate unit test. Hopefully unblocks #92011 --- tests/sentry/sentry_apps/api/bases/test_sentryapps.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py index e8479f62ee97c8..a64d18f4e41568 100644 --- a/tests/sentry/sentry_apps/api/bases/test_sentryapps.py +++ b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py @@ -238,7 +238,6 @@ def test_handle_sentry_app_error(self): response = self.endpoint._handle_sentry_app_exception(error) assert response.status_code == 400 - assert response.data == error.to_public_dict() assert response.exception is True assert response.data == {"detail": error.message} @@ -253,7 +252,6 @@ def test_handle_sentry_app_integrator_error(self): response = self.endpoint._handle_sentry_app_exception(error) assert response.status_code == 400 - assert response.data == error.to_public_dict() assert response.exception is True assert response.data == {"detail": error.message, "context": public_context} @@ -265,7 +263,6 @@ def test_handle_sentry_app_sentry_error(self): response = self.endpoint._handle_sentry_app_exception(error) assert response.status_code == 500 - assert response.data == error.to_public_dict() assert response.data == { "detail": f"An issue occured during the integration platform process. Sentry error ID: {None}" } From a6aaf6389717629c57f9683a07a457bb07efbb96 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 5 Jun 2025 08:51:59 +0200 Subject: [PATCH 43/66] Those set_attribute calls are fine. See https://github.com/getsentry/sentry/pull/92720 --- src/sentry/api/endpoints/group_current_release.py | 1 - src/sentry/api/endpoints/organization_events_stats.py | 1 - src/sentry/api/endpoints/organization_events_timeseries.py | 1 - .../organization_on_demand_metrics_estimation_stats.py | 1 - src/sentry/issues/endpoints/organization_issues_count.py | 1 - src/sentry/issues/grouptype.py | 1 - src/sentry/relay/config/metric_extraction.py | 1 - src/sentry/snuba/query_subscriptions/consumer.py | 1 - src/sentry/tagstore/snuba/backend.py | 2 -- src/sentry/taskworker/workerchild.py | 1 - 10 files changed, 11 deletions(-) diff --git a/src/sentry/api/endpoints/group_current_release.py b/src/sentry/api/endpoints/group_current_release.py index b48886fcd36d2b..ad2d89b3d3e546 100644 --- a/src/sentry/api/endpoints/group_current_release.py +++ b/src/sentry/api/endpoints/group_current_release.py @@ -66,7 +66,6 @@ def get(self, request: Request, group) -> Response: with sentry_sdk.start_span(op="CurrentReleaseEndpoint.get.current_release") as span: span.set_attribute("Environment Count", len(environments)) - # TODO-anton: split dict into multiple attributes span.set_attribute( "Raw Parameters", { diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 165f6f5788a27a..6dcf03ff47eab4 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -190,7 +190,6 @@ def get(self, request: Request, organization: Organization) -> Response: query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) top_events = 0 
diff --git a/src/sentry/api/endpoints/organization_events_timeseries.py b/src/sentry/api/endpoints/organization_events_timeseries.py index 316304d2cfcde5..eb65693b912843 100644 --- a/src/sentry/api/endpoints/organization_events_timeseries.py +++ b/src/sentry/api/endpoints/organization_events_timeseries.py @@ -152,7 +152,6 @@ def get_comparison_delta(self, request: Request) -> timedelta | None: def get(self, request: Request, organization: Organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) top_events = self.get_top_events(request) diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py index b16751fc006fc1..6250942232a019 100644 --- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py +++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py @@ -67,7 +67,6 @@ def get(self, request: Request, organization: Organization) -> Response: return Response({"detail": "missing required parameter yAxis"}, status=400) with sentry_sdk.start_span(op="discover.metrics.endpoint", name="get_full_metrics") as span: - # TODO-anton: split dict into multiple attributes span.set_attribute("organization", organization) try: diff --git a/src/sentry/issues/endpoints/organization_issues_count.py b/src/sentry/issues/endpoints/organization_issues_count.py index 3f833ab4de18a0..40126fbef70a69 100644 --- a/src/sentry/issues/endpoints/organization_issues_count.py +++ b/src/sentry/issues/endpoints/organization_issues_count.py @@ -63,7 +63,6 @@ def _count( query_kwargs["actor"] = request.user with start_span(op="start_search") as span: - # TODO-anton: split dict into multiple attributes span.set_attribute("query_kwargs", query_kwargs) result = search.backend.query(**query_kwargs) return result.hits diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index 08fce090848c21..8feb8b7283d7a3 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -126,7 +126,6 @@ def get_visible( span.set_tag("has_batch_features", batch_features is not None) span.set_tag("released", released) span.set_tag("enabled", enabled) - # TODO-anton: split dict into multiple attributes span.set_attribute("feature_to_grouptype", feature_to_grouptype) return released + enabled diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index 4158c933b2720e..9a094fe7005d10 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -793,7 +793,6 @@ def _convert_aggregate_and_query_to_metrics( } with sentry_sdk.start_span(op="converting_aggregate_and_query") as span: - # TODO-anton: split dict into multiple attributes span.set_attribute("widget_query_args", {"query": query, "aggregate": aggregate}) # Create as many specs as we support for spec_version in OnDemandMetricSpecVersioning.get_spec_versions(): diff --git a/src/sentry/snuba/query_subscriptions/consumer.py b/src/sentry/snuba/query_subscriptions/consumer.py index 4fa7525881a077..6302c659981347 100644 --- a/src/sentry/snuba/query_subscriptions/consumer.py +++ b/src/sentry/snuba/query_subscriptions/consumer.py @@ -163,7 +163,6 @@ def handle_message( tags={"dataset": dataset}, ), ): - # TODO-anton: split dict into multiple attributes 
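# The removed TODO comments above asked whether dict-valued attributes should
# be split into scalars. If a backend ever requires primitive attribute values,
# a flattening helper could look roughly like this (a sketch, not part of the
# patch):
def set_dict_as_attributes(span, prefix: str, values: dict) -> None:
    for key, value in values.items():
        # str() keeps values primitive; adjust if richer types are supported.
        span.set_attribute(f"{prefix}.{key}", str(value))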
span.set_attribute("payload", contents) span.set_attribute("subscription_dataset", subscription.snuba_query.dataset) span.set_attribute("subscription_query", subscription.snuba_query.query) diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index be9def0ece12ee..6f38059b8e5231 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -335,7 +335,6 @@ def __get_tag_keys_for_projects( ) as span: result = cache.get(cache_key, None) - # TODO-anton: are array attributes really supported? span.set_attribute("cache.key", [cache_key]) if result is not None: @@ -365,7 +364,6 @@ def __get_tag_keys_for_projects( op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" ) as span: cache.set(cache_key, result, 300) - # TODO-anton: are array attributes really supported? span.set_attribute("cache.key", [cache_key]) span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index d21f5770e69119..af120154b28cd9 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -301,7 +301,6 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - origin="taskworker", ) as root_span, ): - # TODO-anton: split dict into multiple attributes root_span.set_attribute( "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} ) From e484188fc6898228fd9aae3f50458ee1edf06377 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Thu, 5 Jun 2025 16:43:54 +0200 Subject: [PATCH 44/66] ref: Use `set_client` to change DSN `with sentry_sdk.init` is deprecated and will be removed in Sentry SDK 3.0. Split off from #92011 --- src/sentry/runner/main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index 9fdf90600d08ca..5c44f79b179714 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -135,7 +135,8 @@ def main() -> None: func(**kwargs) except Exception as e: # This reports errors sentry-devservices - with sentry_sdk.init(dsn=os.environ["SENTRY_DEVSERVICES_DSN"]): + with sentry_sdk.new_scope() as scope: + scope.set_client(sentry_sdk.Client(dsn=os.environ["SENTRY_DEVSERVICES_DSN"])) if os.environ.get("USER"): sentry_sdk.set_user({"username": os.environ.get("USER")}) sentry_sdk.capture_exception(e) From 9188a3d3df6ffc5628950b978de03c6aec37f492 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Thu, 5 Jun 2025 16:56:03 +0200 Subject: [PATCH 45/66] ref: Rename `set_span_data` to `set_span_attribute` `sentry_sdk.Span.set_data` will be replaced with `sentry_sdk.Span.set_attribute` in Sentry SDK 3.0. This change renames the `set_span_data` method to `set_span_attribute` to match the new method name in the SDK. 
Split off from #92011 --- src/sentry/api/bases/organization.py | 4 ++-- .../api/endpoints/organization_event_details.py | 6 +++--- .../api/endpoints/organization_events_trace.py | 14 +++++++------- .../organization_profiling_functions.py | 8 +++++--- src/sentry/api/endpoints/organization_tags.py | 4 ++-- src/sentry/api/endpoints/organization_traces.py | 6 +++--- src/sentry/api/utils.py | 4 ++-- src/sentry/event_manager.py | 6 +++--- src/sentry/ingest/transaction_clusterer/rules.py | 4 ++-- src/sentry/models/release.py | 4 ++-- src/sentry/profiles/task.py | 16 ++++++++-------- src/sentry/profiles/utils.py | 4 ++-- src/sentry/search/events/fields.py | 8 ++++---- .../consumers/indexer/processing.py | 8 ++++---- src/sentry/snuba/rpc_dataset_common.py | 6 +++--- src/sentry/utils/sdk.py | 2 +- 16 files changed, 53 insertions(+), 51 deletions(-) diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index 5fb069cde2dbdf..130c6e1ec1c05d 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -37,7 +37,7 @@ from sentry.utils import auth from sentry.utils.hashlib import hash_values from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import bind_organization_context, set_span_data +from sentry.utils.sdk import bind_organization_context, set_span_attribute class NoProjects(Exception): @@ -569,7 +569,7 @@ def get_filter_params( len_projects = len(projects) sentry_sdk.set_tag("query.num_projects", len_projects) sentry_sdk.set_tag("query.num_projects.grouped", format_grouped_length(len_projects)) - set_span_data("query.num_projects", len_projects) + set_span_attribute("query.num_projects", len_projects) params: FilterParams = { "start": start, diff --git a/src/sentry/api/endpoints/organization_event_details.py b/src/sentry/api/endpoints/organization_event_details.py index 456c64ddf52a3a..89ddbf72c78300 100644 --- a/src/sentry/api/endpoints/organization_event_details.py +++ b/src/sentry/api/endpoints/organization_event_details.py @@ -22,7 +22,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.query_sources import QuerySource from sentry.snuba.referrer import Referrer -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute VALID_AVERAGE_COLUMNS = {"span.self_time", "span.duration"} @@ -39,7 +39,7 @@ def add_comparison_to_event(event, average_columns, request: Request): group_to_span_map[group].append(span) # Nothing to add comparisons to - set_span_data("query.groups", len(group_to_span_map)) + set_span_attribute("query.groups", len(group_to_span_map)) if len(group_to_span_map) == 0: return @@ -77,7 +77,7 @@ def add_comparison_to_event(event, average_columns, request: Request): ), ) ) - set_span_data("query.groups_found", len(result["data"])) + set_span_attribute("query.groups_found", len(result["data"])) for row in result["data"]: group = row["span.group"] for span in group_to_span_map[group]: diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py index fb6c5bfdc5bfb7..47d0a41c2fca4d 100644 --- a/src/sentry/api/endpoints/organization_events_trace.py +++ b/src/sentry/api/endpoints/organization_events_trace.py @@ -36,7 +36,7 @@ from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked from sentry.utils.numbers import base32_encode, format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from 
sentry.utils.snuba import bulk_snuba_queries from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id, is_span_id @@ -761,7 +761,7 @@ def build_span_query(trace_id: str, spans_params: SnubaParams, query_spans: list # Performance improvement, snuba's parser is extremely slow when we're sending thousands of # span_ids here, using a `splitByChar` means that snuba will not parse the giant list of spans span_minimum = options.get("performance.traces.span_query_minimum_spans") - set_span_data("trace_view.spans.span_minimum", span_minimum) + set_span_attribute("trace_view.spans.span_minimum", span_minimum) sentry_sdk.set_tag("trace_view.split_by_char.optimization", len(query_spans) > span_minimum) if len(query_spans) > span_minimum: # TODO: because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt @@ -813,14 +813,14 @@ def augment_transactions_with_spans( projects.add(error["project.id"]) ts_params = find_timestamp_params(transactions) time_buffer = options.get("performance.traces.span_query_timebuffer_hours") - set_span_data("trace_view.spans.time_buffer", time_buffer) + set_span_attribute("trace_view.spans.time_buffer", time_buffer) if ts_params["min"]: params.start = ts_params["min"] - timedelta(hours=time_buffer) if ts_params["max"]: params.end = ts_params["max"] + timedelta(hours=time_buffer) if ts_params["max"] and ts_params["min"]: - set_span_data( + set_span_attribute( "trace_view.trace_duration", (ts_params["max"] - ts_params["min"]).total_seconds() ) sentry_sdk.set_tag("trace_view.missing_timestamp_constraints", False) @@ -899,7 +899,7 @@ def augment_transactions_with_spans( total_chunks = 3 else: total_chunks = 4 - set_span_data("trace_view.span_query.total_chunks", total_chunks) + set_span_attribute("trace_view.span_query.total_chunks", total_chunks) chunks = chunked(list_spans, (len(list_spans) // total_chunks) + 1) queries = [build_span_query(trace_id, spans_params, chunk) for chunk in chunks] results = bulk_snuba_queries( @@ -1046,7 +1046,7 @@ def record_analytics( sentry_sdk.set_tag( "trace_view.transactions.grouped", format_grouped_length(len_transactions) ) - set_span_data("trace_view.transactions", len_transactions) + set_span_attribute("trace_view.transactions", len_transactions) projects: set[int] = set() for transaction in transactions: @@ -1055,7 +1055,7 @@ def record_analytics( len_projects = len(projects) sentry_sdk.set_tag("trace_view.projects", len_projects) sentry_sdk.set_tag("trace_view.projects.grouped", format_grouped_length(len_projects)) - set_span_data("trace_view.projects", len_projects) + set_span_attribute("trace_view.projects", len_projects) def get(self, request: Request, organization: Organization, trace_id: str) -> HttpResponse: if not request.user.is_authenticated: diff --git a/src/sentry/api/endpoints/organization_profiling_functions.py b/src/sentry/api/endpoints/organization_profiling_functions.py index e6a0659f991041..768f0710f2e5cc 100644 --- a/src/sentry/api/endpoints/organization_profiling_functions.py +++ b/src/sentry/api/endpoints/organization_profiling_functions.py @@ -24,7 +24,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.utils.dates import parse_stats_period, validate_interval -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import bulk_snuba_queries TOP_FUNCTIONS_LIMIT = 50 @@ -202,7 +202,7 @@ def get_trends_data(stats_data) -> list[BreakpointData]: trending_functions 
= get_trends_data(stats_data) all_trending_functions_count = len(trending_functions) - set_span_data("profiling.top_functions", all_trending_functions_count) + set_span_attribute("profiling.top_functions", all_trending_functions_count) # Profiling functions have a resolution of ~10ms. To increase the confidence # of the results, the caller can specify a min threshold for the trend difference. @@ -215,7 +215,9 @@ def get_trends_data(stats_data) -> list[BreakpointData]: ] filtered_trending_functions_count = all_trending_functions_count - len(trending_functions) - set_span_data("profiling.top_functions.below_threshold", filtered_trending_functions_count) + set_span_attribute( + "profiling.top_functions.below_threshold", filtered_trending_functions_count + ) # Make sure to sort the results so that it's in order of largest change # to smallest change (ASC/DESC depends on the trend type) diff --git a/src/sentry/api/endpoints/organization_tags.py b/src/sentry/api/endpoints/organization_tags.py index 4fd7c20c293840..8b26a5a9e7f4ff 100644 --- a/src/sentry/api/endpoints/organization_tags.py +++ b/src/sentry/api/endpoints/organization_tags.py @@ -15,7 +15,7 @@ from sentry.api.utils import clamp_date_range, handle_query_errors from sentry.snuba.dataset import Dataset from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute @region_silo_endpoint @@ -83,6 +83,6 @@ def get(self, request: Request, organization) -> Response: format_grouped_length(len(results), [1, 10, 50, 100]), ) sentry_sdk.set_tag("dataset_queried", dataset.value) - set_span_data("custom_tags.count", len(results)) + set_span_attribute("custom_tags.count", len(results)) return Response(serialize(results, request.user)) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 3acc5e84504fd1..72daab5c96a61a 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -49,7 +49,7 @@ from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.utils.numbers import clip -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import bulk_snuba_queries_with_referrers from sentry.utils.snuba_rpc import get_traces_rpc @@ -1417,7 +1417,7 @@ def process_user_queries( where, _ = resolve_conditions(user_query) queries[user_query] = where - set_span_data("user_queries_count", len(queries)) + set_span_attribute("user_queries_count", len(queries)) sentry_sdk.set_context("user_queries", {"raw_queries": user_queries}) return queries @@ -1452,7 +1452,7 @@ def process_rpc_user_queries( if where is not None: queries[user_query] = where - set_span_data("user_queries_count", len(queries)) + set_span_attribute("user_queries_count", len(queries)) sentry_sdk.set_context("user_queries", {"raw_queries": user_queries}) return queries diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index c87ed5d8a0e808..85974be350f0e4 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -44,7 +44,7 @@ from sentry.silo.base import SiloMode from sentry.types.region import get_local_region from sentry.utils.dates import parse_stats_period -from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_data +from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_attribute from sentry.utils.snuba import ( 
DatasetSelectionError, QueryConnectionFailed, @@ -454,7 +454,7 @@ def update_snuba_params_with_timestamp( # While possible, the majority of traces shouldn't take more than a week # Starting with 3d for now, but potentially something we can increase if this becomes a problem time_buffer = options.get("performance.traces.transaction_query_timebuffer_days") - set_span_data("trace_view.transactions.time_buffer", time_buffer) + set_span_attribute("trace_view.transactions.time_buffer", time_buffer) example_start = example_timestamp - timedelta(days=time_buffer) example_end = example_timestamp + timedelta(days=time_buffer) # If timestamp is being passed it should always overwrite the statsperiod or start & end diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index bdb9444b9d8f74..3971752c028b04 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -136,7 +136,7 @@ from sentry.utils.performance_issues.performance_problem import PerformanceProblem from sentry.utils.projectflags import set_project_flag_and_signal from sentry.utils.safe import get_path, safe_execute, setdefault_path, trim -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.tag_normalization import normalized_sdk_tag_from_event from .utils.event_tracker import TransactionStageStatus, track_sampled_event @@ -2605,8 +2605,8 @@ def save_transaction_events( ) except KeyError: continue - set_span_data("jobs", len(jobs)) - set_span_data("projects", len(projects)) + set_span_attribute("jobs", len(jobs)) + set_span_attribute("projects", len(projects)) # NOTE: Keep this list synchronized with sentry/spans/consumers/process_segments/message.py diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py index 901694971dd7da..3979a5e410742f 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -9,7 +9,7 @@ from sentry.ingest.transaction_clusterer.rule_validator import RuleValidator from sentry.models.project import Project from sentry.utils import metrics -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from .base import ReplacementRule @@ -146,7 +146,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet: sorted_rules = [rule for rule in sorted_rules if rule[1] >= last_seen_deadline] if self.MERGE_MAX_RULES < len(rules): - set_span_data("discarded_rules", len(rules) - self.MERGE_MAX_RULES) + set_span_attribute("discarded_rules", len(rules) - self.MERGE_MAX_RULES) sentry_sdk.Scope.get_isolation_scope().set_context( "clustering_rules_max", { diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py index 0b2dcaf4a513bc..8ee88b2d169041 100644 --- a/src/sentry/models/release.py +++ b/src/sentry/models/release.py @@ -45,7 +45,7 @@ from sentry.utils.db import atomic_transaction from sentry.utils.hashlib import hash_values, md5_text from sentry.utils.numbers import validate_bigint -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute logger = logging.getLogger(__name__) @@ -645,7 +645,7 @@ def set_commits(self, commit_list): This will clear any existing commit log and replace it with the given commits. 
""" - set_span_data("release.set_commits", len(commit_list)) + set_span_attribute("release.set_commits", len(commit_list)) from sentry.models.releases.set_commits import set_commits diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index 4afa825d58bc1f..c6b627460861cf 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -56,7 +56,7 @@ from sentry.utils.locking import UnableToAcquireLock from sentry.utils.outcomes import Outcome, track_outcome from sentry.utils.projectflags import set_project_flag_and_signal -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute REVERSE_DEVICE_CLASS = {next(iter(tags)): label for label, tags in DEVICE_CLASS.items()} @@ -239,9 +239,9 @@ def process_profile_task( if "version" in profile: version = profile["version"] sentry_sdk.set_tag("format", f"sample_v{version}") - set_span_data("profile.samples", len(profile["profile"]["samples"])) - set_span_data("profile.stacks", len(profile["profile"]["stacks"])) - set_span_data("profile.frames", len(profile["profile"]["frames"])) + set_span_attribute("profile.samples", len(profile["profile"]["samples"])) + set_span_attribute("profile.stacks", len(profile["profile"]["stacks"])) + set_span_attribute("profile.frames", len(profile["profile"]["frames"])) elif "profiler_id" in profile and profile["platform"] == "android": sentry_sdk.set_tag("format", "android_chunk") else: @@ -267,9 +267,9 @@ def process_profile_task( _set_frames_platform(profile) if "version" in profile: - set_span_data("profile.samples.processed", len(profile["profile"]["samples"])) - set_span_data("profile.stacks.processed", len(profile["profile"]["stacks"])) - set_span_data("profile.frames.processed", len(profile["profile"]["frames"])) + set_span_attribute("profile.samples.processed", len(profile["profile"]["samples"])) + set_span_attribute("profile.stacks.processed", len(profile["profile"]["stacks"])) + set_span_attribute("profile.frames.processed", len(profile["profile"]["frames"])) if options.get("profiling.stack_trace_rules.enabled"): try: @@ -386,7 +386,7 @@ def _symbolicate_profile(profile: Profile, project: Project) -> bool: raw_modules, raw_stacktraces, frames_sent = _prepare_frames_from_profile( profile, platform ) - set_span_data( + set_span_attribute( f"profile.frames.sent.{platform}", len(frames_sent), ) diff --git a/src/sentry/profiles/utils.py b/src/sentry/profiles/utils.py index 0287f9453e4592..eda1b486e0407c 100644 --- a/src/sentry/profiles/utils.py +++ b/src/sentry/profiles/utils.py @@ -15,7 +15,7 @@ from sentry.grouping.enhancer import Enhancements, keep_profiling_rules from sentry.net.http import connection_from_url from sentry.utils import json, metrics -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute Profile = MutableMapping[str, Any] CallTrees = Mapping[str, list[Any]] @@ -113,7 +113,7 @@ def get_from_profiling_service( ) with sentry_sdk.start_span(op="json.dumps"): data = json.dumps(json_data).encode("utf-8") - set_span_data("payload.size", len(data)) + set_span_attribute("payload.size", len(data)) if metric: metric_name, metric_tags = metric metrics.distribution(metric_name, len(data), tags=metric_tags) diff --git a/src/sentry/search/events/fields.py b/src/sentry/search/events/fields.py index 2467bc20fddafb..c5de7772581f01 100644 --- a/src/sentry/search/events/fields.py +++ b/src/sentry/search/events/fields.py @@ -47,7 +47,7 @@ from sentry.search.events.types import NormalizedArg, ParamsType from 
sentry.search.utils import InvalidQuery, parse_duration from sentry.utils.numbers import format_grouped_length -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import ( SESSIONS_SNUBA_MAP, get_json_type, @@ -135,7 +135,7 @@ def project_threshold_config_expression( "project_threshold.count.grouped", format_grouped_length(num_project_thresholds, [10, 100, 250, 500]), ) - set_span_data("project_threshold.count", num_project_thresholds) + set_span_attribute("project_threshold.count", num_project_thresholds) num_transaction_thresholds = transaction_threshold_configs.count() sentry_sdk.set_tag("txn_threshold.count", num_transaction_thresholds) @@ -143,7 +143,7 @@ def project_threshold_config_expression( "txn_threshold.count.grouped", format_grouped_length(num_transaction_thresholds, [10, 100, 250, 500]), ) - set_span_data("txn_threshold.count", num_transaction_thresholds) + set_span_attribute("txn_threshold.count", num_transaction_thresholds) if num_project_thresholds + num_transaction_thresholds == 0: return ["tuple", [f"'{DEFAULT_PROJECT_THRESHOLD_METRIC}'", DEFAULT_PROJECT_THRESHOLD]] @@ -283,7 +283,7 @@ def team_key_transaction_expression(organization_id, team_ids, project_ids): sentry_sdk.set_tag( "team_key_txns.count.grouped", format_grouped_length(count, [10, 100, 250, 500]) ) - set_span_data("team_key_txns.count", count) + set_span_attribute("team_key_txns.count", count) # There are no team key transactions marked, so hard code false into the query. if count == 0: diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py index 43b04ad8a5cb25..3971be7edd5893 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/processing.py +++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py @@ -24,7 +24,7 @@ from sentry.sentry_metrics.indexer.mock import MockIndexer from sentry.sentry_metrics.indexer.postgres.postgres_v2 import PostgresIndexer from sentry.utils import metrics -from sentry.utils.sdk import set_span_data +from sentry.utils.sdk import set_span_attribute logger = logging.getLogger(__name__) @@ -124,11 +124,11 @@ def _process_messages_impl( tags_validator=self.__get_tags_validator(), schema_validator=self.__get_schema_validator(), ) - set_span_data("indexer_batch.payloads.len", len(batch.parsed_payloads_by_meta)) + set_span_attribute("indexer_batch.payloads.len", len(batch.parsed_payloads_by_meta)) extracted_strings = batch.extract_strings() - set_span_data("org_strings.len", len(extracted_strings)) + set_span_attribute("org_strings.len", len(extracted_strings)) with metrics.timer("metrics_consumer.bulk_record"), sentry_sdk.start_span(op="bulk_record"): record_result = self._indexer.bulk_record(extracted_strings) @@ -138,6 +138,6 @@ def _process_messages_impl( results = batch.reconstruct_messages(mapping, bulk_record_meta) - set_span_data("new_messages.len", len(results.data)) + set_span_attribute("new_messages.len", len(results.data)) return results diff --git a/src/sentry/snuba/rpc_dataset_common.py b/src/sentry/snuba/rpc_dataset_common.py index 59375ebc26253c..f359251498a12e 100644 --- a/src/sentry/snuba/rpc_dataset_common.py +++ b/src/sentry/snuba/rpc_dataset_common.py @@ -44,7 +44,7 @@ from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaData, SnubaParams from sentry.snuba.discover import OTHER_KEY, create_groupby_dict, create_result_key from sentry.utils import json, snuba_rpc -from sentry.utils.sdk import 
set_span_data +from sentry.utils.sdk import set_span_attribute from sentry.utils.snuba import SnubaTSResult, process_value logger = logging.getLogger("sentry.snuba.spans_rpc") @@ -412,7 +412,7 @@ def process_table_response( assert len(column_value.results) == len(column_value.reliabilities), Exception( "Length of rpc results do not match length of rpc reliabilities" ) - set_span_data(f"SearchResolver.result_size.{attribute}", len(column_value.results)) + set_span_attribute(f"SearchResolver.result_size.{attribute}", len(column_value.results)) while len(final_data) < len(column_value.results): final_data.append({}) @@ -430,7 +430,7 @@ def process_table_response( final_confidence[index][attribute] = CONFIDENCES.get( column_value.reliabilities[index], None ) - set_span_data("SearchResolver.result_size.final_data", len(final_data)) + set_span_attribute("SearchResolver.result_size.final_data", len(final_data)) if debug: final_meta["query"] = json.loads(MessageToJson(table_request.rpc_request)) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index f2768ad1f8d720..622217047fea3e 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -704,7 +704,7 @@ def set_measurement(measurement_name, value, unit=None): pass -def set_span_data(data_name, value): +def set_span_attribute(data_name, value): span = sentry_sdk.get_current_span() if span is not None: span.set_data(data_name, value) From 0870caa161390a7add7aff9865f75e80fd7698ce Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Fri, 6 Jun 2025 15:14:37 +0200 Subject: [PATCH 46/66] ref: Remove `propagate_hub` to `ThreadingIntegration` This parameter is deprecated as of Sentry SDK 2.0.0; the default value is now `True`, so passing this parameter is no longer needed. --- src/sentry/utils/sdk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 503118b8b1eedf..72fad9fb457761 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -490,7 +490,7 @@ def flush( LoggingIntegration(event_level=None, sentry_logs_level=logging.INFO), RustInfoIntegration(), RedisIntegration(), - ThreadingIntegration(propagate_hub=True), + ThreadingIntegration(), ], **sdk_options, ) From 102cb523f9d7473ad7bd465b380d591f23a90c85 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Fri, 6 Jun 2025 15:20:46 +0200 Subject: [PATCH 47/66] ref(sdk): Use top-level `get_current_scope` Using `sentry_sdk.Scope.get_current_scope` has been deprecated in favor of `sentry_sdk.get_current_scope`. Going forward, we generally would like SDK users to stick to the top-level APIs. Split off from #92011. --- src/sentry/cache/base.py | 2 +- .../integrations/web/organization_integration_setup.py | 2 +- src/sentry/scim/endpoints/utils.py | 2 +- src/sentry/utils/concurrent.py | 2 +- src/sentry/utils/sdk.py | 2 +- src/sentry/utils/snuba.py | 6 +++--- src/sentry/utils/snuba_rpc.py | 6 ++---- tests/sentry/utils/test_sdk.py | 4 ++-- 8 files changed, 12 insertions(+), 14 deletions(-) diff --git a/src/sentry/cache/base.py b/src/sentry/cache/base.py index bbbf28ca961316..80855281e93949 100644 --- a/src/sentry/cache/base.py +++ b/src/sentry/cache/base.py @@ -46,7 +46,7 @@ def _mark_transaction(self, op): if not self.is_default_cache: return - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() # Do not set this tag if we're in the global scope (which roughly # equates to having a transaction). 
if scope.transaction: diff --git a/src/sentry/integrations/web/organization_integration_setup.py b/src/sentry/integrations/web/organization_integration_setup.py index fc207e76199eee..2b583a22ae2b2e 100644 --- a/src/sentry/integrations/web/organization_integration_setup.py +++ b/src/sentry/integrations/web/organization_integration_setup.py @@ -21,7 +21,7 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False def handle(self, request: HttpRequest, organization, provider_id) -> HttpResponseBase: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_transaction_name(f"integration.{provider_id}", source=TransactionSource.VIEW) pipeline = IntegrationPipeline( diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index b40562e373f7ec..c73c549882d0d7 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -25,7 +25,7 @@ class SCIMApiError(APIException): def __init__(self, detail, status_code=400): - transaction = sentry_sdk.Scope.get_current_scope().transaction + transaction = sentry_sdk.get_current_scope().transaction if transaction is not None: transaction.set_tag("http.status_code", status_code) super().__init__({"schemas": [SCIM_API_ERROR], "detail": detail}) diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 82e1b706d2fa47..5d09bc92431d97 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py @@ -248,7 +248,7 @@ def submit[ priority, ( sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_current_scope(), callable, future, ), diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 72fad9fb457761..bf9d8057604ffa 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -584,7 +584,7 @@ def check_current_scope_transaction( Note: Ignores scope `transaction` values with `source = "custom"`, indicating a value which has been set maunually. 
""" - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() transaction_from_request = get_transaction_name_from_request(request) if ( diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index d112d1e3902668..1a82826a7816ba 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1091,7 +1091,7 @@ def _apply_cache_and_build_results( use_cache: bool | None = False, ) -> ResultSet: parent_api: str = "" - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() if scope.transaction: parent_api = scope.transaction.name @@ -1162,7 +1162,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: [ ( sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_current_scope(), snuba_request, ) for snuba_request in snuba_requests_list @@ -1175,7 +1175,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: _snuba_query( ( sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_current_scope(), snuba_requests_list[0], ) ) diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index 42c92e4400f74f..bc66cc62d4fb87 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -123,7 +123,7 @@ def _make_rpc_requests( partial_request = partial( _make_rpc_request, thread_isolation_scope=sentry_sdk.Scope.get_isolation_scope(), - thread_current_scope=sentry_sdk.Scope.get_current_scope(), + thread_current_scope=sentry_sdk.get_current_scope(), ) response = [ result @@ -253,9 +253,7 @@ def _make_rpc_request( else thread_isolation_scope ) thread_current_scope = ( - sentry_sdk.Scope.get_current_scope() - if thread_current_scope is None - else thread_current_scope + sentry_sdk.get_current_scope() if thread_current_scope is None else thread_current_scope ) if SNUBA_INFO: from google.protobuf.json_format import MessageToJson diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 1609caa4ed86d4..592547b863d681 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -216,7 +216,7 @@ def test_scope_has_correct_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/dogs/{name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch is None @@ -225,7 +225,7 @@ def test_scope_has_wrong_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/tricks/{trick_name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch == { "scope_transaction": "/tricks/{trick_name}/", From 4c154c86d424175519734ab505e7c6192bb9583d Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Fri, 6 Jun 2025 15:29:23 +0200 Subject: [PATCH 48/66] ref(sdk): Migrate to `get_isolation_scope` Migrate calls of `sentry_sdk.Scope.get_isolation_scope` to `sentry_sdk.get_isolation_scope`. Calls where `Scope` is first imported and then where `Scope.get_isolation_scope` is called are left as-is; we will address those separately. Split off from #92011. 
--- src/sentry/data_export/tasks.py | 2 +- src/sentry/hybridcloud/rpc/service.py | 2 +- src/sentry/ingest/transaction_clusterer/rules.py | 2 +- .../integrations/source_code_management/commit_context.py | 2 +- .../integrations/source_code_management/repository.py | 2 +- src/sentry/integrations/utils/scope.py | 2 +- src/sentry/middleware/integrations/classifications.py | 2 +- src/sentry/replays/consumers/recording.py | 2 +- src/sentry/replays/usecases/ingest/__init__.py | 2 +- src/sentry/reprocessing2.py | 2 +- src/sentry/scim/endpoints/members.py | 2 +- .../sentry_apps/api/endpoints/sentry_app_authorizations.py | 2 +- src/sentry/shared_integrations/client/base.py | 2 +- src/sentry/utils/concurrent.py | 2 +- src/sentry/utils/snuba.py | 4 ++-- src/sentry/utils/snuba_rpc.py | 4 ++-- src/sentry/web/frontend/csrf_failure.py | 2 +- tests/relay_integration/test_sdk.py | 6 +++--- 18 files changed, 22 insertions(+), 22 deletions(-) diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py index d070749e84f87a..65657ee09b6e96 100644 --- a/src/sentry/data_export/tasks.py +++ b/src/sentry/data_export/tasks.py @@ -381,7 +381,7 @@ def merge_export_blobs(data_export_id, **kwargs): def _set_data_on_scope(data_export): - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if data_export.user_id: user = dict(id=data_export.user_id) scope.set_user(user) diff --git a/src/sentry/hybridcloud/rpc/service.py b/src/sentry/hybridcloud/rpc/service.py index d630c0ca9862ea..b117ccdebdf84e 100644 --- a/src/sentry/hybridcloud/rpc/service.py +++ b/src/sentry/hybridcloud/rpc/service.py @@ -596,7 +596,7 @@ def _remote_exception(self, message: str) -> RpcRemoteException: def _raise_from_response_status_error(self, response: requests.Response) -> NoReturn: rpc_method = f"{self.service_name}.{self.method_name}" - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("rpc_method", rpc_method) scope.set_tag("rpc_status_code", response.status_code) diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py index 3979a5e410742f..64da8ea464c6f0 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -147,7 +147,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet: if self.MERGE_MAX_RULES < len(rules): set_span_attribute("discarded_rules", len(rules) - self.MERGE_MAX_RULES) - sentry_sdk.Scope.get_isolation_scope().set_context( + sentry_sdk.get_isolation_scope().set_context( "clustering_rules_max", { "num_existing_rules": len(rules), diff --git a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py index b0ac2f586aa7c0..0aefd81f55ad4a 100644 --- a/src/sentry/integrations/source_code_management/commit_context.py +++ b/src/sentry/integrations/source_code_management/commit_context.py @@ -266,7 +266,7 @@ def queue_pr_comment_task_if_needed( ), extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("queue_comment_check.merge_commit_sha", commit.key) scope.set_tag("queue_comment_check.organization_id", commit.organization_id) diff --git a/src/sentry/integrations/source_code_management/repository.py b/src/sentry/integrations/source_code_management/repository.py index 1fe3efd26b0b0c..d63b106338a1dd 100644 
--- a/src/sentry/integrations/source_code_management/repository.py +++ b/src/sentry/integrations/source_code_management/repository.py @@ -197,7 +197,7 @@ def get_stacktrace_link( "organization_id": repo.organization_id, } ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("stacktrace_link.tried_version", False) def encode_url(url: str) -> str: diff --git a/src/sentry/integrations/utils/scope.py b/src/sentry/integrations/utils/scope.py index 9a99e3349deaac..efccbe0f2fb44d 100644 --- a/src/sentry/integrations/utils/scope.py +++ b/src/sentry/integrations/utils/scope.py @@ -24,7 +24,7 @@ def clear_tags_and_context() -> None: """Clear certain tags and context since it should not be set.""" reset_values = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() for tag in ["organization", "organization.slug"]: if tag in scope._tags: diff --git a/src/sentry/middleware/integrations/classifications.py b/src/sentry/middleware/integrations/classifications.py index cf1dbf119f7f69..5200e0b492ec70 100644 --- a/src/sentry/middleware/integrations/classifications.py +++ b/src/sentry/middleware/integrations/classifications.py @@ -124,7 +124,7 @@ def get_response(self, request: HttpRequest) -> HttpResponseBase: parser_class = self.integration_parsers.get(provider) if not parser_class: - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("provider", provider) scope.set_tag("path", request.path) sentry_sdk.capture_exception( diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index 7e77b08625c73e..e609c10620c891 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -79,7 +79,7 @@ def process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage def commit_message(message: Message[ProcessedRecordingMessage]) -> None: - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() + isolation_scope = sentry_sdk.get_isolation_scope().fork() with sentry_sdk.scope.use_isolation_scope(isolation_scope): with sentry_sdk.start_transaction( name="replays.consumer.recording_buffered.commit_message", diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index b62dab27dab32c..20e5b0ecdeb056 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -93,7 +93,7 @@ class RecordingIngestMessage: def ingest_recording(message_bytes: bytes) -> None: """Ingest non-chunked recording messages.""" - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() + isolation_scope = sentry_sdk.get_isolation_scope().fork() with sentry_sdk.scope.use_isolation_scope(isolation_scope): with sentry_sdk.start_transaction( diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index ba3a99f5f29efc..75c3a766f6245d 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -372,7 +372,7 @@ def buffered_delete_old_primary_hash( old_primary_hashes.add(old_primary_hash) reprocessing_store.add_hash(project_id, group_id, old_primary_hash) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("project_id", project_id) scope.set_tag("old_group_id", group_id) scope.set_tag("old_primary_hash", old_primary_hash) diff --git a/src/sentry/scim/endpoints/members.py 
b/src/sentry/scim/endpoints/members.py index 147c68a9db8160..5cf88a7c4c6740 100644 --- a/src/sentry/scim/endpoints/members.py +++ b/src/sentry/scim/endpoints/members.py @@ -530,7 +530,7 @@ def post(self, request: Request, organization) -> Response: """ update_role = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if "sentryOrgRole" in request.data and request.data["sentryOrgRole"]: role = request.data["sentryOrgRole"].lower() diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py index 23a1f35619d732..00cd07b91a4f3e 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py @@ -40,7 +40,7 @@ class SentryAppAuthorizationsEndpoint(SentryAppAuthorizationsBaseEndpoint): } def post(self, request: Request, installation: SentryAppInstallation) -> Response: - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("organization", installation.organization_id) scope.set_tag("sentry_app_id", installation.sentry_app.id) diff --git a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py index 11f0a6cc32d701..e8560f8cf06102 100644 --- a/src/sentry/shared_integrations/client/base.py +++ b/src/sentry/shared_integrations/client/base.py @@ -231,7 +231,7 @@ def _request( ) if self.integration_type: - sentry_sdk.Scope.get_isolation_scope().set_tag(self.integration_type, self.name) + sentry_sdk.get_isolation_scope().set_tag(self.integration_type, self.name) request = Request( method=method.upper(), diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 5d09bc92431d97..6405f5a4e9789e 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py @@ -247,7 +247,7 @@ def submit[ task = PriorityTask( priority, ( - sentry_sdk.Scope.get_isolation_scope(), + sentry_sdk.get_isolation_scope(), sentry_sdk.get_current_scope(), callable, future, diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 1a82826a7816ba..26274cbbe85634 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1161,7 +1161,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: _snuba_query, [ ( - sentry_sdk.Scope.get_isolation_scope(), + sentry_sdk.get_isolation_scope(), sentry_sdk.get_current_scope(), snuba_request, ) @@ -1174,7 +1174,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: query_results = [ _snuba_query( ( - sentry_sdk.Scope.get_isolation_scope(), + sentry_sdk.get_isolation_scope(), sentry_sdk.get_current_scope(), snuba_requests_list[0], ) diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index bc66cc62d4fb87..b411a8e873933e 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -122,7 +122,7 @@ def _make_rpc_requests( # Sets the thread parameters once so we're not doing it in the map repeatedly partial_request = partial( _make_rpc_request, - thread_isolation_scope=sentry_sdk.Scope.get_isolation_scope(), + thread_isolation_scope=sentry_sdk.get_isolation_scope(), thread_current_scope=sentry_sdk.get_current_scope(), ) response = [ @@ -248,7 +248,7 @@ def _make_rpc_request( thread_current_scope: sentry_sdk.Scope | None = None, ) -> BaseHTTPResponse: thread_isolation_scope = ( - sentry_sdk.Scope.get_isolation_scope() + 
sentry_sdk.get_isolation_scope() if thread_isolation_scope is None else thread_isolation_scope ) diff --git a/src/sentry/web/frontend/csrf_failure.py b/src/sentry/web/frontend/csrf_failure.py index 7035763b2b2a81..0dd0d53007bfaa 100644 --- a/src/sentry/web/frontend/csrf_failure.py +++ b/src/sentry/web/frontend/csrf_failure.py @@ -14,7 +14,7 @@ def view(request: HttpRequest, reason: str = "") -> HttpResponse: context = {"no_referer": reason == REASON_NO_REFERER} extras = {"reason": reason, "referer": request.META.get("HTTP_REFERER")} - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() # Emit a sentry request that the incoming request is rejected by the CSRF protection. if hasattr(request, "user") and request.user.is_authenticated: diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index d9853f27447b21..de70fa1af3a390 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -108,7 +108,7 @@ def test_bind_organization_context(default_organization): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id assert scope._tags["organization.slug"] == default_organization.slug assert scope._contexts["organization"] == { @@ -130,7 +130,7 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization.test"] == "1" @@ -146,5 +146,5 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id From 534c9e2eb76e320889ff40cb5b749fa59f43dca2 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Fri, 6 Jun 2025 15:34:55 +0200 Subject: [PATCH 49/66] ref(sdk): Migrate to `sentry_sdk.get_global_scope` Migrate calls of `sentry_sdk.Scope.get_global_scope` to `sentry_sdk.get_global_scope`. Split off from #92011. 
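A minimal before/after sketch of the call-site change (illustrative only; detaching
the client from the global scope is what the test setup below uses it for):

```python
import sentry_sdk

# before: deprecated classmethod access
# sentry_sdk.Scope.get_global_scope().set_client(None)

# after: top-level API; setting the client to None disables event sending
sentry_sdk.get_global_scope().set_client(None)
```
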
--- src/sentry/testutils/pytest/sentry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index 09fc1412fab558..fde3c33130819e 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -286,7 +286,7 @@ def pytest_configure(config: pytest.Config) -> None: from sentry.runner.initializer import initialize_app initialize_app({"settings": settings, "options": None}) - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) register_extensions() from sentry.utils.redis import clusters @@ -362,7 +362,7 @@ def pytest_runtest_teardown(item: pytest.Item) -> None: ProjectOption.objects.clear_local_cache() UserOption.objects.clear_local_cache() - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) def _shuffle(items: list[pytest.Item], r: random.Random) -> None: From a71823ebaa6959b549dab5486b54df91e5e65017 Mon Sep 17 00:00:00 2001 From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com> Date: Fri, 6 Jun 2025 13:48:16 +0000 Subject: [PATCH 50/66] :hammer_and_wrench: apply pre-commit fixes From 5d1e3321c0a7460c8ead614a59f04d3c5cbcc6c9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 11 Jun 2025 10:26:34 +0200 Subject: [PATCH 51/66] Removed debug code --- pyproject.toml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a94d628ff05065..54e72218f0f24a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,10 +40,6 @@ filterwarnings = [ # pytest has not yet implemented the replacement for this yet "ignore:The --looponfail command line argument.*", - - # TODO-anton: just for testing locally, remove before merging - "ignore::DeprecationWarning:setuptools._distutils.version", - "ignore::DeprecationWarning:redis.connection" ] looponfailroots = ["src", "tests"] From 443f07449c7daeffeb3d691c0b46322c864f7a60 Mon Sep 17 00:00:00 2001 From: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> Date: Wed, 11 Jun 2025 17:08:37 +0200 Subject: [PATCH 52/66] test: Add test for custom sample rates (#93011) ### Legal Boilerplate Look, I get it. The entity doing business as "Sentry" was incorporated in the State of Delaware in 2015 as Functional Software, Inc. and is gonna need some rights from me in order to utilize my contributions in this here PR. So here's the deal: I retain all rights, title and interest in and to my contributions, and by keeping this boilerplate intact I confirm that Sentry can use, modify, copy, and redistribute my contributions, under Sentry's choice of terms. 
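For context, a minimal sketch of the behaviour the new tests pin down (assuming
`traces_sampler` from `sentry.utils.sdk`; the values mirror the assertions added
in `tests/sentry/utils/test_sdk.py`):

```python
from sentry.utils.sdk import traces_sampler

# A custom "sample_rate" in the sampling context is honored directly.
assert traces_sampler({"sample_rate": 0.0}) == 0.0
assert traces_sampler({"sample_rate": 0.5}) == 0.5
assert traces_sampler({"sample_rate": 1.0}) == 1.0
```
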
--- .../test_ingest_consumer_processing.py | 58 +++++++++++++++++++ tests/sentry/utils/test_sdk.py | 13 +++++ 2 files changed, 71 insertions(+) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index 749245f7a5d21d..e3cebadad7c6c7 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime +import os import time import uuid import zipfile @@ -8,6 +9,7 @@ from typing import Any from unittest.mock import Mock, patch +import django.test import orjson import pytest from arroyo.backends.kafka.consumer import KafkaPayload @@ -24,6 +26,7 @@ process_event, process_individual_attachment, process_userreport, + trace_func, ) from sentry.ingest.types import ConsumerType from sentry.models.debugfile import create_files_from_dif_zip @@ -621,3 +624,58 @@ def test_collect_span_metrics(default_project): assert mock_metrics.incr.call_count == 0 collect_span_metrics(default_project, {"spans": [1, 2, 3]}) assert mock_metrics.incr.call_count == 1 + + +@pytest.mark.parametrize( + "env_value,settings_value,expected_sample_rate", + [ + # Both unset - should use default of 0 + (None, None, 0.0), + # Only environment variable set + ("0", None, 0.0), + ("1", None, 1.0), + ("0.5", None, 0.5), + # Only settings value set + (None, 0, 0.0), + (None, 1, 1.0), + (None, 0.7, 0.7), + # Both set - environment variable should take precedence + ("0", 1, 0.0), # env=0, settings=1 -> should use env (0) + ("1", 0, 1.0), # env=1, settings=0 -> should use env (1) + ("0.3", 0.8, 0.3), # env=0.3, settings=0.8 -> should use env (0.3) + ], +) +def test_sample_rate_passed(env_value, settings_value, expected_sample_rate): + # Test various combinations of environment variable and settings values + + # Prepare environment + env_dict = {} + if env_value is not None: + env_dict["SENTRY_INGEST_CONSUMER_APM_SAMPLING"] = env_value + + with patch.dict(os.environ, env_dict, clear=True): + with django.test.override_settings(SENTRY_INGEST_CONSUMER_APM_SAMPLING=settings_value): + # If settings_value is None, delete the setting to simulate it not being set + if settings_value is None: + del settings.SENTRY_INGEST_CONSUMER_APM_SAMPLING + + with patch( + "sentry.ingest.consumer.processors.sentry_sdk.start_span" + ) as mock_start_span: + # Create a dummy function to decorate + @trace_func(name="test_span") + def dummy_function(): + return "test_result" + + # Call the decorated function + result = dummy_function() + + # Verify the function returned correctly + assert result == "test_result" + + # Verify start_span was called with correct arguments + mock_start_span.assert_called_once() + call_args = mock_start_span.call_args + + # Check that the span_kwargs include the expected sample_rate + assert call_args.kwargs["attributes"]["sample_rate"] == expected_sample_rate diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 592547b863d681..c0f3842dbfbaf3 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -19,6 +19,7 @@ check_current_scope_transaction, check_tag_for_scope_bleed, merge_context_into_scope, + traces_sampler, ) @@ -64,6 +65,18 @@ def test_context_scope_merge_with_existing_context(self): "charlie": "goofy", } + def test_traces_sampler_custom_sample_rate_0_0(self): + sampling_context = {"sample_rate": 0.0} + assert 
traces_sampler(sampling_context) == 0.0 + + def test_traces_sampler_custom_sample_rate_0_5(self): + sampling_context = {"sample_rate": 0.5} + assert traces_sampler(sampling_context) == 0.5 + + def test_traces_sampler_custom_sample_rate_1_0(self): + sampling_context = {"sample_rate": 1.0} + assert traces_sampler(sampling_context) == 1.0 + @patch("sentry.utils.sdk.logger.warning") class CheckTagForScopeBleedTest(TestCase): From d494df2ebfdd7fe2e923bb6f0830f01036350340 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 12 Jun 2025 13:01:03 +0200 Subject: [PATCH 53/66] set_data -> set_attribute --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- src/sentry/demo_mode/tasks.py | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 5107c48d7d9c80..132f0591eebd25 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -70,7 +70,7 @@ sentry-ophio>=1.1.3 sentry-protos==0.2.0 sentry-redis-tools>=0.5.0 sentry-relay>=0.9.9 -sentry-sdk[http2]==3.0.0a1 +sentry-sdk[http2]==3.0.0a2 slack-sdk>=3.27.2 snuba-sdk>=3.0.43 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 80e42287744ae6..03de241fd55653 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -195,7 +195,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 -sentry-sdk==3.0.0a1 +sentry-sdk==3.0.0a2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.17.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index a28a16e4e85cd0..65a79e85f96335 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -133,7 +133,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 -sentry-sdk==3.0.0a1 +sentry-sdk==3.0.0a2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.17.0 diff --git a/src/sentry/demo_mode/tasks.py b/src/sentry/demo_mode/tasks.py index 4b9bb735ef7deb..e4d8ed499565f7 100644 --- a/src/sentry/demo_mode/tasks.py +++ b/src/sentry/demo_mode/tasks.py @@ -92,8 +92,8 @@ def _sync_project_debug_files( organization_id=target_org.id, ).values_list("id", flat=True) ) - span.set_data("source_project_ids", source_project_ids) - span.set_data("target_project_ids", target_project_ids) + span.set_attribute("source_project_ids", source_project_ids) + span.set_attribute("target_project_ids", target_project_ids) project_debug_files = ProjectDebugFile.objects.filter( Q(project_id__in=source_project_ids) | Q(project_id__in=target_project_ids), @@ -114,7 +114,7 @@ def _sync_project_debug_files( for source_project_debug_file in different_project_debug_files: with sentry_sdk.start_span(name="sync-project-debug-files-sync-project-debug-file") as span: - span.set_data("source_project_debug_file_id", source_project_debug_file.id) + span.set_attribute("source_project_debug_file_id", source_project_debug_file.id) _sync_project_debug_file(source_project_debug_file, target_org) From 3b850fd31476d38119529c71bfba622f0b0d0dc7 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 12 Jun 2025 13:03:55 +0200 Subject: [PATCH 54/66] trigger ci From 2193459d510f81607fae49d4a04325c48e35d17c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 16 Jun 2025 13:59:38 +0200 Subject: [PATCH 55/66] new pytest-sentry --- requirements-dev-frozen.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 
9008135158549a..0144efc1d38b55 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -156,7 +156,7 @@ pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 pytest-rerunfailures==15.0 -pytest-sentry==0.4.1 +pytest-sentry==0.5.0 pytest-workaround-12888==1.0.0 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 From f641055cd3e356b5b3fe3e648d5976699f2aafd2 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 16 Jun 2025 14:29:00 +0200 Subject: [PATCH 56/66] update --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 9ef89f29ae3205..4e735fe407f62f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,7 +16,7 @@ pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 pytest-rerunfailures>=15 -pytest-sentry>=0.4.0 +pytest-sentry>=0.5.0 pytest-workaround-12888 pytest-xdist>=3 responses>=0.23.1 From 024762257284259e59a2e1f91e474b393150277b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 16 Jun 2025 14:38:28 +0200 Subject: [PATCH 57/66] client is not an argument of Scope constructor anymore --- tests/sentry/metrics/test_minimetrics.py | 14 +++++++------- tests/sentry/utils/test_sdk.py | 3 ++- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/tests/sentry/metrics/test_minimetrics.py b/tests/sentry/metrics/test_minimetrics.py index f7739b703c70b3..e40440fba4f941 100644 --- a/tests/sentry/metrics/test_minimetrics.py +++ b/tests/sentry/metrics/test_minimetrics.py @@ -43,14 +43,14 @@ def get_transaction(self): @pytest.fixture(scope="function") def scope(): - scope = sentry_sdk.Scope( - ty=sentry_sdk.scope.ScopeType.CURRENT, - client=Client( - dsn="http://foo@example.invalid/42", - transport=DummyTransport, - traces_sample_rate=1.0, - ), + scope = sentry_sdk.Scope(ty=sentry_sdk.scope.ScopeType.CURRENT) + client = Client( + dsn="http://foo@example.invalid/42", + transport=DummyTransport, + traces_sample_rate=1.0, ) + scope.set_client(client) + with sentry_sdk.use_scope(scope): yield scope diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index c0f3842dbfbaf3..64b7d57d07dc32 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -306,7 +306,8 @@ def test_no_scope_data_passed(self, mock_sdk_capture_exception: MagicMock): capture_exception_with_scope_check(Exception()) passed_scope = mock_sdk_capture_exception.call_args.kwargs["scope"] - empty_scope = Scope(client=passed_scope.client) + empty_scope = Scope() + empty_scope.set_client(passed_scope.client) for entry in empty_scope.__slots__: # _propagation_context is generated on __init__ for tracing without performance From 25712df985d31a083cebc617b6064adcbf8b5730 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 23 Jun 2025 12:37:05 +0200 Subject: [PATCH 58/66] Migrate sampling_context to use SDK 3.0 attributes (#94022) See the Celery and WSGI sections [in the migration guide](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x#sampling) Closes https://github.com/getsentry/sentry-python/issues/4505 --- src/sentry/utils/sdk.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 66683d4e6c0851..28b6dae2878cc3 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -178,8 +178,8 @@ def get_project_key(): def traces_sampler(sampling_context): - wsgi_path = sampling_context.get("wsgi_environ", {}).get("PATH_INFO") - if wsgi_path 
and wsgi_path in SAMPLED_ROUTES: + wsgi_path = sampling_context.get("url.path") + if wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] # Apply sample_rate from custom_sampling_context @@ -192,8 +192,7 @@ def traces_sampler(sampling_context): return sampling_context["parent_sampled"] if "celery_job" in sampling_context: - task_name = sampling_context["celery_job"].get("task") - + task_name = sampling_context.get("celery.job.task") if task_name in SAMPLED_TASKS: return SAMPLED_TASKS[task_name] From c1f35b4fe71cedb2cbc2b1d6cef406c45f33a63f Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 26 Jun 2025 08:52:24 +0200 Subject: [PATCH 59/66] naming --- .../ingest_consumer/test_ingest_consumer_processing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index e3cebadad7c6c7..457f9ea2044d47 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -662,13 +662,13 @@ def test_sample_rate_passed(env_value, settings_value, expected_sample_rate): with patch( "sentry.ingest.consumer.processors.sentry_sdk.start_span" ) as mock_start_span: - # Create a dummy function to decorate + # Create a placeholder function to decorate @trace_func(name="test_span") - def dummy_function(): + def placeholder_function(): return "test_result" # Call the decorated function - result = dummy_function() + result = placeholder_function() # Verify the function returned correctly assert result == "test_result" From b64c9caccc8de95ec907278635e1fe03f3e2da04 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 26 Jun 2025 08:55:35 +0200 Subject: [PATCH 60/66] test parametrization --- .../ingest_consumer/test_ingest_consumer_processing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index 457f9ea2044d47..7d8a0d1f68e41d 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -627,8 +627,8 @@ def test_collect_span_metrics(default_project): @pytest.mark.parametrize( - "env_value,settings_value,expected_sample_rate", - [ + ("env_value", "settings_value", "expected_sample_rate"), + ( # Both unset - should use default of 0 (None, None, 0.0), # Only environment variable set @@ -643,7 +643,7 @@ def test_collect_span_metrics(default_project): ("0", 1, 0.0), # env=0, settings=1 -> should use env (0) ("1", 0, 1.0), # env=1, settings=0 -> should use env (1) ("0.3", 0.8, 0.3), # env=0.3, settings=0.8 -> should use env (0.3) - ], + ), ) def test_sample_rate_passed(env_value, settings_value, expected_sample_rate): # Test various combinations of environment variable and settings values From 3674af78b24ca9a9577d55ad5d2b165658ce31d5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 27 Jun 2025 10:51:02 +0200 Subject: [PATCH 61/66] Removed new tests to test something --- .../test_ingest_consumer_processing.py | 58 ------------------- tests/sentry/utils/test_sdk.py | 13 ----- 2 files changed, 71 deletions(-) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index 
7d8a0d1f68e41d..749245f7a5d21d 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -1,7 +1,6 @@ from __future__ import annotations import datetime -import os import time import uuid import zipfile @@ -9,7 +8,6 @@ from typing import Any from unittest.mock import Mock, patch -import django.test import orjson import pytest from arroyo.backends.kafka.consumer import KafkaPayload @@ -26,7 +24,6 @@ process_event, process_individual_attachment, process_userreport, - trace_func, ) from sentry.ingest.types import ConsumerType from sentry.models.debugfile import create_files_from_dif_zip @@ -624,58 +621,3 @@ def test_collect_span_metrics(default_project): assert mock_metrics.incr.call_count == 0 collect_span_metrics(default_project, {"spans": [1, 2, 3]}) assert mock_metrics.incr.call_count == 1 - - -@pytest.mark.parametrize( - ("env_value", "settings_value", "expected_sample_rate"), - ( - # Both unset - should use default of 0 - (None, None, 0.0), - # Only environment variable set - ("0", None, 0.0), - ("1", None, 1.0), - ("0.5", None, 0.5), - # Only settings value set - (None, 0, 0.0), - (None, 1, 1.0), - (None, 0.7, 0.7), - # Both set - environment variable should take precedence - ("0", 1, 0.0), # env=0, settings=1 -> should use env (0) - ("1", 0, 1.0), # env=1, settings=0 -> should use env (1) - ("0.3", 0.8, 0.3), # env=0.3, settings=0.8 -> should use env (0.3) - ), -) -def test_sample_rate_passed(env_value, settings_value, expected_sample_rate): - # Test various combinations of environment variable and settings values - - # Prepare environment - env_dict = {} - if env_value is not None: - env_dict["SENTRY_INGEST_CONSUMER_APM_SAMPLING"] = env_value - - with patch.dict(os.environ, env_dict, clear=True): - with django.test.override_settings(SENTRY_INGEST_CONSUMER_APM_SAMPLING=settings_value): - # If settings_value is None, delete the setting to simulate it not being set - if settings_value is None: - del settings.SENTRY_INGEST_CONSUMER_APM_SAMPLING - - with patch( - "sentry.ingest.consumer.processors.sentry_sdk.start_span" - ) as mock_start_span: - # Create a placeholder function to decorate - @trace_func(name="test_span") - def placeholder_function(): - return "test_result" - - # Call the decorated function - result = placeholder_function() - - # Verify the function returned correctly - assert result == "test_result" - - # Verify start_span was called with correct arguments - mock_start_span.assert_called_once() - call_args = mock_start_span.call_args - - # Check that the span_kwargs include the expected sample_rate - assert call_args.kwargs["attributes"]["sample_rate"] == expected_sample_rate diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 082cd957e6fb6b..6e432ded5a3c7e 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -20,7 +20,6 @@ check_current_scope_transaction, check_tag_for_scope_bleed, merge_context_into_scope, - traces_sampler, ) @@ -66,18 +65,6 @@ def test_context_scope_merge_with_existing_context(self): "charlie": "goofy", } - def test_traces_sampler_custom_sample_rate_0_0(self): - sampling_context = {"sample_rate": 0.0} - assert traces_sampler(sampling_context) == 0.0 - - def test_traces_sampler_custom_sample_rate_0_5(self): - sampling_context = {"sample_rate": 0.5} - assert traces_sampler(sampling_context) == 0.5 - - def test_traces_sampler_custom_sample_rate_1_0(self): - sampling_context = {"sample_rate": 
1.0} - assert traces_sampler(sampling_context) == 1.0 - @patch("sentry.utils.sdk.logger.warning") class CheckTagForScopeBleedTest(TestCase): From 6c2a99ff3b518f3dc5d9d57d5b67f6c4d76b805e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 27 Jun 2025 11:02:48 +0200 Subject: [PATCH 62/66] small fix --- src/sentry/utils/sdk.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 9c87cce1dbb2f0..36ff449b1880b5 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -192,10 +192,9 @@ def traces_sampler(sampling_context): if sampling_context["parent_sampled"] is not None: return sampling_context["parent_sampled"] - if "celery_job" in sampling_context: - task_name = sampling_context.get("celery.job.task") - if task_name in SAMPLED_TASKS: - return SAMPLED_TASKS[task_name] + task_name = sampling_context.get("celery.job.task") + if task_name in SAMPLED_TASKS: + return SAMPLED_TASKS[task_name] # Default to the sampling rate in settings return float(settings.SENTRY_BACKEND_APM_SAMPLING or 0) From 5411f366bca3ddd6190d8453421665e476cef985 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 27 Jun 2025 11:56:42 +0200 Subject: [PATCH 63/66] Revert "Removed new tests to test something" This reverts commit 3674af78b24ca9a9577d55ad5d2b165658ce31d5. --- .../test_ingest_consumer_processing.py | 58 +++++++++++++++++++ tests/sentry/utils/test_sdk.py | 13 +++++ 2 files changed, 71 insertions(+) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index 749245f7a5d21d..7d8a0d1f68e41d 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime +import os import time import uuid import zipfile @@ -8,6 +9,7 @@ from typing import Any from unittest.mock import Mock, patch +import django.test import orjson import pytest from arroyo.backends.kafka.consumer import KafkaPayload @@ -24,6 +26,7 @@ process_event, process_individual_attachment, process_userreport, + trace_func, ) from sentry.ingest.types import ConsumerType from sentry.models.debugfile import create_files_from_dif_zip @@ -621,3 +624,58 @@ def test_collect_span_metrics(default_project): assert mock_metrics.incr.call_count == 0 collect_span_metrics(default_project, {"spans": [1, 2, 3]}) assert mock_metrics.incr.call_count == 1 + + +@pytest.mark.parametrize( + ("env_value", "settings_value", "expected_sample_rate"), + ( + # Both unset - should use default of 0 + (None, None, 0.0), + # Only environment variable set + ("0", None, 0.0), + ("1", None, 1.0), + ("0.5", None, 0.5), + # Only settings value set + (None, 0, 0.0), + (None, 1, 1.0), + (None, 0.7, 0.7), + # Both set - environment variable should take precedence + ("0", 1, 0.0), # env=0, settings=1 -> should use env (0) + ("1", 0, 1.0), # env=1, settings=0 -> should use env (1) + ("0.3", 0.8, 0.3), # env=0.3, settings=0.8 -> should use env (0.3) + ), +) +def test_sample_rate_passed(env_value, settings_value, expected_sample_rate): + # Test various combinations of environment variable and settings values + + # Prepare environment + env_dict = {} + if env_value is not None: + env_dict["SENTRY_INGEST_CONSUMER_APM_SAMPLING"] = env_value + + with patch.dict(os.environ, env_dict, clear=True): + with 
django.test.override_settings(SENTRY_INGEST_CONSUMER_APM_SAMPLING=settings_value): + # If settings_value is None, delete the setting to simulate it not being set + if settings_value is None: + del settings.SENTRY_INGEST_CONSUMER_APM_SAMPLING + + with patch( + "sentry.ingest.consumer.processors.sentry_sdk.start_span" + ) as mock_start_span: + # Create a placeholder function to decorate + @trace_func(name="test_span") + def placeholder_function(): + return "test_result" + + # Call the decorated function + result = placeholder_function() + + # Verify the function returned correctly + assert result == "test_result" + + # Verify start_span was called with correct arguments + mock_start_span.assert_called_once() + call_args = mock_start_span.call_args + + # Check that the span_kwargs include the expected sample_rate + assert call_args.kwargs["attributes"]["sample_rate"] == expected_sample_rate diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 6e432ded5a3c7e..082cd957e6fb6b 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -20,6 +20,7 @@ check_current_scope_transaction, check_tag_for_scope_bleed, merge_context_into_scope, + traces_sampler, ) @@ -65,6 +66,18 @@ def test_context_scope_merge_with_existing_context(self): "charlie": "goofy", } + def test_traces_sampler_custom_sample_rate_0_0(self): + sampling_context = {"sample_rate": 0.0} + assert traces_sampler(sampling_context) == 0.0 + + def test_traces_sampler_custom_sample_rate_0_5(self): + sampling_context = {"sample_rate": 0.5} + assert traces_sampler(sampling_context) == 0.5 + + def test_traces_sampler_custom_sample_rate_1_0(self): + sampling_context = {"sample_rate": 1.0} + assert traces_sampler(sampling_context) == 1.0 + @patch("sentry.utils.sdk.logger.warning") class CheckTagForScopeBleedTest(TestCase): From 2804ca9f064ef91ac952dcf8ae93e54b36d3745a Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 27 Jun 2025 11:58:24 +0200 Subject: [PATCH 64/66] better checks --- src/sentry/utils/sdk.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 36ff449b1880b5..43dbdf1b41f961 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -180,7 +180,7 @@ def get_project_key(): def traces_sampler(sampling_context): wsgi_path = sampling_context.get("url.path") - if wsgi_path in SAMPLED_ROUTES: + if wsgi_path is not None and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] # Apply sample_rate from custom_sampling_context @@ -189,11 +189,12 @@ def traces_sampler(sampling_context): return float(custom_sample_rate) # If there's already a sampling decision, just use that - if sampling_context["parent_sampled"] is not None: + parent_sampled = sampling_context.get("parent_sampled") + if parent_sampled is not None: return sampling_context["parent_sampled"] task_name = sampling_context.get("celery.job.task") - if task_name in SAMPLED_TASKS: + if task_name is not None and task_name in SAMPLED_TASKS: return SAMPLED_TASKS[task_name] # Default to the sampling rate in settings From 4f0cec6242ca01867e70e90c1f35e419ea03e222 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 1 Jul 2025 14:17:24 +0200 Subject: [PATCH 65/66] Fix scope level calls --- src/sentry/spans/consumers/process/factory.py | 3 +-- src/sentry/spans/consumers/process/flusher.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/sentry/spans/consumers/process/factory.py 
b/src/sentry/spans/consumers/process/factory.py index b916654acd83af..7377d82c201b57 100644 --- a/src/sentry/spans/consumers/process/factory.py +++ b/src/sentry/spans/consumers/process/factory.py @@ -66,8 +66,7 @@ def create_with_partitions( partitions: Mapping[Partition, int], ) -> ProcessingStrategy[KafkaPayload]: # TODO: remove once span buffer is live in all regions - scope = sentry_sdk.get_isolation_scope() - scope.level = "warning" + sentry_sdk.set_level("warning") self.rebalancing_count += 1 sentry_sdk.set_tag("sentry_spans_rebalancing_count", str(self.rebalancing_count)) diff --git a/src/sentry/spans/consumers/process/flusher.py b/src/sentry/spans/consumers/process/flusher.py index 451541a795e4ee..ab947983f440e7 100644 --- a/src/sentry/spans/consumers/process/flusher.py +++ b/src/sentry/spans/consumers/process/flusher.py @@ -142,8 +142,7 @@ def main( produce_to_pipe: Callable[[KafkaPayload], None] | None, ) -> None: # TODO: remove once span buffer is live in all regions - scope = sentry_sdk.get_isolation_scope() - scope.level = "warning" + sentry_sdk.set_level("warning") shard_tag = ",".join(map(str, shards)) sentry_sdk.set_tag("sentry_spans_buffer_component", "flusher") From 7c2038dda4e8be8c9e90bb231a1cbf10200f9acf Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 1 Jul 2025 17:29:49 +0200 Subject: [PATCH 66/66] Bump pytest-sentry to 0.5.1 --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index f8018defe250f1..b2c1581a3e632a 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -157,7 +157,7 @@ pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 pytest-rerunfailures==15.0 -pytest-sentry==0.5.0 +pytest-sentry==0.5.1 pytest-workaround-12888==1.0.0 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 diff --git a/requirements-dev.txt b/requirements-dev.txt index 846bfed4a8d747..0483af9d23869d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -17,7 +17,7 @@ pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 pytest-rerunfailures>=15 -pytest-sentry>=0.5.0 +pytest-sentry>=0.5.1 pytest-workaround-12888 pytest-xdist>=3 responses>=0.23.1