Commit 81943d0

Add tests
1 parent 64e58b4 commit 81943d0

7 files changed: +638 -35 lines changed

newrelic/api/transaction.py

Lines changed: 10 additions & 4 deletions
@@ -1065,7 +1065,7 @@ def _make_sampling_decision(self):
             remote_parent_not_sampled_setting = self.settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled,
         )
         _logger.debug("Full granularity sampling decision was %s with priority=%s.", sampled, priority)
-        if computed_sampled:
+        if computed_sampled or not self.settings.distributed_tracing.sampler.partial_granularity.enabled:
             self._priority = computed_priority
             self._sampled = computed_sampled
             self._sampling_decision_made = True
@@ -1086,6 +1086,13 @@ def _make_sampling_decision(self):
             self._sampling_decision_made = True
             if self._sampled:
                 self.partial_granularity_sampled = True
+            return
+
+        # This is only reachable if both full and partial granularity tracing are off.
+        # Set priority=0 and do not sample. This enables DT headers to still be sent
+        # even if the trace is never sampled.
+        self._priority = 0
+        self._sampled = False
 
     def _freeze_path(self):
         if self._frozen_path is None:
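
Taken together, the two hunks above give the sampling decision three outcomes: the full granularity decision is final when it samples the trace or when partial granularity is disabled; otherwise partial granularity decides and returns; and if neither mode is enabled, the transaction keeps priority 0 and stays unsampled while DT headers can still be sent. A rough standalone sketch of that precedence, with hypothetical helper names rather than the agent's actual internals:

# Rough sketch of the precedence above; full_decision/partial_decision are
# hypothetical stand-ins for the agent's samplers, each returning
# (sampled, priority).
def make_sampling_decision(full_enabled, partial_enabled, full_decision, partial_decision):
    if full_enabled:
        sampled, priority = full_decision()
        # Full granularity is final if it sampled the trace, or if partial
        # granularity tracing is disabled entirely.
        if sampled or not partial_enabled:
            return sampled, priority
    if partial_enabled:
        # Otherwise partial granularity sampling gets to decide.
        return partial_decision()
    # Only reachable when both modes are off: priority=0, not sampled, but
    # DT headers can still be created and propagated downstream.
    return False, 0
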
@@ -1237,7 +1244,6 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"):
             return False
 
         try:
-            self._remote_parent_sampled = payload.get("sa")
             version = payload.get("v")
             major_version = version and int(version[0])
 
@@ -1258,7 +1264,7 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"):
             if not any(k in data for k in ("id", "tx")):
                 self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException")
                 return False
-
+            self._remote_parent_sampled = data.get("sa")
             settings = self._settings
             account_id = data.get("ac")
             trusted_account_key = settings.trusted_account_key or (
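
With this hunk the sa flag is read from the parsed payload body (data) rather than from the outer payload envelope. For orientation, a New Relic DT payload nests the caller's fields under a data key, roughly like the abbreviated, illustrative shape below (all field values made up), which is why sa sits next to ac/tx/id rather than at the top level beside v:

# Abbreviated, illustrative shape of a New Relic DT payload; values are made up.
payload = {
    "v": [0, 1],                    # payload version
    "d": {
        "ty": "App",                # caller type
        "ac": "12345",              # account id
        "tx": "27856f70d3d314b7",   # transaction guid
        "id": "5f474d64b9cc9b2a",   # span guid
        "tr": "3221bf09aa0bcf0d",   # trace id
        "sa": True,                 # whether the caller sampled the trace
        "pr": 1.23456,              # priority
        "ti": 1482959525577,        # timestamp (ms)
    },
}
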
@@ -1349,7 +1355,7 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"):
         try:
             traceparent = ensure_str(traceparent).strip()
             data = W3CTraceParent.decode(traceparent)
-            self._remote_parent_sampled = data.get("sa")
+            self._remote_parent_sampled = data.pop("sa", None)
         except:
             data = None
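On the accept side, the remote parent's sampled flag is now popped out of the decoded traceparent data rather than merely read, so it does not linger in the header data used afterwards. Per the W3C spec, the sampled bit lives in the trace-flags field of traceparent (01 = sampled), which appears to be where the decoded sa value comes from. A minimal usage example with made-up header values, calling the public agent API referenced in the hunk above:

import newrelic.agent

# Hypothetical inbound headers from an upstream caller. The final field of
# traceparent is trace-flags; "01" means the remote parent sampled the trace.
headers = {
    "traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01",
    "tracestate": "...",  # vendor entries omitted
}

# Called inside an active transaction on the receiving service.
newrelic.agent.accept_distributed_trace_headers(headers, transport_type="HTTP")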

newrelic/core/config.py

Lines changed: 6 additions & 1 deletion
@@ -1415,7 +1415,12 @@ def simplify_distributed_tracing_sampler_granularity_settings(settings):
         settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled = settings.distributed_tracing.sampler.remote_parent_sampled
     if not settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled:
         settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled = settings.distributed_tracing.sampler.remote_parent_not_sampled
-
+    # Partial granularity tracing is not available in infinite tracing mode.
+    if settings.infinite_tracing.enabled and settings.distributed_tracing.sampler.partial_granularity.enabled:
+        _logger.warning(
+            "Improper configuration. Infinite tracing cannot be enabled at the same time as partial granularity tracing. Setting distributed_tracing.sampler.partial_granularity.enabled=False."
+        )
+        settings.distributed_tracing.sampler.partial_granularity.enabled = False
 
 def _remove_ignored_configs(server_settings):
     if not server_settings.get("agent_config"):
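Because partial granularity tracing is not available alongside infinite tracing, the settings simplification now forces it off and logs a warning when both are enabled. A self-contained sketch of that precedence rule, using a throwaway namespace in place of the agent's real settings object:

from types import SimpleNamespace

# Throwaway stand-in for the agent settings tree; attribute names mirror the diff.
settings = SimpleNamespace(
    infinite_tracing=SimpleNamespace(enabled=True),
    distributed_tracing=SimpleNamespace(
        sampler=SimpleNamespace(partial_granularity=SimpleNamespace(enabled=True))
    ),
)

# Same check as above: infinite tracing wins and partial granularity is forced
# off (the agent also logs a warning at this point).
if settings.infinite_tracing.enabled and settings.distributed_tracing.sampler.partial_granularity.enabled:
    settings.distributed_tracing.sampler.partial_granularity.enabled = False

assert settings.distributed_tracing.sampler.partial_granularity.enabled is False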

newrelic/core/node_mixin.py

Lines changed: 27 additions & 8 deletions
@@ -95,23 +95,42 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic
         # If we are in essential mode return the span with minimized attributes.
         if partial_granularity_type == "essential":
             return [i_attrs, {}, a_minimized_attrs]
-        # If the span is an exit span but span compression (compact) is enabled, we need to check
-        # for uniqueness before returning it.
+        # If the span is an exit span but span compression (compact) is enabled,
+        # we need to check for uniqueness before returning it.
         # Combine all the entity relationship attr values into a string to be
         # used as the hash to check for uniqueness.
         span_attrs = "".join([str(a_minimized_attrs[key]) for key in exit_span_attrs_present])
         new_exit_span = span_attrs not in ct_exit_spans
-        # If this is a new exit span, add it to the known ct_exit_spans and return it.
+        # If this is a new exit span, add it to the known ct_exit_spans and
+        # return it.
         if new_exit_span:
-            # ids is the list of span guids that share this unqiue exit span.
+            # nr.ids is the list of span guids that share this unique exit span.
            a_minimized_attrs["nr.ids"] = []
            a_minimized_attrs["nr.durations"] = self.duration
-            ct_exit_spans[span_attrs] = [a_minimized_attrs]
+            ct_exit_spans[span_attrs] = [i_attrs, a_minimized_attrs]
            return [i_attrs, {}, a_minimized_attrs]
         # If this is an exit span we've already seen, add it's guid to the list
-        # of ids on the seen span and return None.
-        ct_exit_spans[span_attrs][0]["nr.ids"].append(self.guid)
-        ct_exit_spans[span_attrs][0]["nr.durations"] += self.duration
+        # of ids on the seen span, compute the new duration & start time, and
+        # return None.
+        ct_exit_spans[span_attrs][1]["nr.ids"].append(self.guid)
+        # Compute the new start and end time for all compressed spans and use
+        # that to set the duration for all compressed spans.
+        current_start_time = ct_exit_spans[span_attrs][0]["timestamp"]
+        current_end_time = ct_exit_spans[span_attrs][0]["timestamp"]/1000 + ct_exit_spans[span_attrs][1]["nr.durations"]
+        new_start_time = i_attrs["timestamp"]
+        new_end_time = i_attrs["timestamp"]/1000 + i_attrs["duration"]
+        set_start_time = min(new_start_time, current_start_time)
+        # If the new span starts after the old span's end time or the new span
+        # ends before the current span starts, add the durations.
+        if current_end_time < new_start_time/1000 or new_end_time < current_start_time/1000:
+            set_duration = ct_exit_spans[span_attrs][1]["nr.durations"] + i_attrs["duration"]
+        # Otherwise, if the new and old spans overlap in time, use the newest
+        # end time and subtract the start time from it to calculate the new
+        # duration.
+        else:
+            set_duration = max(current_end_time, new_end_time) - set_start_time/1000
+        ct_exit_spans[span_attrs][0]["timestamp"] = set_start_time
+        ct_exit_spans[span_attrs][1]["nr.durations"] = set_duration
         return None
 
     def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None):
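
The compact-mode bookkeeping above stores [i_attrs, a_minimized_attrs] per unique exit span and, on each repeat, merges time ranges: timestamps are epoch milliseconds while durations are seconds (hence the /1000 conversions), disjoint repeats have their durations summed, and overlapping repeats collapse to the span of their union. A standalone restatement of that merge with made-up numbers (not the agent's code):

# Illustrative restatement of the merge rule above.
# Timestamps are epoch milliseconds; durations are seconds.
def merge_compressed_span(current_start_ms, current_duration_s, new_start_ms, new_duration_s):
    current_end_s = current_start_ms / 1000 + current_duration_s
    new_end_s = new_start_ms / 1000 + new_duration_s
    merged_start_ms = min(new_start_ms, current_start_ms)
    # Disjoint in time: just add the durations together.
    if current_end_s < new_start_ms / 1000 or new_end_s < current_start_ms / 1000:
        merged_duration_s = current_duration_s + new_duration_s
    # Overlapping: duration is the span of the union of the two ranges.
    else:
        merged_duration_s = max(current_end_s, new_end_s) - merged_start_ms / 1000
    return merged_start_ms, merged_duration_s

# Two overlapping calls to the same external service:
#   span A starts at t=1000ms and lasts 2.0s; span B starts at t=2000ms and lasts 3.0s.
print(merge_compressed_span(1000, 2.0, 2000, 3.0))   # (1000, 4.0): union covers 1.0s-5.0s
# Two disjoint calls: durations are simply summed.
print(merge_compressed_span(1000, 1.0, 10000, 2.0))  # (1000, 3.0)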
