
Commit 0685a9c

Strip out prototype only stuff
1 parent 548f1b7 commit 0685a9c

8 files changed: +35 −175 lines

8 files changed

+35
-175
lines changed

newrelic/_version.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

newrelic/common/streaming_utils.py

Lines changed: 1 addition & 69 deletions
@@ -15,7 +15,6 @@
 import collections
 import logging
 import threading
-import sys
 
 try:
     from newrelic.core.infinite_tracing_pb2 import AttributeValue, SpanBatch
@@ -26,76 +25,13 @@
 _logger = logging.getLogger(__name__)
 
 
-def get_deep_size(obj, seen=None):
-    """Recursively calculates the size of an object including nested lists and dicts."""
-    if seen is None:
-        seen = set()
-        size = -8*3  # Subtract 8 for each of the 3 attribute lists as those don't count.
-    else:
-        size = 0
-
-    # Avoid recursion for already seen objects (handle circular references)
-    obj_id = id(obj)
-    if obj_id in seen:
-        return 0
-    seen.add(obj_id)
-
-    if isinstance(obj, str):
-        size += len(obj)
-        return size
-    elif isinstance(obj, float) or isinstance(obj, int):
-        size += 8
-        return size
-    elif isinstance(obj, bool):
-        size += 1
-        return size
-    elif isinstance(obj, dict):
-        size += sum(get_deep_size(k, seen) + get_deep_size(v, seen) for k, v in obj.items())
-    elif isinstance(obj, (list, tuple, set, frozenset)):
-        size += 8 + sum(get_deep_size(i, seen) for i in obj)
-    else:
-        size += 8
-
-    return size
-
-
-def get_deep_size_protobuf(obj):
-    """Recursively calculates the size of an object including nested lists and dicts."""
-    size = 0
-    if hasattr(obj, "string_value"):
-        size += len(obj.string_value)
-        return size
-    elif hasattr(obj, "double_value"):
-        size += 8
-        return size
-    elif hasattr(obj, "int_value"):
-        size += 8
-        return size
-    elif hasattr(obj, "bool_value"):
-        size += 1
-        return size
-
-    if hasattr(obj, "agent_attributes"):
-        size += sum(len(k) + get_deep_size_protobuf(v) for k, v in obj.agent_attributes.items())
-    if hasattr(obj, "user_attributes"):
-        size += sum(len(k) + get_deep_size_protobuf(v) for k, v in obj.user_attributes.items())
-    if hasattr(obj, "intrinsics"):
-        size += sum(len(k) + get_deep_size_protobuf(v) for k, v in obj.intrinsics.items())
-    else:
-        size += 8
-
-    return size
-
-
 class StreamBuffer:
     def __init__(self, maxlen, batching=False):
         self._queue = collections.deque(maxlen=maxlen)
         self._notify = self.condition()
         self._shutdown = False
         self._seen = 0
         self._dropped = 0
-        self._bytes = 0
-        self._ct_processing_time = 0
         self._settings = None
 
         self.batching = batching
@@ -115,8 +51,6 @@ def put(self, item):
             return
 
         self._seen += 1
-        _logger.debug(f"{item.intrinsics['name']} [{len(item.intrinsics)}, {len(item.user_attributes)}, {len(item.agent_attributes)}] {get_deep_size_protobuf(item)}")
-        self._bytes += get_deep_size_protobuf(item)
 
         # NOTE: dropped can be over-counted as the queue approaches
         # capacity while data is still being transmitted.
@@ -133,10 +67,8 @@ def stats(self):
         with self._notify:
             seen, dropped = self._seen, self._dropped
             self._seen, self._dropped = 0, 0
-            _bytes, ct_processing_time = self._bytes, self._ct_processing_time
-            self._bytes, self._ct_processing_time = 0, 0
 
-        return seen, dropped, _bytes, ct_processing_time
+        return seen, dropped
 
     def __bool__(self):
         return bool(self._queue)
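
With the prototype byte and timing counters stripped out, StreamBuffer.stats() is back to a two-value snapshot-and-reset contract. A minimal consumer sketch under that contract (the spans iterable and its items are stand-ins, not real span protos):

    buffer = StreamBuffer(maxlen=10000)
    for span in spans:  # hypothetical iterable of span protos
        buffer.put(span)
    # stats() snapshots and resets the counters under the condition lock.
    seen, dropped = buffer.stats()
    sent = seen - dropped  # mirrors the accounting in Application.harvest()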

newrelic/config.py

Lines changed: 4 additions & 0 deletions
@@ -409,6 +409,10 @@ def _process_configuration(section):
     _process_setting(section, "distributed_tracing.exclude_newrelic_header", "getboolean", None)
     _process_setting(section, "distributed_tracing.sampler.remote_parent_sampled", "get", None)
     _process_setting(section, "distributed_tracing.sampler.remote_parent_not_sampled", "get", None)
+    _process_setting(section, "distributed_tracing.sampler.partial_granularity.enabled", "getboolean", None)
+    _process_setting(section, "distributed_tracing.sampler.partial_granularity.type", "get", None)
+    _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_sampled", "get", None)
+    _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled", "get", None)
     _process_setting(section, "span_events.enabled", "getboolean", None)
     _process_setting(section, "span_events.max_samples_stored", "getint", None)
     _process_setting(section, "span_events.attributes.enabled", "getboolean", None)

newrelic/core/application.py

Lines changed: 2 additions & 12 deletions
@@ -506,11 +506,7 @@ def connect_to_data_collector(self, activate_agent):
             sampling_target_period = 60.0
         else:
             sampling_target_period = configuration.sampling_target_period_in_seconds
-        sampling_target = configuration.sampling_target
-        # If span reduction is enabled double the transaction reservoir size.
-        if configuration.distributed_tracing.drop_inprocess_spans.enabled or configuration.distributed_tracing.unique_spans.enabled:
-            sampling_target = configuration.sampling_target*2
-        self.adaptive_sampler = AdaptiveSampler(sampling_target, sampling_target_period)
+        self.adaptive_sampler = AdaptiveSampler(configuration.sampling_target, sampling_target_period)
 
         active_session.connect_span_stream(self._stats_engine.span_stream, self.record_custom_metric)
 
@@ -1356,14 +1352,11 @@ def harvest(self, shutdown=False, flexible=False):
             span_stream = stats.span_stream
             # Only merge stats as part of default harvest
             if span_stream is not None and not flexible:
-                spans_seen, spans_dropped, _bytes, ct_processing_time = span_stream.stats()
+                spans_seen, spans_dropped = span_stream.stats()
                 spans_sent = spans_seen - spans_dropped
 
                 internal_count_metric("Supportability/InfiniteTracing/Span/Seen", spans_seen)
                 internal_count_metric("Supportability/InfiniteTracing/Span/Sent", spans_sent)
-                print(f"spans sent: {spans_sent}")
-                internal_count_metric("Supportability/InfiniteTracing/Bytes/Seen", _bytes)
-                internal_count_metric("Supportability/CoreTracing/TotalTime", ct_processing_time*1000)  # Time in ms.
             else:
                 spans = stats.span_events
                 if spans:
@@ -1380,9 +1373,6 @@ def harvest(self, shutdown=False, flexible=False):
                     spans_sampled = spans.num_samples
                     internal_count_metric("Supportability/SpanEvent/TotalEventsSeen", spans_seen)
                     internal_count_metric("Supportability/SpanEvent/TotalEventsSent", spans_sampled)
-                    print(f"spans sent: {spans_sampled}")
-                    internal_count_metric("Supportability/DistributedTracing/Bytes/Seen", spans.bytes)
-                    internal_count_metric("Supportability/SpanEvent/TotalCoreTracingTime", spans.ct_processing_time*1000)  # Time in ms.
 
                     stats.reset_span_events()
 
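
The harvest path now reports only the seen/sent pair for Infinite Tracing. A worked example of the delta math with illustrative numbers (per the NOTE in streaming_utils, dropped can be over-counted near capacity, so sent is a conservative figure):

    seen, dropped = span_stream.stats()  # e.g. (1200, 50) over one harvest cycle
    sent = seen - dropped                # 1150
    internal_count_metric("Supportability/InfiniteTracing/Span/Seen", seen)
    internal_count_metric("Supportability/InfiniteTracing/Span/Sent", sent)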

newrelic/core/config.py

Lines changed: 13 additions & 13 deletions
@@ -333,19 +333,11 @@ class DistributedTracingSettings(Settings):
     pass
 
 
-class DistributedTracingDropInprocessSpansSettings(Settings):
-    pass
-
-
-class DistributedTracingUniqueSpansSettings(Settings):
-    pass
-
-
-class DistributedTracingMinimizeAttributesSettings(Settings):
+class DistributedTracingSamplerSettings(Settings):
     pass
 
 
-class DistributedTracingSamplerSettings(Settings):
+class DistributedTracingSamplerPartialGranularitySettings(Settings):
     pass
 
 
@@ -522,6 +514,7 @@ class EventHarvestConfigHarvestLimitSettings(Settings):
 _settings.distributed_tracing.unique_spans = DistributedTracingUniqueSpansSettings()
 _settings.distributed_tracing.minimize_attributes = DistributedTracingMinimizeAttributesSettings()
 _settings.distributed_tracing.sampler = DistributedTracingSamplerSettings()
+_settings.distributed_tracing.sampler.partial_granularity = DistributedTracingSamplerPartialGranularitySettings()
 _settings.error_collector = ErrorCollectorSettings()
 _settings.error_collector.attributes = ErrorCollectorAttributesSettings()
 _settings.event_harvest_config = EventHarvestConfigSettings()
@@ -852,15 +845,22 @@ def default_otlp_host(host):
 _settings.ml_insights_events.enabled = False
 
 _settings.distributed_tracing.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_ENABLED", default=True)
-_settings.distributed_tracing.drop_inprocess_spans.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_DROP_INPROCESS_SPANS_ENABLED", default=False)
-_settings.distributed_tracing.unique_spans.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_UNIQUE_SPANS_ENABLED", default=False)
-_settings.distributed_tracing.minimize_attributes.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_MINIMIZE_ATTRIBUTES_ENABLED", default=False)
 _settings.distributed_tracing.sampler.remote_parent_sampled = os.environ.get(
     "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED", "default"
 )
 _settings.distributed_tracing.sampler.remote_parent_not_sampled = os.environ.get(
     "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_NOT_SAMPLED", "default"
 )
+_settings.distributed_tracing.sampler.partial_granularity.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_ENABLED", default=False)
+_settings.distributed_tracing.sampler.partial_granularity.type = os.environ.get(
+    "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_TYPE", "essential"
+)
+_settings.distributed_tracing.sampler.partial_granularity.remote_parent_sampled = os.environ.get(
+    "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_SAMPLED", "default"
+)
+_settings.distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled = os.environ.get(
+    "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED", "default"
+)
 _settings.distributed_tracing.exclude_newrelic_header = False
 _settings.span_events.enabled = _environ_as_bool("NEW_RELIC_SPAN_EVENTS_ENABLED", default=True)
 _settings.span_events.max_samples_stored = _environ_as_int(
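
The new settings read their defaults from the environment at module import, so they can be exercised without touching an ini file. A sketch of a hypothetical test setup (the variables must be set before newrelic.core.config is imported, and the direct _settings import is for illustration only):

    import os

    os.environ["NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_ENABLED"] = "true"
    os.environ["NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_TYPE"] = "essential"
    # Both remote-parent knobs fall back to "default" when unset.

    from newrelic.core.config import _settings
    assert _settings.distributed_tracing.sampler.partial_granularity.enabled is True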

newrelic/core/node_mixin.py

Lines changed: 4 additions & 11 deletions
@@ -49,7 +49,7 @@ def get_trace_segment_params(self, settings, params=None):
             _params["exclusive_duration_millis"] = 1000.0 * self.exclusive
         return _params
 
-    def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None, ct_processing_time=None):
+    def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None):
         if ct_exit_spans is None:
             ct_exit_spans = {}
         i_attrs = (base_attrs and base_attrs.copy()) or attr_class()
@@ -79,20 +79,16 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic
             u_attrs, settings.attribute_filter, DST_SPAN_EVENTS, attr_class=attr_class
         )
 
-        start_time = time.time()
         if settings.distributed_tracing.drop_inprocess_spans.enabled or settings.distributed_tracing.unique_spans.enabled:
             exit_span_attrs_present = attribute.SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES & set(a_attrs)
             # If this is the entry node, always return it.
             if i_attrs.get("nr.entryPoint"):
-                ct_processing_time[0] += (time.time() - start_time)
                 return [i_attrs, u_attrs, {}] if settings.distributed_tracing.minimize_attributes.enabled else [i_attrs, u_attrs, a_attrs]
             # If this is the an LLM node, always return it.
             if a_attrs.get("llm") or i_attrs["name"].startswith("Llm/"):
-                ct_processing_time[0] += (time.time() - start_time)
                 return [i_attrs, u_attrs, {"llm": True}] if settings.distributed_tracing.minimize_attributes.enabled else [i_attrs, u_attrs, a_attrs]
             # If the span is not an exit span, skip it by returning None.
             if not exit_span_attrs_present:
-                ct_processing_time[0] += (time.time() - start_time)
                 return None
             # If the span is an exit span but unique spans is enabled, we need to check
             # for uniqueness before returning it.
@@ -107,25 +103,22 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic
             if new_exit_span:
                 u_attrs["nr.durations"] = self.duration
                 ct_exit_spans[span_attrs] = [u_attrs]
-                ct_processing_time[0] += (time.time() - start_time)
                 return [i_attrs, u_attrs, a_minimized_attrs] if settings.distributed_tracing.minimize_attributes.enabled else [i_attrs, u_attrs, a_attrs]
             # If this is an exit span we've already seen, add it's guid to the list
             # of ids on the seen span and return None.
             # For now add ids to user attributes list
             ct_exit_spans[span_attrs][0]["nr.ids"].append(self.guid)
             ct_exit_spans[span_attrs][0]["nr.durations"] += self.duration
 
-            ct_processing_time[0] += (time.time() - start_time)
             return None
         elif settings.distributed_tracing.minimize_attributes.enabled:
             # Drop all non-entity relationship attributes from the span.
             exit_span_attrs_present = attribute.SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES & set(a_attrs)
             a_attrs = attr_class({key: a_attrs[key] for key in exit_span_attrs_present})
-            ct_processing_time[0] += (time.time() - start_time)
         return [i_attrs, u_attrs, a_attrs]
 
-    def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None, ct_processing_time=None):
-        span = self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, ct_exit_spans=ct_exit_spans, ct_processing_time=ct_processing_time)
+    def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None):
+        span = self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, ct_exit_spans=ct_exit_spans)
         parent_id = parent_guid
         if span:  # span will be None if the span is an inprocess span or repeated exit span.
             yield span
@@ -134,7 +127,7 @@ def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=di
             parent_id = self.guid
         for child in self.children:
             for event in child.span_events(  # noqa: UP028
-                settings, base_attrs=base_attrs, parent_guid=parent_id, attr_class=attr_class, ct_exit_spans=ct_exit_spans, ct_processing_time=ct_processing_time
+                settings, base_attrs=base_attrs, parent_guid=parent_id, attr_class=attr_class, ct_exit_spans=ct_exit_spans
             ):
                 if event:  # event will be None if the span is an inprocess span or repeated exit span.
                     yield event
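
After the cleanup, span_events() is a plain depth-first generator: yield the current node's event when span_event() produces one, then recurse into children with this node's guid as their parent. Skipped spans (span_event() returning None) leave parent_id pointing at the nearest kept ancestor. A toy re-implementation of just that traversal shape (Node and its attributes are stand-ins for the agent's node mixin, not its actual API):

    class Node:
        def __init__(self, guid, children=()):
            self.guid = guid
            self.children = list(children)

        def span_event(self):
            # Stand-in: the real method returns None for in-process
            # or repeated exit spans, which re-parents their children.
            return {"guid": self.guid}

        def span_events(self, parent_guid=None):
            span = self.span_event()
            parent_id = parent_guid
            if span:  # None means the span was skipped
                span["parentId"] = parent_id
                yield span
                parent_id = self.guid  # kept spans become the new parent
            for child in self.children:
                yield from child.span_events(parent_guid=parent_id)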

newrelic/core/stats_engine.py

Lines changed: 6 additions & 31 deletions
@@ -35,7 +35,7 @@
 from newrelic.common.encoding_utils import json_encode
 from newrelic.common.metric_utils import create_metric_identity
 from newrelic.common.object_names import parse_exc_info
-from newrelic.common.streaming_utils import StreamBuffer, get_deep_size
+from newrelic.common.streaming_utils import StreamBuffer
 from newrelic.core.attribute import (
     MAX_LOG_MESSAGE_LENGTH,
     create_agent_attributes,
@@ -445,26 +445,6 @@ def merge(self, other_data_set, priority=None):
         self.num_seen += other_data_set.num_seen - other_data_set.num_samples
 
 
-class SpanSampledDataSet(SampledDataSet):
-    def __init__(self, capacity=100):
-        super().__init__(capacity=capacity)
-        self.ct_processing_time = 0
-        self.bytes = 0
-
-    def add(self, sample, priority=None):
-        super().add(sample=sample, priority=priority)
-        _logger.debug(f"{sample[0]['name']} [{len(sample[0])}, {len(sample[1])}, {len(sample[2])}] {get_deep_size(sample)}")
-        self.bytes += get_deep_size(sample)
-
-    def reset(self):
-        super().reset()
-        self.ct_processing_time = 0
-
-    def merge(self, other_data_set, priority=None):
-        super().merge(other_data_set=other_data_set, priority=priority)
-        self.ct_processing_time += other_data_set.ct_processing_time
-
-
 class LimitedDataSet(list):
     def __init__(self, capacity=200):
         super().__init__()
@@ -548,7 +528,7 @@ def __init__(self):
         self._error_events = SampledDataSet()
         self._custom_events = SampledDataSet()
         self._ml_events = SampledDataSet()
-        self._span_events = SpanSampledDataSet()
+        self._span_events = SampledDataSet()
         self._log_events = SampledDataSet()
         self._span_stream = None
         self.__sql_stats_table = {}
@@ -1205,16 +1185,11 @@ def record_transaction(self, transaction):
 
         if settings.distributed_tracing.enabled and settings.span_events.enabled and settings.collect_span_events:
             if settings.infinite_tracing.enabled:
-                ct_processing_time = [0]  # Hack for getting Python to create a non mutable number.
-                for event in transaction.span_protos(settings, ct_processing_time=ct_processing_time):
+                for event in transaction.span_protos(settings):
                     self._span_stream.put(event)
-                self._span_stream._ct_processing_time += ct_processing_time[0]
             elif transaction.sampled:
-                ct_processing_time = [0]  # Hack for getting Python to create a non mutable number.
-                for event in transaction.span_events(self.__settings, ct_processing_time=ct_processing_time):
+                for event in transaction.span_events(self.__settings):
                     self._span_events.add(event, priority=transaction.priority)
-                self._span_events.ct_processing_time += ct_processing_time[0]
-
 
         # Merge in log events
 
@@ -1753,9 +1728,9 @@ def reset_ml_events(self):
 
     def reset_span_events(self):
         if self.__settings is not None:
-            self._span_events = SpanSampledDataSet(self.__settings.event_harvest_config.harvest_limits.span_event_data)
+            self._span_events = SampledDataSet(self.__settings.span_events.max_samples_stored)
         else:
-            self._span_events = SpanSampledDataSet()
+            self._span_events = SampledDataSet()
 
     def reset_log_events(self):
         if self.__settings is not None:
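
The deleted ct_processing_time = [0] lines used a single-element list as a shared mutable cell: numbers are immutable in Python, so rebinding one inside a callee is invisible to the caller, while mutating slot 0 of a list is visible everywhere the list is shared. A minimal illustration of the idiom this commit removes (timed_step is a hypothetical example, not agent code):

    import time

    def timed_step(acc):
        start = time.time()
        # ... do some work ...
        acc[0] += time.time() - start  # mutates the shared cell in place

    acc = [0]  # a list gives callees a slot they can update
    timed_step(acc)
    timed_step(acc)
    total_seconds = acc[0]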
