Skip to content

Commit 0c4c355

Browse files
committed
Fixup timing
1 parent cf01684 commit 0c4c355

File tree

4 files changed

+69
-59
lines changed

4 files changed

+69
-59
lines changed

newrelic/common/streaming_utils.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,12 +63,9 @@ def put(self, item):
6363
# being measured.
6464
if len(self._queue) >= self._queue.maxlen:
6565
self._dropped += 1
66-
print(f"dropped: {self._dropped}")
6766

6867
self._queue.append(item)
6968
self._notify.notify_all()
70-
print(f"seen: {self._seen}")
71-
print(f"bytes: {self._bytes}")
7269

7370
def stats(self):
7471
with self._notify:

newrelic/core/application.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1384,9 +1384,13 @@ def harvest(self, shutdown=False, flexible=False):
13841384
spans_seen = spans.num_seen
13851385
spans_sampled = spans.num_samples
13861386
internal_count_metric("Supportability/SpanEvent/TotalEventsSeen", spans_seen)
1387+
print(f"Supportability/SpanEvent/TotalEventsSeen: {spans_seen}")
13871388
internal_count_metric("Supportability/SpanEvent/TotalEventsSent", spans_sampled)
1389+
print(f"Supportability/SpanEvent/TotalEventsSent: {spans_sampled}")
13881390
internal_count_metric("Supportability/DistributedTracing/Bytes/Seen", spans.bytes)
1391+
print(f"Supportability/DistributedTracing/Bytes/Seen: {spans.bytes}")
13891392
internal_count_metric("Supportability/SpanEvent/TotalCoreTracingTime", spans.ct_processing_time*1000) # Time in ms.
1393+
print(f"Supportability/SpanEvent/TotalCoreTracingTime: {spans.ct_processing_time*1000}") # Time in ms.
13901394

13911395
stats.reset_span_events()
13921396

newrelic/core/node_mixin.py

Lines changed: 31 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -62,9 +62,10 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic
6262
i_attrs["timestamp"] = int(self.start_time * 1000)
6363
i_attrs["duration"] = self.duration
6464
i_attrs["category"] = "generic"
65+
# TODO: limit intrinsic attributes but this likely requires changes in the pipeline.
6566
#if settings.core_tracing.enabled:
66-
#i_ct_attrs = {"type", "name", "guid", "parentId", "transaction.name", "traceId", "nr.entryPoint", "transactionId"}
67-
#i_attrs = {key: value for key, value in i_attrs.items() if key in i_ct_attrs}
67+
# i_ct_attrs = {"type", "name", "guid", "parentId", "transaction.name", "traceId", "timestamp", "duration", "nr.entryPoint", "transactionId"}
68+
# i_attrs = {key: value for key, value in i_attrs.items() if key in i_ct_attrs}
6869

6970
if parent_guid:
7071
i_attrs["parentId"] = parent_guid
@@ -81,32 +82,40 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic
8182
u_attrs, settings.attribute_filter, DST_SPAN_EVENTS, attr_class=attr_class
8283
)
8384

85+
start_time = time.time()
8486
if settings.core_tracing.drop_inprocess_spans or settings.core_tracing.enabled:
8587
exit_span_attrs_present = attribute.SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES & set(a_attrs)
8688
if self.__class__.__name__ == "RootNode":
87-
return [i_attrs, u_attrs, a_attrs]
89+
if settings.core_tracing.enabled:
90+
ct_processing_time[0] += (time.time() - start_time)
91+
return [i_attrs, {}, {}]
92+
else:
93+
ct_processing_time[0] += (time.time() - start_time)
94+
return [i_attrs, u_attrs, a_attrs]
8895
if not exit_span_attrs_present:
96+
ct_processing_time[0] += (time.time() - start_time)
8997
return None
90-
if settings.core_tracing.enabled:
91-
start_time = time.time()
92-
# TODO: use attr value name rather than str casting.
93-
span_attrs = "".join([str(a_attrs[key]) for key in exit_span_attrs_present])
94-
new_exit_span = span_attrs not in ct_exit_spans
95-
if new_exit_span:
96-
ct_exit_spans[span_attrs] = self.ids
97-
else:
98-
# For now add ids to user attributes list
99-
ct_exit_spans[span_attrs].append(self.guid)
100-
ct_processing_time += (time.time() - start_time)
101-
if new_exit_span:
102-
return [i_attrs, u_attrs, a_attrs]
103-
return None
98+
if settings.core_tracing.enabled:
99+
a_attrs = attr_class({key: a_attrs[key] for key in exit_span_attrs_present})
100+
# TODO: use attr value name rather than str casting.
101+
span_attrs = "".join([str(a_attrs[key]) for key in exit_span_attrs_present])
102+
new_exit_span = span_attrs not in ct_exit_spans
103+
if new_exit_span:
104+
ct_exit_spans[span_attrs] = self.ids
105+
else:
106+
# For now add ids to user attributes list
107+
ct_exit_spans[span_attrs].append(self.guid)
108+
if new_exit_span:
109+
ct_processing_time[0] += (time.time() - start_time)
110+
return [i_attrs, u_attrs, a_attrs]
111+
ct_processing_time[0] += (time.time() - start_time)
112+
return None
113+
ct_processing_time[0] += (time.time() - start_time)
104114
return [i_attrs, u_attrs, a_attrs]
105115

106-
def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None, ct_processing_time=0):
116+
def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, ct_exit_spans=None, ct_processing_time=None):
107117
if settings.core_tracing.drop_inprocess_spans or settings.core_tracing.enabled:
108118
span = self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, ct_exit_spans=ct_exit_spans, ct_processing_time=ct_processing_time)
109-
start_time = time.time()
110119
parent_id = parent_guid
111120
if span: # span will be None if the span is an inprocess span or repeated exit span.
112121
yield span
@@ -118,11 +127,12 @@ def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=di
118127
if event: # event will be None if the span is an inprocess span or repeated exit span.
119128
yield event
120129
else:
121-
yield self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class)
130+
span = self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, ct_processing_time=ct_processing_time)
131+
yield span
122132

123133
for child in self.children:
124134
for event in child.span_events( # noqa: UP028
125-
settings, base_attrs=base_attrs, parent_guid=self.guid, attr_class=attr_class
135+
settings, base_attrs=base_attrs, parent_guid=self.guid, attr_class=attr_class, ct_processing_time=ct_processing_time
126136
):
127137
yield event
128138

newrelic/core/stats_engine.py

Lines changed: 34 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,26 @@
5555
from newrelic.core.metric import TimeMetric
5656
from newrelic.core.stack_trace import exception_stack
5757

58+
def get_deep_size(obj, seen=None):
59+
"""Recursively calculates the size of an object including nested lists and dicts."""
60+
if seen is None:
61+
seen = set()
62+
63+
# Avoid recursion for already seen objects (handle circular references)
64+
obj_id = id(obj)
65+
if obj_id in seen:
66+
return 0
67+
seen.add(obj_id)
68+
69+
size = sys.getsizeof(obj)
70+
71+
if isinstance(obj, dict):
72+
size += sum(get_deep_size(k, seen) + get_deep_size(v, seen) for k, v in obj.items())
73+
elif isinstance(obj, (list, tuple, set, frozenset)):
74+
size += sum(get_deep_size(i, seen) for i in obj)
75+
76+
return size
77+
5878
_logger = logging.getLogger(__name__)
5979

6080
EVENT_HARVEST_METHODS = {
@@ -449,40 +469,21 @@ def merge(self, other_data_set, priority=None):
449469

450470
class SpanSampledDataSet(SampledDataSet):
451471
def __init__(self, capacity=100):
452-
self.pq = []
453-
self.heap = False
454-
self.capacity = capacity
455-
self.num_seen = 0
456-
self.bytes = 0
472+
super().__init__(capacity=capacity)
457473
self.ct_processing_time = 0
458-
459-
if capacity <= 0:
460-
461-
def add(*args, **kwargs):
462-
self.num_seen += 1
463-
464-
self.add = add
474+
self.bytes = 0
465475

466476
def add(self, sample, priority=None):
467-
self.num_seen += 1
468-
self.bytes += objsize.get_deep_size(sample[:-1])
477+
super().add(sample=sample, priority=priority)
478+
self.bytes += get_deep_size(sample)
469479

470-
if priority is None:
471-
priority = random.random() # noqa: S311
480+
def reset(self):
481+
super().reset()
482+
self.ct_processing_time = 0
472483

473-
entry = (priority, self.num_seen, sample[:-1])
474-
if self.num_seen == self.capacity:
475-
self.pq.append(entry)
476-
self.heap = self.heap or heapify(self.pq) or True
477-
elif not self.heap:
478-
self.pq.append(entry)
479-
else:
480-
sampled = self.should_sample(priority)
481-
if not sampled:
482-
return
483-
heapreplace(self.pq, entry)
484-
if is_ft:
485-
self.ft_sent -= 1
484+
def merge(self, other_data_set, priority=None):
485+
super().merge(other_data_set=other_data_set, priority=priority)
486+
self.ct_processing_time += other_data_set.ct_processing_time
486487

487488

488489
class LimitedDataSet(list):
@@ -1235,17 +1236,15 @@ def record_transaction(self, transaction):
12351236

12361237
if settings.distributed_tracing.enabled and settings.span_events.enabled and settings.collect_span_events:
12371238
if settings.infinite_tracing.enabled:
1238-
ct_processing_time = 0
1239+
ct_processing_time = [0]
12391240
for event in transaction.span_protos(settings, ct_processing_time=ct_processing_time):
1240-
print(event.intrinsics["name"])
12411241
self._span_stream.put(event)
1242-
self._span_stream._ct_processing_time += ct_processing_time
1242+
self._span_stream._ct_processing_time += ct_processing_time[0]
12431243
elif transaction.sampled:
1244-
ct_processing_time = 0
1244+
ct_processing_time = [0]
12451245
for event in transaction.span_events(self.__settings, ct_processing_time=ct_processing_time):
1246-
print(event[0]["name"])
12471246
self._span_events.add(event, priority=transaction.priority)
1248-
self._span_events.ct_processing_time += ct_processing_time
1247+
self._span_events.ct_processing_time += ct_processing_time[0]
12491248

12501249

12511250
# Merge in log events

0 commit comments

Comments (0)