Skip to content

Commit 7935f1a

Browse files
bric3amarziali
authored and committed
chore(css): Use a map to create the metric key
Also delays the creation of the Utf8ByteString at serialization time. Note that `writer.writeUTF8` emits a header corresponding to the length of the value being written, calling this method again will count as an entry in the array. A possible idea was to use a cache to store the computation of the peerTags, however `Utf8ByteString` is not concatenable / appendable, which is necessary to have the proper encoding. Creating a new "temporary" `Utf8ByteString` was replaced by a direct call to `String::getBytes`.
1 parent de8eda0 commit 7935f1a

File tree

8 files changed

+57
-46
lines changed

8 files changed

+57
-46
lines changed

dd-trace-core/src/main/java/datadog/trace/common/metrics/ConflatingMetricsAggregator.java

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,8 @@
2525
import datadog.trace.core.DDTraceCoreInfo;
2626
import datadog.trace.core.monitor.HealthMetrics;
2727
import datadog.trace.util.AgentTaskScheduler;
28-
import datadog.trace.util.TraceUtils;
29-
import java.util.ArrayList;
3028
import java.util.Collections;
29+
import java.util.HashMap;
3130
import java.util.List;
3231
import java.util.Map;
3332
import java.util.Queue;
@@ -293,7 +292,7 @@ private boolean publish(CoreSpan<?> span, boolean isTopLevel) {
293292
// returning false means that either the batch can't take any
294293
// more data, or it has already been consumed
295294
if (batch.add(tag, durationNanos)) {
296-
// added to a pending batch prior to consumption
295+
// added to a pending batch prior to consumption,
297296
// so skip publishing to the queue (we also know
298297
// the key isn't rare enough to override the sampler)
299298
return false;
@@ -313,12 +312,12 @@ private boolean publish(CoreSpan<?> span, boolean isTopLevel) {
313312
return isNewKey || span.getError() > 0;
314313
}
315314

316-
private List<CharSequence> getPeerTags(CoreSpan<?> span) {
317-
List<CharSequence> peerTags = new ArrayList<>();
315+
private Map<String, String> getPeerTags(CoreSpan<?> span) {
316+
Map<String, String> peerTags = new HashMap<>();
318317
for (String peerTag : features.peerTags()) {
319318
Object value = span.getTag(peerTag);
320319
if (value != null) {
321-
peerTags.add(peerTag + ":" + TraceUtils.normalizeTag(value.toString()));
320+
peerTags.put(peerTag, value.toString());
322321
}
323322
}
324323
return peerTags;

dd-trace-core/src/main/java/datadog/trace/common/metrics/MetricKey.java

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
import static datadog.trace.bootstrap.instrumentation.api.UTF8BytesString.EMPTY;
44

55
import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString;
6-
import java.util.Arrays;
7-
import java.util.List;
6+
import java.util.Collections;
7+
import java.util.Map;
88

99
/** The aggregation key for tracked metrics. */
1010
public final class MetricKey {
@@ -17,7 +17,7 @@ public final class MetricKey {
1717
private final int hash;
1818
private final boolean isTraceRoot;
1919
private final UTF8BytesString spanKind;
20-
private final UTF8BytesString[] peerTags;
20+
private final Map<String, String> peerTags;
2121

2222
public MetricKey(
2323
CharSequence resource,
@@ -28,7 +28,7 @@ public MetricKey(
2828
boolean synthetics,
2929
boolean isTraceRoot,
3030
CharSequence spanKind,
31-
List<CharSequence> peerTags) {
31+
Map<String, String> peerTags) {
3232
this.resource = null == resource ? EMPTY : UTF8BytesString.create(resource);
3333
this.service = null == service ? EMPTY : UTF8BytesString.create(service);
3434
this.operationName = null == operationName ? EMPTY : UTF8BytesString.create(operationName);
@@ -37,21 +37,19 @@ public MetricKey(
3737
this.synthetics = synthetics;
3838
this.isTraceRoot = isTraceRoot;
3939
this.spanKind = null == spanKind ? EMPTY : UTF8BytesString.create(spanKind);
40-
this.peerTags = new UTF8BytesString[peerTags.size()];
41-
for (int i = 0; i < peerTags.size(); i++) {
42-
this.peerTags[i] = UTF8BytesString.create(peerTags.get(i));
43-
}
40+
this.peerTags = peerTags == null ? Collections.emptyMap() : peerTags;
4441

4542
// Unrolled polynomial hashcode to avoid varargs allocation
4643
// and eliminate data dependency between iterations as in Arrays.hashCode.
4744
// Coefficient constants are powers of 31, with integer overflow (hence negative numbers).
4845
// See
4946
// https://richardstartin.github.io/posts/collecting-rocks-and-benchmarks
5047
// https://richardstartin.github.io/posts/still-true-in-java-9-handwritten-hash-codes-are-faster
48+
5149
this.hash =
5250
-196513505 * Boolean.hashCode(this.isTraceRoot)
5351
+ -1807454463 * this.spanKind.hashCode()
54-
+ 887_503_681 * Arrays.hashCode(this.peerTags)
52+
+ 887_503_681 * this.peerTags.hashCode() // possibly unroll here has well.
5553
+ 28_629_151 * this.resource.hashCode()
5654
+ 923_521 * this.service.hashCode()
5755
+ 29791 * this.operationName.hashCode()
@@ -92,7 +90,7 @@ public UTF8BytesString getSpanKind() {
9290
return spanKind;
9391
}
9492

95-
public UTF8BytesString[] getPeerTags() {
93+
public Map<String, String> getPeerTags() {
9694
return peerTags;
9795
}
9896

@@ -112,7 +110,7 @@ public boolean equals(Object o) {
112110
&& type.equals(metricKey.type)
113111
&& isTraceRoot == metricKey.isTraceRoot
114112
&& spanKind.equals(metricKey.spanKind)
115-
&& Arrays.equals(peerTags, metricKey.peerTags);
113+
&& peerTags.equals(metricKey.peerTags);
116114
}
117115
return false;
118116
}

dd-trace-core/src/main/java/datadog/trace/common/metrics/SerializingMetricWriter.java

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,16 @@
11
package datadog.trace.common.metrics;
22

33
import static java.nio.charset.StandardCharsets.ISO_8859_1;
4+
import static java.nio.charset.StandardCharsets.UTF_8;
45

56
import datadog.communication.serialization.GrowableBuffer;
67
import datadog.communication.serialization.WritableFormatter;
78
import datadog.communication.serialization.msgpack.MsgPackWriter;
89
import datadog.trace.api.ProcessTags;
910
import datadog.trace.api.WellKnownTags;
1011
import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString;
12+
import datadog.trace.util.TraceUtils;
13+
import java.util.Map;
1114

1215
public final class SerializingMetricWriter implements MetricWriter {
1316

@@ -123,10 +126,19 @@ public void add(MetricKey key, AggregateMetric aggregate) {
123126
writer.writeUTF8(key.getSpanKind());
124127

125128
writer.writeUTF8(PEER_TAGS);
126-
UTF8BytesString[] peerTags = key.getPeerTags();
127-
writer.startArray(peerTags.length);
128-
for (UTF8BytesString peerTag : peerTags) {
129-
writer.writeUTF8(peerTag);
129+
Map<String, String> peerTags = key.getPeerTags();
130+
writer.startArray(peerTags.size());
131+
132+
StringBuilder peerTagBuilder = new StringBuilder();
133+
for (Map.Entry<String, String> peerTag : peerTags.entrySet()) {
134+
peerTagBuilder.setLength(0);
135+
String toWrite =
136+
peerTagBuilder
137+
.append(peerTag.getKey())
138+
.append(':')
139+
.append(TraceUtils.normalizeTag(peerTag.getValue()))
140+
.toString();
141+
writer.writeUTF8(toWrite.getBytes(UTF_8));
130142
}
131143

132144
writer.writeUTF8(HITS);

dd-trace-core/src/test/groovy/datadog/trace/common/metrics/AggregateMetricTest.groovy

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class AggregateMetricTest extends DDSpecification {
5151
given:
5252
AggregateMetric aggregate = new AggregateMetric().recordDurations(3, new AtomicLongArray(0L, 0L, 0L | ERROR_TAG | TOP_LEVEL_TAG))
5353

54-
Batch batch = new Batch().reset(new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", ["grault"]))
54+
Batch batch = new Batch().reset(new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", ["grault":"quux"]))
5555
batch.add(0L, 10)
5656
batch.add(0L, 10)
5757
batch.add(0L, 10)
@@ -126,7 +126,7 @@ class AggregateMetricTest extends DDSpecification {
126126
def "consistent under concurrent attempts to read and write"() {
127127
given:
128128
AggregateMetric aggregate = new AggregateMetric()
129-
MetricKey key = new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", ["grault"])
129+
MetricKey key = new MetricKey("foo", "bar", "qux", "type", 0, false, true, "corge", ["grault":"quux"])
130130
BlockingDeque<Batch> queue = new LinkedBlockingDeque<>(1000)
131131
ExecutorService reader = Executors.newSingleThreadExecutor()
132132
int writerCount = 10

dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
128128
false,
129129
true,
130130
"baz",
131-
[]
131+
[:]
132132
), _) >> { MetricKey key, AggregateMetric value ->
133133
value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100
134134
}
@@ -171,7 +171,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
171171
false,
172172
false,
173173
kind,
174-
[]
174+
[:]
175175
), { AggregateMetric aggregateMetric ->
176176
aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100
177177
})
@@ -224,7 +224,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
224224
false,
225225
false,
226226
"grault",
227-
["country:france"]
227+
["country":"france"]
228228
), { AggregateMetric aggregateMetric ->
229229
aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100
230230
})
@@ -238,7 +238,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
238238
false,
239239
false,
240240
"grault",
241-
["country:france", "georegion:europe"]
241+
["country":"france", "georegion":"europe"]
242242
), { AggregateMetric aggregateMetric ->
243243
aggregateMetric.getHitCount() == 1 && aggregateMetric.getTopLevelCount() == 0 && aggregateMetric.getDuration() == 100
244244
})
@@ -280,7 +280,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
280280
false,
281281
topLevel,
282282
"baz",
283-
[]
283+
[:]
284284
), { AggregateMetric value ->
285285
value.getHitCount() == 1 && value.getTopLevelCount() == topLevelCount && value.getDuration() == 100
286286
})
@@ -336,7 +336,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
336336
false,
337337
false,
338338
"baz",
339-
[]
339+
[:]
340340
), { AggregateMetric value ->
341341
value.getHitCount() == count && value.getDuration() == count * duration
342342
})
@@ -349,7 +349,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
349349
false,
350350
false,
351351
"baz",
352-
[]
352+
[:]
353353
), { AggregateMetric value ->
354354
value.getHitCount() == count && value.getDuration() == count * duration * 2
355355
})
@@ -399,7 +399,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
399399
false,
400400
true,
401401
"baz",
402-
[]
402+
[:]
403403
), _) >> { MetricKey key, AggregateMetric value ->
404404
value.getHitCount() == 1 && value.getDuration() == duration
405405
}
@@ -413,7 +413,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
413413
false,
414414
true,
415415
"baz",
416-
[]
416+
[:]
417417
), _)
418418
1 * writer.finishBucket() >> { latch.countDown() }
419419

@@ -459,7 +459,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
459459
false,
460460
true,
461461
"baz",
462-
[]
462+
[:]
463463
), { AggregateMetric value ->
464464
value.getHitCount() == 1 && value.getDuration() == duration
465465
})
@@ -490,7 +490,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
490490
false,
491491
true,
492492
"baz",
493-
[]
493+
[:]
494494
),{ AggregateMetric value ->
495495
value.getHitCount() == 1 && value.getDuration() == duration
496496
})
@@ -504,7 +504,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
504504
false,
505505
true,
506506
"baz",
507-
[]
507+
[:]
508508
), _)
509509
1 * writer.finishBucket() >> { latch.countDown() }
510510

@@ -550,7 +550,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
550550
false,
551551
true,
552552
"quux",
553-
[]
553+
[:]
554554
), { AggregateMetric value ->
555555
value.getHitCount() == 1 && value.getDuration() == duration
556556
})
@@ -606,7 +606,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification {
606606
false,
607607
true,
608608
"garply",
609-
[]
609+
[:]
610610
), { AggregateMetric value ->
611611
value.getHitCount() == 1 && value.getDuration() == duration
612612
})

dd-trace-core/src/test/groovy/datadog/trace/common/metrics/FootprintForkedTest.groovy

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,7 @@ class FootprintForkedTest extends DDSpecification {
4040
1000,
4141
1000,
4242
100,
43-
SECONDS
44-
)
43+
SECONDS)
4544
// Removing the 'features' as it's a mock, and mocks are heavyweight, e.g. around 22MiB
4645
def baseline = footprint(aggregator, features)
4746
aggregator.start()

dd-trace-core/src/test/groovy/datadog/trace/common/metrics/SerializingMetricWriterTest.groovy

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ class SerializingMetricWriterTest extends DDSpecification {
5353
false,
5454
false,
5555
"client",
56-
["country:canada", "georegion:amer", "peer.service:remote-service"]
56+
["country":"canada", "georegion":"amer", "peer.service":"remote-service"]
5757
),
5858
new AggregateMetric().recordDurations(10, new AtomicLongArray(1L))
5959
),
@@ -67,7 +67,7 @@ class SerializingMetricWriterTest extends DDSpecification {
6767
true,
6868
false,
6969
"producer",
70-
["country:canada", "georegion:amer", "peer.service:remote-service"]
70+
["country":"canada", "georegion":"amer", "peer.service":"remote-service"]
7171
),
7272
new AggregateMetric().recordDurations(9, new AtomicLongArray(1L))
7373
)
@@ -83,7 +83,7 @@ class SerializingMetricWriterTest extends DDSpecification {
8383
false,
8484
false,
8585
"producer",
86-
["messaging.destination" + i]
86+
["messaging.destination" : "dest" + i]
8787
),
8888
new AggregateMetric().recordDurations(10, new AtomicLongArray(1L))
8989
)
@@ -175,9 +175,12 @@ class SerializingMetricWriterTest extends DDSpecification {
175175
++elementCount
176176
assert unpacker.unpackString() == "PeerTags"
177177
int peerTagsLength = unpacker.unpackArrayHeader()
178-
assert peerTagsLength == key.getPeerTags().length
178+
assert peerTagsLength == key.getPeerTags().size()
179179
for (int i = 0; i < peerTagsLength; i++) {
180-
assert unpacker.unpackString() == key.getPeerTags()[i] as String
180+
def string = unpacker.unpackString()
181+
def separatorPos = string.indexOf(':')
182+
def tagVal = key.getPeerTags()[string.substring(0, separatorPos)]
183+
assert tagVal == string.substring(separatorPos + 1)
181184
}
182185
++elementCount
183186
assert unpacker.unpackString() == "Hits"

dd-trace-core/src/traceAgentTest/groovy/MetricsIntegrationTest.groovy

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,11 +34,11 @@ class MetricsIntegrationTest extends AbstractTraceAgentTest {
3434
)
3535
writer.startBucket(2, System.nanoTime(), SECONDS.toNanos(10))
3636
writer.add(
37-
new MetricKey("resource1", "service1", "operation1", "sql", 0, false, true, "xyzzy", ["grault"]),
37+
new MetricKey("resource1", "service1", "operation1", "sql", 0, false, true, "xyzzy", ["grault":"quux"]),
3838
new AggregateMetric().recordDurations(5, new AtomicLongArray(2, 1, 2, 250, 4, 5))
3939
)
4040
writer.add(
41-
new MetricKey("resource2", "service2", "operation2", "web", 200, false, true, "xyzzy", ["grault"]),
41+
new MetricKey("resource2", "service2", "operation2", "web", 200, false, true, "xyzzy", ["grault":"quux"]),
4242
new AggregateMetric().recordDurations(10, new AtomicLongArray(1, 1, 200, 2, 3, 4, 5, 6, 7, 8, 9))
4343
)
4444
writer.finishBucket()

0 commit comments

Comments (0)