|
| 1 | +/* |
| 2 | + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one |
| 3 | + * or more contributor license agreements. Licensed under the "Elastic License |
| 4 | + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side |
| 5 | + * Public License v 1"; you may not use this file except in compliance with, at |
| 6 | + * your election, the "Elastic License 2.0", the "GNU Affero General Public |
| 7 | + * License v3.0 only", or the "Server Side Public License, v 1". |
| 8 | + */ |
| 9 | + |
| 10 | +package org.elasticsearch.benchmark.exponentialhistogram; |
| 11 | + |
| 12 | +import org.elasticsearch.exponentialhistogram.BucketIterator; |
| 13 | +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; |
| 14 | +import org.elasticsearch.exponentialhistogram.ExponentialHistogramGenerator; |
| 15 | +import org.elasticsearch.exponentialhistogram.ExponentialHistogramMerger; |
| 16 | +import org.openjdk.jmh.annotations.Benchmark; |
| 17 | +import org.openjdk.jmh.annotations.BenchmarkMode; |
| 18 | +import org.openjdk.jmh.annotations.Fork; |
| 19 | +import org.openjdk.jmh.annotations.Measurement; |
| 20 | +import org.openjdk.jmh.annotations.Mode; |
| 21 | +import org.openjdk.jmh.annotations.OutputTimeUnit; |
| 22 | +import org.openjdk.jmh.annotations.Param; |
| 23 | +import org.openjdk.jmh.annotations.Scope; |
| 24 | +import org.openjdk.jmh.annotations.Setup; |
| 25 | +import org.openjdk.jmh.annotations.State; |
| 26 | +import org.openjdk.jmh.annotations.Threads; |
| 27 | +import org.openjdk.jmh.annotations.Warmup; |
| 28 | + |
| 29 | +import java.util.List; |
| 30 | +import java.util.Random; |
| 31 | +import java.util.concurrent.ThreadLocalRandom; |
| 32 | +import java.util.concurrent.TimeUnit; |
| 33 | + |
| 34 | +@BenchmarkMode(Mode.AverageTime) |
| 35 | +@OutputTimeUnit(TimeUnit.NANOSECONDS) |
| 36 | +@Warmup(iterations = 3, time = 3, timeUnit = TimeUnit.SECONDS) |
| 37 | +@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS) |
| 38 | +@Fork(1) |
| 39 | +@Threads(1) |
| 40 | +@State(Scope.Thread) |
| 41 | +public class ExponentialHistogramMergeBench { |
| 42 | + |
| 43 | + @Param({ "1000", "2000", "5000" }) |
| 44 | + int bucketCount; |
| 45 | + |
| 46 | + @Param({ "0.01", "0.1", "0.25", "0.5", "1.0", "2.0" }) |
| 47 | + double mergedHistoSizeFactor; |
| 48 | + |
| 49 | + Random random; |
| 50 | + ExponentialHistogramMerger histoMerger; |
| 51 | + |
| 52 | + ExponentialHistogram[] toMerge = new ExponentialHistogram[10_000]; |
| 53 | + |
| 54 | + int index; |
| 55 | + |
| 56 | + @Setup |
| 57 | + public void setUp() { |
| 58 | + random = ThreadLocalRandom.current(); |
| 59 | + histoMerger = new ExponentialHistogramMerger(bucketCount); |
| 60 | + |
| 61 | + ExponentialHistogramGenerator initial = new ExponentialHistogramGenerator(bucketCount); |
| 62 | + for (int j = 0; j < bucketCount; j++) { |
| 63 | + initial.add(Math.pow(1.001, j)); |
| 64 | + } |
| 65 | + ExponentialHistogram initialHisto = initial.get(); |
| 66 | + int cnt = getBucketCount(initialHisto); |
| 67 | + if (cnt < bucketCount) { |
| 68 | + throw new IllegalArgumentException("Expected bucket count to be " + bucketCount + ", but was " + cnt); |
| 69 | + } |
| 70 | + histoMerger.add(initialHisto); |
| 71 | + |
| 72 | + int dataPointSize = (int) Math.round(bucketCount * mergedHistoSizeFactor); |
| 73 | + |
| 74 | + for (int i = 0; i < toMerge.length; i++) { |
| 75 | + ExponentialHistogramGenerator generator = new ExponentialHistogramGenerator(dataPointSize); |
| 76 | + |
| 77 | + int bucketIndex = 0; |
| 78 | + for (int j = 0; j < dataPointSize; j++) { |
| 79 | + bucketIndex += 1 + random.nextInt(bucketCount) % (Math.max(1, bucketCount / dataPointSize)); |
| 80 | + generator.add(Math.pow(1.001, bucketIndex)); |
| 81 | + } |
| 82 | + toMerge[i] = generator.get(); |
| 83 | + cnt = getBucketCount(toMerge[i]); |
| 84 | + if (cnt < dataPointSize) { |
| 85 | + throw new IllegalArgumentException("Expected bucket count to be " + dataPointSize + ", but was " + cnt); |
| 86 | + } |
| 87 | + } |
| 88 | + |
| 89 | + index = 0; |
| 90 | + } |
| 91 | + |
| 92 | + private static int getBucketCount(ExponentialHistogram histo) { |
| 93 | + int cnt = 0; |
| 94 | + for (BucketIterator it : List.of(histo.negativeBuckets().iterator(), histo.positiveBuckets().iterator())) { |
| 95 | + while (it.hasNext()) { |
| 96 | + cnt++; |
| 97 | + it.advance(); |
| 98 | + } |
| 99 | + } |
| 100 | + return cnt; |
| 101 | + } |
| 102 | + |
| 103 | + @Benchmark |
| 104 | + @BenchmarkMode(Mode.AverageTime) |
| 105 | + @OutputTimeUnit(TimeUnit.MICROSECONDS) |
| 106 | + public void add() { |
| 107 | + if (index >= toMerge.length) { |
| 108 | + index = 0; |
| 109 | + } |
| 110 | + histoMerger.add(toMerge[index++]); |
| 111 | + } |
| 112 | +} |