Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,11 @@ public Buckets negativeBuckets() {
return EmptyBuckets.INSTANCE;
}

// The empty histogram represents no values, so its sum is zero —
// this satisfies the ExponentialHistogram#sum() contract ("guaranteed
// to be zero for empty histograms").
@Override
public double sum() {
return 0;
}

@Override
public long ramBytesUsed() {
return 0;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
*/
public interface ExponentialHistogram extends Accountable {

// TODO(b/128622): support min/max/sum/count storage and merging.
// TODO(b/128622): support min/max storage and merging.
// TODO(b/128622): Add special positive and negative infinity buckets
// to allow representation of explicit bucket histograms with open boundaries.

Expand Down Expand Up @@ -93,6 +93,15 @@ public interface ExponentialHistogram extends Accountable {
*/
Buckets negativeBuckets();

/**
* Returns the sum of all values represented by this histogram.
* Note that even for cumulative histograms the sum is not guaranteed to be monotonically increasing,
* because histograms may contain negative values.
*
* @return the sum, guaranteed to be zero for empty histograms
*/
double sum();

/**
* Represents a bucket range of an {@link ExponentialHistogram}, either the positive or the negative range.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ private void mergeValuesToHistogram() {
}

valueBuffer.reset();
valueBuffer.setSum(rawValuesSum());
int scale = valueBuffer.scale();

// Buckets must be provided with their indices in ascending order.
Expand Down Expand Up @@ -161,6 +162,14 @@ private void mergeValuesToHistogram() {
valueCount = 0;
}

/**
 * Computes the exact sum of the raw values currently held in the buffer.
 * Only the first {@code valueCount} entries of {@code rawValueBuffer} are valid.
 */
private double rawValuesSum() {
    double total = 0.0;
    int index = 0;
    while (index < valueCount) {
        total += rawValueBuffer[index];
        index++;
    }
    return total;
}

private static long estimateBaseSize(int numBuckets) {
return SHALLOW_SIZE + RamEstimationUtil.estimateDoubleArray(numBuckets);
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,7 @@ public void add(ExponentialHistogram toAdd) {
buffer = FixedCapacityExponentialHistogram.create(bucketLimit, circuitBreaker);
}
buffer.setZeroBucket(zeroBucket);
buffer.setSum(a.sum() + b.sum());

// We attempt to bring everything to the scale of A.
// This might involve increasing the scale for B, which would increase its indices.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V., and/or licensed to Elasticsearch B.V.
* under one or more license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* This file is based on a modification of https://github.com/open-telemetry/opentelemetry-java which is licensed under the Apache 2.0 License.
*/

package org.elasticsearch.exponentialhistogram;

/**
 * Static helpers for working with {@link ExponentialHistogram} bucket data.
 */
public final class ExponentialHistogramUtils {

    // Utility class: static methods only, never instantiated.
    private ExponentialHistogramUtils() {}

    /**
     * Estimates the sum of all values of a histogram just based on the populated buckets.
     * Each bucket contributes its count multiplied by the bucket's point of least relative
     * error, so the result is an estimate, not an exact sum.
     *
     * @param negativeBuckets the negative buckets of the histogram
     * @param positiveBuckets the positive buckets of the histogram
     * @return the estimated sum of all values in the histogram, guaranteed to be zero if there are no buckets
     */
    public static double estimateSum(BucketIterator negativeBuckets, BucketIterator positiveBuckets) {
        double sum = 0.0;
        // Negative-range buckets store magnitudes; negate the midpoint to recover the sign.
        while (negativeBuckets.hasNext()) {
            double bucketMidPoint = ExponentialScaleUtils.getPointOfLeastRelativeError(
                negativeBuckets.peekIndex(),
                negativeBuckets.scale()
            );
            sum -= bucketMidPoint * negativeBuckets.peekCount();
            negativeBuckets.advance();
        }
        while (positiveBuckets.hasNext()) {
            double bucketMidPoint = ExponentialScaleUtils.getPointOfLeastRelativeError(
                positiveBuckets.peekIndex(),
                positiveBuckets.scale()
            );
            sum += bucketMidPoint * positiveBuckets.peekCount();
            positiveBuckets.advance();
        }
        return sum;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
public class ExponentialHistogramXContent {

public static final String SCALE_FIELD = "scale";
public static final String SUM_FIELD = "sum";
public static final String ZERO_FIELD = "zero";
public static final String ZERO_COUNT_FIELD = "count";
public static final String ZERO_THRESHOLD_FIELD = "threshold";
Expand All @@ -49,6 +50,7 @@ public static void serialize(XContentBuilder builder, ExponentialHistogram histo
builder.startObject();

builder.field(SCALE_FIELD, histogram.scale());
builder.field(SUM_FIELD, histogram.sum());
double zeroThreshold = histogram.zeroBucket().zeroThreshold();
long zeroCount = histogram.zeroBucket().count();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@ final class FixedCapacityExponentialHistogram implements ReleasableExponentialHi

private final Buckets positiveBuckets = new Buckets(true);

private double sum;

private final ExponentialHistogramCircuitBreaker circuitBreaker;
private boolean closed = false;

Expand All @@ -78,6 +80,7 @@ private FixedCapacityExponentialHistogram(int bucketCapacity, ExponentialHistogr
* Resets this histogram to the same state as a newly constructed one with the same capacity.
*/
void reset() {
sum = 0;
setZeroBucket(ZeroBucket.minimalEmpty());
resetBuckets(MAX_SCALE);
}
Expand Down Expand Up @@ -110,6 +113,15 @@ void setZeroBucket(ZeroBucket zeroBucket) {
this.zeroBucket = zeroBucket;
}

// Returns the externally tracked sum (set via setSum, reset to 0 by reset()).
@Override
public double sum() {
return sum;
}

void setSum(double sum) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is only exposed for testing? If so, let's add a comment to call it out.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see, we try to avoid recalculating in merging. Sounds good - I don't know how I feel about not validating the passed value but it can be expensive and tricky to do once.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it should be sufficient for us to do the validations required on ingestion and trust the values to be sane internally.
Also we don't just avoid recalculating while merging for performance reasons: The calculation we have is just an estimation. User can instead provide the exact sum on ingestion, which means we'll preserve exactness when merging, giving exact averages. In OTLP, the sum is provided by default.

this.sum = sum;
}

/**
* Attempts to add a bucket to the positive or negative range of this histogram.
* <br>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,22 @@ public void testEmptyZeroBucketIgnored() {
assertThat(posBuckets.hasNext(), equalTo(false));
}

public void testSumCorrectness() {
    // Values in [-1, 1), mixed signs, spread over two histograms of different capacity.
    double[] valuesA = randomDoubles(100).map(d -> d * 2 - 1).toArray();
    double[] valuesB = randomDoubles(50).map(d -> d * 2 - 1).toArray();
    double expectedSum = Arrays.stream(valuesA).sum() + Arrays.stream(valuesB).sum();
    try (
        ReleasableExponentialHistogram merged = ExponentialHistogram.merge(
            2,
            breaker(),
            createAutoReleasedHistogram(10, valuesA),
            createAutoReleasedHistogram(20, valuesB)
        )
    ) {
        // The merged sum must match the exact sum of all inputs, not an estimate.
        assertThat(merged.sum(), closeTo(expectedSum, 0.000001));
    }
}

public void testUpscalingDoesNotExceedIndexLimits() {
for (int i = 0; i < 4; i++) {

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V., and/or licensed to Elasticsearch B.V.
* under one or more license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* This file is based on a modification of https://github.com/open-telemetry/opentelemetry-java which is licensed under the Apache 2.0 License.
*/

package org.elasticsearch.exponentialhistogram;

import static org.hamcrest.Matchers.closeTo;

public class ExponentialHistogramUtilsTests extends ExponentialHistogramTestCase {

    public void testRandomDataSumEstimation() {
        for (int iteration = 0; iteration < 100; iteration++) {
            int valueCount = randomIntBetween(100, 10_000);
            int bucketCount = randomIntBetween(2, 500);

            // All values share a single sign: the relative error bound asserted
            // below only holds when positive and negative values cannot cancel.
            double sign = randomBoolean() ? 1 : -1;
            double[] values = new double[valueCount];
            double exactSum = 0;
            for (int v = 0; v < valueCount; v++) {
                values[v] = sign * Math.pow(10, randomIntBetween(1, 9)) * randomDouble();
                exactSum += values[v];
            }

            ExponentialHistogram histo = createAutoReleasedHistogram(bucketCount, values);

            double estimatedSum = ExponentialHistogramUtils.estimateSum(
                histo.negativeBuckets().iterator(),
                histo.positiveBuckets().iterator()
            );

            double exactAverage = exactSum / valueCount;
            double estimatedAverage = estimatedSum / valueCount;

            // For single-sign data the estimation has a guaranteed relative error
            // bound of base - 1, where base = 2^(2^-scale).
            double histogramBase = Math.pow(2, Math.pow(2, -histo.scale()));
            double allowedError = Math.abs(exactAverage * (histogramBase - 1));

            assertThat(estimatedAverage, closeTo(exactAverage, allowedError));
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,14 @@ public class ExponentialHistogramXContentTests extends ExponentialHistogramTestC

public void testEmptyHistogram() {
ExponentialHistogram emptyHistogram = ExponentialHistogram.empty();
assertThat(toJson(emptyHistogram), equalTo("{\"scale\":" + emptyHistogram.scale() + "}"));
assertThat(toJson(emptyHistogram), equalTo("{\"scale\":" + emptyHistogram.scale() + ",\"sum\":0.0}"));
}

public void testFullHistogram() {
FixedCapacityExponentialHistogram histo = createAutoReleasedHistogram(100);
histo.setZeroBucket(new ZeroBucket(0.1234, 42));
histo.resetBuckets(7);
histo.setSum(1234.56);
histo.tryAddBucket(-10, 15, false);
histo.tryAddBucket(10, 5, false);
histo.tryAddBucket(-11, 10, true);
Expand All @@ -49,6 +50,7 @@ public void testFullHistogram() {
equalTo(
"{"
+ "\"scale\":7,"
+ "\"sum\":1234.56,"
+ "\"zero\":{\"count\":42,\"threshold\":0.1234},"
+ "\"positive\":{\"indices\":[-11,11],\"counts\":[10,20]},"
+ "\"negative\":{\"indices\":[-10,10],\"counts\":[15,5]}"
Expand All @@ -61,30 +63,34 @@ public void testOnlyZeroThreshold() {
FixedCapacityExponentialHistogram histo = createAutoReleasedHistogram(10);
histo.setZeroBucket(new ZeroBucket(5.0, 0));
histo.resetBuckets(3);
assertThat(toJson(histo), equalTo("{\"scale\":3,\"zero\":{\"threshold\":5.0}}"));
histo.setSum(1.1);
assertThat(toJson(histo), equalTo("{\"scale\":3,\"sum\":1.1,\"zero\":{\"threshold\":5.0}}"));
}

// A zero bucket with a count but a 0.0 threshold serializes only the "count"
// field inside "zero"; scale and sum are always emitted.
public void testOnlyZeroCount() {
FixedCapacityExponentialHistogram histo = createAutoReleasedHistogram(10);
histo.setZeroBucket(new ZeroBucket(0.0, 7));
histo.resetBuckets(2);
histo.setSum(1.1);
assertThat(toJson(histo), equalTo("{\"scale\":2,\"sum\":1.1,\"zero\":{\"count\":7}}"));
}

// Only the positive range is populated; the "negative" object must be omitted.
public void testOnlyPositiveBuckets() {
    FixedCapacityExponentialHistogram histogram = createAutoReleasedHistogram(10);
    histogram.resetBuckets(4);
    histogram.setSum(1.1);
    histogram.tryAddBucket(-1, 3, true);
    histogram.tryAddBucket(2, 5, true);
    String expected = "{\"scale\":4,\"sum\":1.1,\"positive\":{\"indices\":[-1,2],\"counts\":[3,5]}}";
    assertThat(toJson(histogram), equalTo(expected));
}

// Only the negative range is populated; the "positive" object must be omitted.
public void testOnlyNegativeBuckets() {
FixedCapacityExponentialHistogram histo = createAutoReleasedHistogram(10);
histo.resetBuckets(5);
histo.setSum(1.1);
histo.tryAddBucket(-1, 4, false);
histo.tryAddBucket(2, 6, false);
assertThat(toJson(histo), equalTo("{\"scale\":5,\"sum\":1.1,\"negative\":{\"indices\":[-1,2],\"counts\":[4,6]}}"));
}

private static String toJson(ExponentialHistogram histo) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ public class CompressedExponentialHistogram implements ExponentialHistogram {

private double zeroThreshold;
private long valueCount;
private double sum;
private ZeroBucket lazyZeroBucket;

private final EncodedHistogramData encodedData = new EncodedHistogramData();
Expand All @@ -53,6 +54,11 @@ public ZeroBucket zeroBucket() {
return lazyZeroBucket;
}

// The sum is stored externally and supplied via reset(...); it is not
// derived from the encoded bucket data.
@Override
public double sum() {
return sum;
}

@Override
public ExponentialHistogram.Buckets positiveBuckets() {
return positiveBuckets;
Expand All @@ -68,20 +74,23 @@ public ExponentialHistogram.Buckets negativeBuckets() {
*
* @param zeroThreshold the zeroThreshold for the histogram, which needs to be stored externally
* @param valueCount the total number of values the histogram contains, needs to be stored externally
* @param sum the total sum of the values the histogram contains, needs to be stored externally
* @param encodedHistogramData the encoded histogram bytes which previously where generated via
* {@link #writeHistogramBytes(StreamOutput, int, List, List)}.
*/
public void reset(double zeroThreshold, long valueCount, BytesRef encodedHistogramData) throws IOException {
public void reset(double zeroThreshold, long valueCount, double sum, BytesRef encodedHistogramData) throws IOException {
// Invalidate the cached zero bucket; presumably rebuilt lazily from the
// new threshold by zeroBucket() — confirm against the accessor.
lazyZeroBucket = null;
this.zeroThreshold = zeroThreshold;
this.valueCount = valueCount;
this.sum = sum;
encodedData.decode(encodedHistogramData);
// The bucket views cache decoded state; drop it after swapping the payload.
negativeBuckets.resetCachedData();
positiveBuckets.resetCachedData();
}

/**
* Serializes the given histogram, so that exactly the same data can be reconstructed via {@link #reset(double, long, BytesRef)}.
* Serializes the given histogram, so that exactly the same data can be reconstructed via
* {@link #reset(double, long, double, BytesRef)}.
*
* @param output the output to write the serialized bytes to
* @param scale the scale of the histogram
Expand Down
Loading