EmptyExponentialHistogram.java
@@ -23,7 +23,7 @@

import java.util.OptionalLong;

- class EmptyExponentialHistogram implements ReleasableExponentialHistogram {
+ class EmptyExponentialHistogram extends ReleasableExponentialHistogram {

static final EmptyExponentialHistogram INSTANCE = new EmptyExponentialHistogram();

ExponentialHistogram.java
@@ -28,9 +28,9 @@
import java.util.OptionalLong;

/**
- * Interface for implementations of exponential histograms adhering to the
+ * Base class for implementations of exponential histograms adhering to the
* <a href="https://opentelemetry.io/docs/specs/otel/metrics/data-model/#exponentialhistogram">OpenTelemetry definition</a>.
- * This interface supports sparse implementations, allowing iteration over buckets without requiring direct index access.<br>
+ * This class supports sparse implementations, allowing iteration over buckets without requiring direct index access.<br>
* The most important properties are:
* <ul>
* <li>The histogram has a scale parameter, which defines the accuracy. A higher scale implies a higher accuracy.
@@ -45,7 +45,7 @@
* Additionally, all algorithms assume that samples within a bucket are located at a single point: the point of least relative error
* (see {@link ExponentialScaleUtils#getPointOfLeastRelativeError(long, int)}).
*/
- public interface ExponentialHistogram extends Accountable {
+ public abstract class ExponentialHistogram implements Accountable {

// TODO(b/128622): support min/max storage and merging.
// TODO(b/128622): Add special positive and negative infinity buckets
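As background for the javadoc above: the scale/accuracy relation comes down to the bucket base being 2^(2^-scale). A minimal standalone sketch, not part of this diff (the helper name is illustrative):

    // The bucket base shrinks as the scale grows; bucket i covers (base^i, base^(i+1)].
    static double bucketBase(int scale) {
        return Math.pow(2.0, Math.scalb(1.0, -scale)); // 2^(2^-scale)
    }
    // scale 0 -> base 2.0; scale 3 -> base 2^(1/8) ~= 1.09. Each +1 in scale roughly
    // halves the worst-case relative error, at the cost of more populated buckets.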
@@ -56,42 +56,42 @@ public interface ExponentialHistogram extends Accountable {
// Theoretically, a MAX_SCALE of 51 would work and would still cover the entire range of double values.
// For that to work, the math for converting from double to indices and back would need to be reworked.
// One option would be to use "Quadruple": https://github.com/m-vokhm/Quadruple
- int MAX_SCALE = 38;
+ public static final int MAX_SCALE = 38;

// At this scale, all double values fall into a single bucket.
- int MIN_SCALE = -11;
+ public static final int MIN_SCALE = -11;

// Only use 62 bits (plus the sign bit) at max to allow computing the difference between the smallest and largest index without causing
// an overflow.
// The extra bit also provides room for compact storage tricks.
- int MAX_INDEX_BITS = 62;
- long MAX_INDEX = (1L << MAX_INDEX_BITS) - 1;
- long MIN_INDEX = -MAX_INDEX;
+ public static final int MAX_INDEX_BITS = 62;
+ public static final long MAX_INDEX = (1L << MAX_INDEX_BITS) - 1;
+ public static final long MIN_INDEX = -MAX_INDEX;
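The overflow guarantee described in the comment above can be checked directly: the widest possible index difference still fits in a signed long.

    // MAX_INDEX - MIN_INDEX = (2^62 - 1) - (-(2^62 - 1)) = 2^63 - 2 < Long.MAX_VALUE
    long widestSpan = ExponentialHistogram.MAX_INDEX - ExponentialHistogram.MIN_INDEX;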

/**
* The scale of the histogram. Higher scales result in higher accuracy but potentially more buckets.
* Must be less than or equal to {@link #MAX_SCALE} and greater than or equal to {@link #MIN_SCALE}.
*
* @return the scale of the histogram
*/
- int scale();
+ public abstract int scale();

/**
* @return the {@link ZeroBucket} representing the number of zero (or close-to-zero) values and its threshold
*/
- ZeroBucket zeroBucket();
+ public abstract ZeroBucket zeroBucket();

/**
* @return a {@link Buckets} instance for the populated buckets covering the positive value range of this histogram.
* The {@link BucketIterator#scale()} of iterators obtained via {@link Buckets#iterator()} must be the same as {@link #scale()}.
*/
- Buckets positiveBuckets();
+ public abstract Buckets positiveBuckets();

/**
* @return a {@link Buckets} instance for the populated buckets covering the negative value range of this histogram.
* The {@link BucketIterator#scale()} of iterators obtained via {@link Buckets#iterator()} must be the same as {@link #scale()}.
*/
- Buckets negativeBuckets();
+ public abstract Buckets negativeBuckets();

/**
* Returns the sum of all values represented by this histogram.
Expand All @@ -100,12 +100,22 @@ public interface ExponentialHistogram extends Accountable {
*
* @return the sum, guaranteed to be zero for empty histograms
*/
- double sum();
+ public abstract double sum();

+ /**
+ * Returns the number of values represented by this histogram.
+ * In other words, this is the sum of the counts of all buckets including the zero bucket.
+ *
+ * @return the value count, guaranteed to be zero for empty histograms
+ */
+ public long valueCount() {
+ return zeroBucket().count() + positiveBuckets().valueCount() + negativeBuckets().valueCount();
+ }

/**
* Represents a bucket range of an {@link ExponentialHistogram}, either the positive or the negative range.
*/
- interface Buckets {
+ public interface Buckets {

/**
* @return a {@link BucketIterator} for the populated buckets of this bucket range.
@@ -125,7 +135,52 @@ interface Buckets {

}

- static ExponentialHistogram empty() {
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) return true;
+ if (other == null) return false;
+
+ if ((other instanceof ExponentialHistogram) == false) {
+ return false;
+ }
+ ExponentialHistogram that = (ExponentialHistogram) other;
+
+ if (scale() != that.scale()) return false;
+ if (sum() != that.sum()) return false;
+ if (zeroBucket().equals(that.zeroBucket()) == false) return false;
+ if (bucketIteratorsEqual(negativeBuckets().iterator(), that.negativeBuckets().iterator()) == false) return false;
+ if (bucketIteratorsEqual(positiveBuckets().iterator(), that.positiveBuckets().iterator()) == false) return false;
+
+ return true;
+ }
+
+ private static boolean bucketIteratorsEqual(BucketIterator a, BucketIterator b) {
+ if (a.scale() != b.scale()) {
+ return false;
+ }
+ while (a.hasNext() && b.hasNext()) {
+ if (a.peekIndex() != b.peekIndex() || a.peekCount() != b.peekCount()) {
+ return false;
+ }
+ a.advance();
+ b.advance();
+ }
+ return a.hasNext() == b.hasNext();
+ }
+
+ @Override
+ public int hashCode() {
+ int hash = scale();
+ hash = 31 * hash + Double.hashCode(sum());
+ hash = 31 * hash + zeroBucket().hashCode();
+ hash = 31 * hash + Long.hashCode(valueCount());
+ // we intentionally don't include the hash of the buckets here, because that is likely expensive to compute
+ // instead, we assume that the value count and sum are a good enough approximation in most cases to minimize collisions
+ // the value count is typically available as a cached value and doesn't involve iterating over all buckets
+ return hash;
+ }
+
+ public static ExponentialHistogram empty() {
return EmptyExponentialHistogram.INSTANCE;
}
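The partial hash above keeps the equals/hashCode contract intact: histograms that are equal necessarily agree on scale, sum, zero bucket and value count, so they hash identically, while unequal histograms may merely collide. A hedged sketch of the invariant (helper name illustrative):

    static void hashContractHolds(ExponentialHistogram a, ExponentialHistogram b) {
        if (a.equals(b)) {
            assert a.hashCode() == b.hashCode(); // guaranteed by the fields hashed above
        }
    }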

@@ -139,7 +194,7 @@ static ExponentialHistogram empty() {
* @param values the values to be added to the histogram
* @return a new {@link ReleasableExponentialHistogram}
*/
- static ReleasableExponentialHistogram create(int maxBucketCount, ExponentialHistogramCircuitBreaker breaker, double... values) {
+ public static ReleasableExponentialHistogram create(int maxBucketCount, ExponentialHistogramCircuitBreaker breaker, double... values) {
try (ExponentialHistogramGenerator generator = ExponentialHistogramGenerator.create(maxBucketCount, breaker)) {
for (double val : values) {
generator.add(val);
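A hedged usage sketch for the now-public factory; the breaker is assumed to be supplied by the caller, and its construction is outside this diff:

    void createExample(ExponentialHistogramCircuitBreaker breaker) {
        try (ReleasableExponentialHistogram histo = ExponentialHistogram.create(100, breaker, 1.0, 2.5, 42.0)) {
            assert histo.valueCount() == 3; // one count per added value
        }
    }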
@@ -156,7 +211,7 @@ static ReleasableExponentialHistogram create(int maxBucketCount, ExponentialHist
* @param histograms the histograms to merge
* @return the merged histogram
*/
- static ReleasableExponentialHistogram merge(
+ public static ReleasableExponentialHistogram merge(
int maxBucketCount,
ExponentialHistogramCircuitBreaker breaker,
Iterator<ExponentialHistogram> histograms
@@ -177,7 +232,7 @@ static ReleasableExponentialHistogram merge(
* @param histograms the histograms to merge
* @return the merged histogram
*/
- static ReleasableExponentialHistogram merge(
+ public static ReleasableExponentialHistogram merge(
int maxBucketCount,
ExponentialHistogramCircuitBreaker breaker,
ExponentialHistogram... histograms
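A matching sketch for the varargs merge overload (the breaker is again assumed to come from the caller); merging preserves the total number of represented values:

    void mergeExample(ExponentialHistogramCircuitBreaker breaker, ExponentialHistogram a, ExponentialHistogram b) {
        try (ReleasableExponentialHistogram merged = ExponentialHistogram.merge(50, breaker, a, b)) {
            assert merged.valueCount() == a.valueCount() + b.valueCount();
        }
    }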
ExponentialHistogramUtils.java
@@ -59,4 +59,5 @@ public static double estimateSum(BucketIterator negativeBuckets, BucketIterator
}
return sum;
}
+
}
ExponentialScaleUtils.java
@@ -185,6 +185,16 @@ public static int getMaximumScaleIncrease(long index) {
return Long.numberOfLeadingZeros(index) - (64 - MAX_INDEX_BITS);
}

+ /**
+ * Returns the scale to which the given index can be scaled down without changing the exponentially scaled number it represents.
+ * @param index the index of the number
+ * @param scale the current scale of the number
+ * @return the new scale
+ */
+ static int normalizeScale(long index, int scale) {
+ return Math.max(MIN_SCALE, scale - Long.numberOfTrailingZeros(index));
+ }
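Worked example for the new helper: the value represented by (index, scale) is 2^(index * 2^-scale), so each trailing zero bit of the index can be traded for one step of scale.

    // value(8, scale 6) = 2^(8/64) = 2^(1/8)
    // normalizeScale(8, 6) == 6 - 3 == 3, since 8 has three trailing zero bits;
    // adjustScale(8, 6, 3 - 6) == 1, and value(1, scale 3) = 2^(1/8) as well.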

/**
* Returns the upper boundary of the bucket with the given index and scale.
*
FixedCapacityExponentialHistogram.java
@@ -31,7 +31,7 @@
* Consumers must ensure that if the histogram is mutated, all previously acquired {@link BucketIterator}
* instances are no longer used.
*/
- final class FixedCapacityExponentialHistogram implements ReleasableExponentialHistogram {
+ final class FixedCapacityExponentialHistogram extends ReleasableExponentialHistogram {

static final long BASE_SIZE = RamUsageEstimator.shallowSizeOfInstance(FixedCapacityExponentialHistogram.class) + ZeroBucket.SHALLOW_SIZE
+ 2 * Buckets.SHALLOW_SIZE;
ReleasableExponentialHistogram.java
@@ -26,12 +26,12 @@
/**
* A histogram which participates in the {@link ExponentialHistogramCircuitBreaker} and therefore requires proper releasing.
*/
- public interface ReleasableExponentialHistogram extends ExponentialHistogram, Releasable {
+ public abstract class ReleasableExponentialHistogram extends ExponentialHistogram implements Releasable {

/**
* @return an empty singleton, which does not allocate any memory and therefore {@link #close()} is a no-op.
*/
- static ReleasableExponentialHistogram empty() {
+ public static ReleasableExponentialHistogram empty() {
return EmptyExponentialHistogram.INSTANCE;
}
}
ZeroBucket.java
@@ -27,9 +27,11 @@
import static org.elasticsearch.exponentialhistogram.ExponentialHistogram.MAX_SCALE;
import static org.elasticsearch.exponentialhistogram.ExponentialHistogram.MIN_INDEX;
import static org.elasticsearch.exponentialhistogram.ExponentialHistogram.MIN_SCALE;
+ import static org.elasticsearch.exponentialhistogram.ExponentialScaleUtils.adjustScale;
import static org.elasticsearch.exponentialhistogram.ExponentialScaleUtils.compareExponentiallyScaledValues;
import static org.elasticsearch.exponentialhistogram.ExponentialScaleUtils.computeIndex;
import static org.elasticsearch.exponentialhistogram.ExponentialScaleUtils.exponentiallyScaledToDoubleValue;
+ import static org.elasticsearch.exponentialhistogram.ExponentialScaleUtils.normalizeScale;

/**
* Represents the bucket for values around zero in an exponential histogram.
@@ -62,13 +64,7 @@ public final class ZeroBucket {
// A singleton for an empty zero bucket with the smallest possible threshold.
private static final ZeroBucket MINIMAL_EMPTY = new ZeroBucket(MIN_INDEX, MIN_SCALE, 0);

- /**
- * Creates a new zero bucket with a specific threshold and count.
- *
- * @param zeroThreshold The threshold defining the bucket's range [-zeroThreshold, +zeroThreshold].
- * @param count The number of values in the bucket.
- */
- public ZeroBucket(double zeroThreshold, long count) {
+ private ZeroBucket(double zeroThreshold, long count) {
assert zeroThreshold >= 0.0 : "zeroThreshold must not be negative";
this.index = Long.MAX_VALUE; // compute lazily when needed
this.scale = MAX_SCALE;
@@ -85,11 +81,11 @@ private ZeroBucket(long index, int scale, long count) {
this.count = count;
}

- private ZeroBucket(double realThreshold, long index, int scale, long count) {
- this.realThreshold = realThreshold;
- this.index = index;
- this.scale = scale;
- this.count = count;
+ private ZeroBucket(ZeroBucket toCopy, long newCount) {
+ this.realThreshold = toCopy.realThreshold;
+ this.index = toCopy.index;
+ this.scale = toCopy.scale;
+ this.count = newCount;
}

/**
@@ -109,8 +105,37 @@ public static ZeroBucket minimalWithCount(long count) {
if (count == 0) {
return MINIMAL_EMPTY;
} else {
- return new ZeroBucket(MINIMAL_EMPTY.zeroThreshold(), MINIMAL_EMPTY.index(), MINIMAL_EMPTY.scale(), count);
+ return new ZeroBucket(MINIMAL_EMPTY, count);
}
}
+
+ /**
+ * Creates a zero bucket from the given threshold represented as a double.
+ *
+ * @param zeroThreshold the zero threshold defining the bucket range [-zeroThreshold, +zeroThreshold], must be non-negative
+ * @param count the number of values in the bucket
+ * @return the new {@link ZeroBucket}
+ */
+ public static ZeroBucket create(double zeroThreshold, long count) {
+ if (zeroThreshold == 0) {
+ return minimalWithCount(count);
+ }
+ return new ZeroBucket(zeroThreshold, count);
+ }
+
+ /**
+ * Creates a zero bucket from the given threshold represented as an exponentially scaled number.
+ *
+ * @param index the index of the exponentially scaled number defining the zero threshold
+ * @param scale the corresponding scale for the index
+ * @param count the number of values in the bucket
+ * @return the new {@link ZeroBucket}
+ */
+ public static ZeroBucket create(long index, int scale, long count) {
+ if (index == MINIMAL_EMPTY.index && scale == MINIMAL_EMPTY.scale) {
+ return minimalWithCount(count);
+ }
+ return new ZeroBucket(index, scale, count);
+ }

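A hedged sketch of the two new factories; a threshold of exactly zero collapses to the shared minimal-threshold instance:

    static void zeroBucketExamples() {
        ZeroBucket none = ZeroBucket.create(0.0, 5);             // returns minimalWithCount(5)
        ZeroBucket fromDouble = ZeroBucket.create(0.001, 5);     // range [-0.001, +0.001], count 5
        ZeroBucket fromIndex = ZeroBucket.create(-1000L, 4, 5);  // threshold given as (index, scale) pair
    }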
/**
@@ -158,9 +183,9 @@ public ZeroBucket merge(ZeroBucket other) {
long totalCount = count + other.count;
// Both are populated, so we need to use the higher zero-threshold.
if (this.compareZeroThreshold(other) >= 0) {
- return new ZeroBucket(realThreshold, index, scale, totalCount);
+ return new ZeroBucket(this, totalCount);
} else {
- return new ZeroBucket(other.realThreshold, other.index, other.scale, totalCount);
+ return new ZeroBucket(other, totalCount);
}
}
}
@@ -219,10 +244,33 @@ public ZeroBucket collapseOverlappingBuckets(BucketIterator buckets) {
long collapsedUpperBoundIndex = highestCollapsedIndex + 1;
if (compareExponentiallyScaledValues(index(), scale(), collapsedUpperBoundIndex, buckets.scale()) >= 0) {
// Our current zero-threshold is larger than the upper boundary of the largest collapsed bucket, so we keep it.
- return new ZeroBucket(realThreshold, index, scale, newZeroCount);
+ return new ZeroBucket(this, newZeroCount);
} else {
return new ZeroBucket(collapsedUpperBoundIndex, buckets.scale(), newZeroCount);
}
}
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || getClass() != o.getClass()) return false;
+ ZeroBucket that = (ZeroBucket) o;
+ if (count() != that.count()) return false;
+ if (Double.compare(zeroThreshold(), that.zeroThreshold()) != 0) return false;
+ if (compareExponentiallyScaledValues(index(), scale(), that.index(), that.scale()) != 0) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int normalizedScale = normalizeScale(index(), scale);
+ int scaleAdjustment = normalizedScale - scale;
+ long normalizedIndex = adjustScale(index(), scale, scaleAdjustment);
+
+ int result = normalizedScale;
+ result = 31 * result + Long.hashCode(normalizedIndex);
+ result = 31 * result + Double.hashCode(zeroThreshold());
+ result = 31 * result + Long.hashCode(count);
+ return result;
+ }
}
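Worked example for the normalization in hashCode: equals() compares thresholds via compareExponentiallyScaledValues, so the same threshold can be expressed by different (index, scale) pairs, and hashing the raw pair would break the contract.

    // value(4, scale 2) = 2^(4 * 2^-2) = 2^1
    // value(1, scale 0) = 2^(1 * 2^-0) = 2^1, i.e. both pairs are "equal"
    // normalizeScale(4, 2) == 0 and adjustScale(4, 2, -2) == 1,
    // so both buckets hash via the same normalized pair (index 1, scale 0).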