/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.exponentialhistogram.aggregations.bucket.histogram;

import org.elasticsearch.exponentialhistogram.BucketIterator;
import org.elasticsearch.exponentialhistogram.ExponentialHistogram;
import org.elasticsearch.exponentialhistogram.ExponentialScaleUtils;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramAggregator;
import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.xpack.exponentialhistogram.aggregations.support.ExponentialHistogramValuesSource;
import org.elasticsearch.xpack.exponentialhistogram.fielddata.ExponentialHistogramValuesReader;

import java.io.IOException;
import java.util.Map;

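/**
 * A fixed-interval histogram aggregator that runs over pre-aggregated exponential histogram fields.
 * Every populated bucket of each document's exponential histogram is mapped to a fixed-interval
 * bucket via a representative point and contributes its full count to that bucket.
 */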
public final class ExponentialHistogramBackedHistogramAggregator extends AbstractHistogramAggregator {

    private final ExponentialHistogramValuesSource.ExponentialHistogram valuesSource;

    public ExponentialHistogramBackedHistogramAggregator(
        String name,
        AggregatorFactories factories,
        double interval,
        double offset,
        BucketOrder order,
        boolean keyed,
        long minDocCount,
        DoubleBounds extendedBounds,
        DoubleBounds hardBounds,
        ValuesSourceConfig valuesSourceConfig,
        AggregationContext context,
        Aggregator parent,
        CardinalityUpperBound cardinalityUpperBound,
        Map<String, Object> metadata
    ) throws IOException {
        super(
            name,
            factories,
            interval,
            offset,
            order,
            keyed,
            minDocCount,
            extendedBounds,
            hardBounds,
            valuesSourceConfig.format(),
            context,
            parent,
            cardinalityUpperBound,
            metadata
        );

        this.valuesSource = (ExponentialHistogramValuesSource.ExponentialHistogram) valuesSourceConfig.getValuesSource();

        // Sub-aggregations are not supported when running a histogram aggregation over histogram fields
        if (subAggregators().length > 0) {
            throw new IllegalArgumentException("Histogram aggregation on histogram fields does not support sub-aggregations");
        }
    }

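    /**
     * Collects one fixed-interval bucket per populated exponential-histogram bucket of each matching document,
     * incrementing that bucket's doc_count by the exponential-histogram bucket's count.
     */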
    @Override
    public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final LeafBucketCollector sub) throws IOException {
        ExponentialHistogramValuesReader values = valuesSource.getHistogramValues(aggCtx.getLeafReaderContext());
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long owningBucketOrd) throws IOException {
                if (values.advanceExact(doc)) {
                    ExponentialHistogram histo = values.histogramValue();
                    forEachBucketCenter(histo, (center, count) -> {
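                        // Map the bucket's representative point onto a fixed-interval bucket key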
                        double key = Math.floor((center - offset) / interval);
                        if (hardBounds == null || hardBounds.contain(key * interval)) {
                            long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
                            if (bucketOrd < 0) { // already seen
                                bucketOrd = -1 - bucketOrd;
                                collectExistingBucket(sub, doc, bucketOrd);
                            } else {
                                collectBucket(sub, doc, bucketOrd);
                            }
                            // collectBucket/collectExistingBucket has already incremented this bucket's doc_count by
                            // the document's _doc_count. Add (count - _doc_count) so that the bucket ends up
                            // incremented by exactly the exponential-histogram bucket's count.
                            incrementBucketDocCount(bucketOrd, count - docCountProvider.getDocCount(doc));
                        }
                    });
                }
            }
        };
    }

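    /** Callback invoked once per populated bucket with the bucket's representative point and count. */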
    @FunctionalInterface
    private interface BucketCenterConsumer {
        void accept(double bucketCenter, long count) throws IOException;
    }

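    /**
     * Invokes the consumer for every populated bucket of the histogram: negative buckets, the zero bucket and
     * positive buckets. Negative and positive buckets are represented by their point of least relative error,
     * clamped to the histogram's [min, max] range; the zero bucket is represented by 0.0.
     */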
    private static void forEachBucketCenter(ExponentialHistogram histo, BucketCenterConsumer consumer) throws IOException {
        BucketIterator negIt = histo.negativeBuckets().iterator();
        while (negIt.hasNext()) {
            double center = -ExponentialScaleUtils.getPointOfLeastRelativeError(negIt.peekIndex(), negIt.scale());
            center = Math.clamp(center, histo.min(), histo.max());
            consumer.accept(center, negIt.peekCount());
            negIt.advance();
        }
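        // Values collapsed into the zero bucket are represented at exactly 0.0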
        if (histo.zeroBucket().count() > 0) {
            consumer.accept(0.0, histo.zeroBucket().count());
        }
        BucketIterator posIt = histo.positiveBuckets().iterator();
        while (posIt.hasNext()) {
            double center = ExponentialScaleUtils.getPointOfLeastRelativeError(posIt.peekIndex(), posIt.scale());
            center = Math.clamp(center, histo.min(), histo.max());
            consumer.accept(center, posIt.peekCount());
            posIt.advance();
        }
    }

}