@@ -532,15 +532,11 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalAutoDateHistogram(
-            getName(),
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
-            targetBuckets,
-            bucketInfo,
-            format,
-            getMetadata(),
-            bucketInnerInterval
-        );
+        final List<Bucket> buckets = new ArrayList<>(this.buckets);
+        for (int i = 0; i < buckets.size(); i++) {
+            buckets.set(i, buckets.get(i).finalizeSampling(samplingContext));
+        }
+        return new InternalAutoDateHistogram(getName(), buckets, targetBuckets, bucketInfo, format, getMetadata(), bucketInnerInterval);
     }
 
     private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult current, AggregationReduceContext reduceContext) {
@@ -270,12 +270,16 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
+        final List<InternalBucket> buckets = new ArrayList<>(this.buckets.size());
+        for (InternalBucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
         return new InternalComposite(
             name,
             size,
             sourceNames,
             buckets.isEmpty() ? formats : buckets.get(buckets.size() - 1).formats,
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
+            buckets,
             buckets.isEmpty() ? afterKey : buckets.get(buckets.size() - 1).getRawKey(),
             reverseMuls,
             missingOrders,
@@ -223,13 +223,11 @@ public void close() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalFilters(
-            name,
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
-            keyed,
-            keyedBucket,
-            getMetadata()
-        );
+        final List<InternalBucket> buckets = new ArrayList<>(this.buckets.size());
+        for (InternalBucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
+        return new InternalFilters(name, buckets, keyed, keyedBucket, getMetadata());
     }
 
     @Override
@@ -23,6 +23,7 @@
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -137,20 +138,17 @@ public void close() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return create(
-            getName(),
-            requiredSize,
-            buckets.stream()
-                .<InternalGeoGridBucket>map(
-                    b -> this.createBucket(
-                        b.hashAsLong,
-                        samplingContext.scaleUp(b.docCount),
-                        InternalAggregations.finalizeSampling(b.aggregations, samplingContext)
-                    )
-                )
-                .toList(),
-            getMetadata()
-        );
+        final List<InternalGeoGridBucket> buckets = new ArrayList<>(this.buckets.size());
+        for (InternalGeoGridBucket bucket : this.buckets) {
+            buckets.add(
+                this.createBucket(
+                    bucket.hashAsLong,
+                    samplingContext.scaleUp(bucket.docCount),
+                    InternalAggregations.finalizeSampling(bucket.aggregations, samplingContext)
+                )
+            );
+        }
+        return create(getName(), requiredSize, buckets, getMetadata());
     }
 
     protected abstract B createBucket(long hashAsLong, long docCount, InternalAggregations aggregations);
@@ -515,9 +515,13 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
+        final List<Bucket> buckets = new ArrayList<>(this.buckets.size());
+        for (Bucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
         return new InternalDateHistogram(
             getName(),
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
+            buckets,
             order,
             minDocCount,
             offset,
@@ -446,16 +446,11 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalHistogram(
-            getName(),
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
-            order,
-            minDocCount,
-            emptyBucketInfo,
-            format,
-            keyed,
-            getMetadata()
-        );
+        final List<Bucket> buckets = new ArrayList<>(this.buckets.size());
+        for (Bucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
+        return new InternalHistogram(getName(), buckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata());
     }
 
     @Override
@@ -539,14 +539,11 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalVariableWidthHistogram(
-            getName(),
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
-            emptyBucketInfo,
-            targetNumBuckets,
-            format,
-            getMetadata()
-        );
+        final List<Bucket> buckets = new ArrayList<>(this.buckets.size());
+        for (Bucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
+        return new InternalVariableWidthHistogram(getName(), buckets, emptyBucketInfo, targetNumBuckets, format, getMetadata());
    }
 
     @Override
@@ -25,6 +25,7 @@
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -258,13 +259,11 @@ public void close() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalBinaryRange(
-            name,
-            format,
-            keyed,
-            buckets.stream().map(b -> b.finalizeSampling(samplingContext)).toList(),
-            metadata
-        );
+        final List<Bucket> buckets = new ArrayList<>(this.buckets.size());
+        for (Bucket bucket : this.buckets) {
+            buckets.add(bucket.finalizeSampling(samplingContext));
+        }
+        return new InternalBinaryRange(name, format, keyed, buckets, metadata);
     }
 
     @Override
@@ -325,24 +325,20 @@ public void close() {
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
         InternalRange.Factory<B, R> factory = getFactory();
-        return factory.create(
-            name,
-            ranges.stream()
-                .map(
-                    b -> factory.createBucket(
-                        b.getKey(),
-                        b.from,
-                        b.to,
-                        samplingContext.scaleUp(b.getDocCount()),
-                        InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext),
-                        b.format
-                    )
-                )
-                .toList(),
-            format,
-            keyed,
-            getMetadata()
-        );
+        final List<B> buckets = new ArrayList<>(ranges.size());
+        for (B range : ranges) {
+            buckets.add(
+                factory.createBucket(
+                    range.getKey(),
+                    range.from,
+                    range.to,
+                    samplingContext.scaleUp(range.getDocCount()),
+                    InternalAggregations.finalizeSampling(range.getAggregations(), samplingContext),
+                    range.format
+                )
+            );
+        }
+        return factory.create(name, buckets, format, keyed, getMetadata());
     }
 
     @Override
@@ -24,6 +24,8 @@
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 
 public class InternalRandomSampler extends InternalSingleBucketAggregation implements Sampler {
@@ -105,7 +107,12 @@ public InternalAggregation get() {
         InternalAggregations aggs = subAggregatorReducer.get();
         if (reduceContext.isFinalReduce() && aggs != null) {
             SamplingContext context = buildContext();
-            aggs = InternalAggregations.from(aggs.asList().stream().map(agg -> agg.finalizeSampling(context)).toList());
+            final List<InternalAggregation> aggregationList = aggs.asList();
+            final List<InternalAggregation> sampledAggregations = new ArrayList<>(aggregationList.size());
+            for (InternalAggregation agg : aggregationList) {
+                sampledAggregations.add(agg.finalizeSampling(context));
+            }
+            aggs = InternalAggregations.from(sampledAggregations);
         }
         return newAggregation(getName(), docCount, aggs);
     }
@@ -344,18 +344,21 @@ private BucketOrder getThisReduceOrder() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
+        final List<B> originalBuckets = getBuckets();
+        final List<B> buckets = new ArrayList<>(originalBuckets.size());
+        for (B bucket : originalBuckets) {
+            buckets.add(
+                createBucket(
+                    samplingContext.scaleUp(bucket.getDocCount()),
+                    InternalAggregations.finalizeSampling(bucket.getAggregations(), samplingContext),
+                    getShowDocCountError() ? samplingContext.scaleUp(bucket.getDocCountError()) : 0,
+                    bucket
+                )
+            );
+        }
         return create(
             name,
-            getBuckets().stream()
-                .map(
-                    b -> createBucket(
-                        samplingContext.scaleUp(b.getDocCount()),
-                        InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext),
-                        getShowDocCountError() ? samplingContext.scaleUp(b.getDocCountError()) : 0,
-                        b
-                    )
-                )
-                .toList(),
+            buckets,
             getOrder(),
             samplingContext.scaleUp(getDocCountError()),
             samplingContext.scaleUp(getSumOfOtherDocCounts())
@@ -147,19 +147,18 @@ private B reduceBucket(List<B> buckets, AggregationReduceContext context) {
 
     @Override
     public A finalizeSampling(SamplingContext samplingContext) {
-        return createWithFilter(
-            name,
-            getBuckets().stream()
-                .map(
-                    b -> createBucket(
-                        samplingContext.scaleUp(b.getDocCount()),
-                        InternalAggregations.finalizeSampling(b.aggregations, samplingContext),
-                        b
-                    )
-                )
-                .toList(),
-            filter
-        );
+        final List<B> originalBuckets = getBuckets();
+        final List<B> buckets = new ArrayList<>(originalBuckets.size());
+        for (B bucket : originalBuckets) {
+            buckets.add(
+                createBucket(
+                    samplingContext.scaleUp(bucket.getDocCount()),
+                    InternalAggregations.finalizeSampling(bucket.aggregations, samplingContext),
+                    bucket
+                )
+            );
+        }
+        return createWithFilter(name, buckets, filter);
     }
 
     public abstract boolean containsTerm(SetBackedScalingCuckooFilter filter, B bucket);
@@ -27,6 +27,7 @@
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -284,20 +285,19 @@ private ReducerAndExtraInfo(BucketReducer<B> reducer) {
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
         long supersetSize = samplingContext.scaleUp(getSupersetSize());
         long subsetSize = samplingContext.scaleUp(getSubsetSize());
-        return create(
-            subsetSize,
-            supersetSize,
-            getBuckets().stream()
-                .map(
-                    b -> createBucket(
-                        samplingContext.scaleUp(b.subsetDf),
-                        samplingContext.scaleUp(b.supersetDf),
-                        InternalAggregations.finalizeSampling(b.aggregations, samplingContext),
-                        b
-                    )
-                )
-                .toList()
-        );
+        final List<B> originalBuckets = getBuckets();
+        final List<B> buckets = new ArrayList<>(originalBuckets.size());
+        for (B bucket : originalBuckets) {
+            buckets.add(
+                createBucket(
+                    samplingContext.scaleUp(bucket.subsetDf),
+                    samplingContext.scaleUp(bucket.supersetDf),
+                    InternalAggregations.finalizeSampling(bucket.aggregations, samplingContext),
+                    bucket
+                )
+            );
+        }
+        return create(subsetSize, supersetSize, buckets);
     }
 
     abstract B createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, B prototype);
@@ -360,22 +360,17 @@ public InternalAggregation get() {
 
     @Override
     public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
-        return new InternalCategorizationAggregation(
-            name,
-            requiredSize,
-            minDocCount,
-            similarityThreshold,
-            metadata,
-            buckets.stream()
-                .map(
-                    b -> new Bucket(
-                        new SerializableTokenListCategory(b.getSerializableCategory(), samplingContext.scaleUp(b.getDocCount())),
-                        b.getBucketOrd(),
-                        InternalAggregations.finalizeSampling(b.aggregations, samplingContext)
-                    )
-                )
-                .collect(Collectors.toList())
-        );
+        final List<Bucket> buckets = new ArrayList<>(this.buckets.size());
+        for (Bucket bucket : this.buckets) {
+            buckets.add(
+                new Bucket(
+                    new SerializableTokenListCategory(bucket.getSerializableCategory(), samplingContext.scaleUp(bucket.getDocCount())),
+                    bucket.getBucketOrd(),
+                    InternalAggregations.finalizeSampling(bucket.aggregations, samplingContext)
+                )
+            );
+        }
+        return new InternalCategorizationAggregation(name, requiredSize, minDocCount, similarityThreshold, metadata, buckets);
     }
 
     public int getSimilarityThreshold() {