docs/changelog/131658.yaml (5 additions, 0 deletions)
@@ -0,0 +1,5 @@
pr: 131658
summary: Fix `aggregate_metric_double` sorting and `mv_expand` issues
area: ES|QL
type: bug
issues: []
@@ -208,8 +208,8 @@ public MvOrdering mvOrdering() {

@Override
public Block expand() {
-        // TODO: support
-        throw new UnsupportedOperationException("AggregateMetricDoubleBlock");
+        this.incRef();
+        return this;
}

@Override
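For context on the hunk above: expand() is the Block hook that MV_EXPAND relies on, and an aggregate_metric_double block only ever carries one composite value per position, so the fix turns the former UnsupportedOperationException into a ref-counted no-op. Below is a minimal sketch of that pattern with simplified stand-in types; it is not the real ES|QL Block hierarchy.

// Simplified stand-ins illustrating the ref-counted no-op expand pattern.
interface SketchBlock {
    void incRef();
    void decRef();
    SketchBlock expand();
}

final class SingleValuedSketchBlock implements SketchBlock {
    private int refCount = 1;

    @Override
    public void incRef() {
        refCount++;
    }

    @Override
    public void decRef() {
        if (--refCount == 0) {
            // last reference gone: release backing buffers here
        }
    }

    @Override
    public SketchBlock expand() {
        // Nothing to expand: return this block with one extra reference, so the
        // caller can release the "expanded" block independently of the original.
        incRef();
        return this;
    }

    public static void main(String[] args) {
        SketchBlock block = new SingleValuedSketchBlock();
        SketchBlock expanded = block.expand(); // same instance, reference count is now 2
        expanded.decRef();
        block.decRef();
    }
}

Callers can then treat every block uniformly, releasing the expanded block whether or not a new block was actually allocated.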
@@ -30,6 +30,11 @@ public void decodeKey(BytesRef keys) {

@Override
public void decodeValue(BytesRef values) {
int count = TopNEncoder.DEFAULT_UNSORTABLE.decodeVInt(values);
if (count == 0) {
builder.appendNull();
return;
}
for (BlockLoader.DoubleBuilder subBuilder : List.of(builder.min(), builder.max(), builder.sum())) {
if (TopNEncoder.DEFAULT_UNSORTABLE.decodeBoolean(values)) {
subBuilder.appendDouble(TopNEncoder.DEFAULT_UNSORTABLE.decodeDouble(values));
@@ -51,7 +56,7 @@ public Block build() {

@Override
public String toString() {
return "ValueExtractorForAggregateMetricDouble";
return "ResultBuilderForAggregateMetricDouble";
}

@Override
@@ -24,6 +24,7 @@ public class ValueExtractorForAggregateMetricDouble implements ValueExtractor {

@Override
public void writeValue(BreakingBytesRefBuilder values, int position) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeVInt(1, values);
for (DoubleBlock doubleBlock : List.of(block.minBlock(), block.maxBlock(), block.sumBlock())) {
if (doubleBlock.isNull(position)) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBoolean(false, values);
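Taken together with the decoder change above, this pins down the TopN row layout for aggregate_metric_double: a leading VInt count (1 when a value is present; 0 presumably being what the generic null path writes, which the result builder now turns into a null row), followed by a presence boolean and, when present, a double for each sub-metric. The hunks shown here cover the min/max/sum doubles; the value_count part of the row is in the truncated portion of the diff. Below is a rough, self-contained sketch of that layout using plain java.io streams rather than the real TopNEncoder, with a single byte standing in for the VInt.

// Rough sketch of the TopN row layout for aggregate_metric_double values.
// java.io streams stand in for TopNEncoder, and a single byte stands in for
// the VInt count; only the min/max/sum doubles are modeled here.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class AggregateMetricDoubleRowSketch {

    static byte[] encode(Double min, Double max, Double sum) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeByte(1); // one value present at this position
        for (Double metric : new Double[] { min, max, sum }) {
            if (metric == null) {
                out.writeBoolean(false); // sub-metric absent
            } else {
                out.writeBoolean(true);
                out.writeDouble(metric);
            }
        }
        return bytes.toByteArray();
    }

    static byte[] encodeMissing() throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new DataOutputStream(bytes).writeByte(0); // no value: the decoder appends null
        return bytes.toByteArray();
    }

    static String decode(byte[] row) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(row));
        if (in.readByte() == 0) {
            return "null";
        }
        StringBuilder result = new StringBuilder();
        for (String name : new String[] { "min", "max", "sum" }) {
            result.append(name).append('=');
            result.append(in.readBoolean() ? String.valueOf(in.readDouble()) : "null").append(' ');
        }
        return result.toString().trim();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(decode(encode(801479.0, 802133.0, 1603612.0))); // min=801479.0 max=802133.0 sum=1603612.0
        System.out.println(decode(encodeMissing()));                       // null
    }
}

Per the capability comment below, the bug was that a missing aggregate metric was encoded as a single null value while the decoder expected the full four-value block; the leading count lets the decoder tell the two cases apart, which is what fixes sorting across mixed indices.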
@@ -915,6 +915,17 @@ public enum Cap {
*/
AGGREGATE_METRIC_DOUBLE_IMPLICIT_CASTING_IN_AGGS(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG),

/**
 * Fixes a bug where an aggregate metric double was encoded as a single null value but decoded as
 * an AggregateMetricDoubleBlock (expecting 4 values) in TopN.
*/
AGGREGATE_METRIC_DOUBLE_SORTING_FIXED(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG),

/**
 * Stop erroring out when MV_EXPAND is applied to an aggregate metric double (the expansion is a no-op).
*/
AGGREGATE_METRIC_DOUBLE_MV_EXPAND(AGGREGATE_METRIC_DOUBLE_FEATURE_FLAG),

/**
* Support change point detection "CHANGE_POINT".
*/
@@ -629,3 +629,111 @@ setup:
- match: {values.5.2: 1065204.0}
- match: {values.5.3: "dog"}
- match: {values.5.4: "2021-04-29T21:00:00.000Z"}

---
"Sort from multiple indices one with aggregate metric double":
- requires:
test_runner_features: [capabilities]
capabilities:
- method: POST
path: /_query
parameters: []
capabilities: [aggregate_metric_double_sorting_fixed]
reason: "Fix sorting for rows comprised of docs from multiple indices where agg metric is missing from some"

- do:
indices.downsample:
index: test
target_index: test-downsample
body: >
{
"fixed_interval": "1h"
}
- is_true: acknowledged

- do:
indices.create:
index: test-2
body:
mappings:
properties:
some_field:
type: keyword

- do:
bulk:
refresh: true
index: test-2
body:
- '{"index": {}}'
- '{"some_field": "im a keyword!!!!!"}'

- do:
esql.query:
body:
query: "FROM test-* | SORT some_field, @timestamp, k8s.pod.uid | KEEP k8s.pod.network.rx, some_field, @timestamp | LIMIT 10"

- length: {values: 5}
- length: {values.0: 3}
- match: {columns.0.name: "k8s.pod.network.rx"}
- match: {columns.0.type: "aggregate_metric_double"}
- match: {columns.1.name: "some_field"}
- match: {columns.1.type: "keyword"}
- match: {columns.2.name: "@timestamp"}
- match: {columns.2.type: "date"}
- match: {values.0.0: null}
- match: {values.0.1: "im a keyword!!!!!"}
- match: {values.0.2: null}
- match: {values.1.0: '{"min":801479.0,"max":802133.0,"sum":1603612.0,"value_count":2}'}
- match: {values.1.1: null}
- match: {values.1.2: "2021-04-28T18:00:00.000Z"}
- match: {values.2.0: '{"min":530575.0,"max":530600.0,"sum":1061175.0,"value_count":2}'}
- match: {values.2.1: null}
- match: {values.2.2: "2021-04-28T18:00:00.000Z"}
- match: {values.3.0: '{"min":530604.0,"max":530605.0,"sum":1061209.0,"value_count":2}'}
- match: {values.3.1: null}
- match: {values.3.2: "2021-04-28T19:00:00.000Z"}
- match: {values.4.0: '{"min":802337.0,"max":803685.0,"sum":1606022.0,"value_count":2}'}
- match: {values.4.1: null}
- match: {values.4.2: "2021-04-28T20:00:00.000Z"}

---
"MV_EXPAND on non-MV aggregate metric double":
- requires:
test_runner_features: [capabilities]
capabilities:
- method: POST
path: /_query
parameters: []
capabilities: [aggregate_metric_double_mv_expand]
reason: "Have MV_EXPAND not error out when applied to aggregate_metric_doubles (is a no-op)"

- do:
indices.downsample:
index: test
target_index: test-downsample
body: >
{
"fixed_interval": "1h"
}
- is_true: acknowledged

- do:
esql.query:
body:
query: "FROM test-downsample | MV_EXPAND k8s.pod.network.rx | SORT @timestamp, k8s.pod.uid | KEEP k8s.pod.network.rx, @timestamp | LIMIT 10"

- length: {values: 4}
- length: {values.0: 2}
- match: {columns.0.name: "k8s.pod.network.rx"}
- match: {columns.0.type: "aggregate_metric_double"}
- match: {columns.1.name: "@timestamp"}
- match: {columns.1.type: "date"}
- match: {values.0.0: '{"min":801479.0,"max":802133.0,"sum":1603612.0,"value_count":2}'}
- match: {values.0.1: "2021-04-28T18:00:00.000Z"}
- match: {values.1.0: '{"min":530575.0,"max":530600.0,"sum":1061175.0,"value_count":2}'}
- match: {values.1.1: "2021-04-28T18:00:00.000Z"}
- match: {values.2.0: '{"min":530604.0,"max":530605.0,"sum":1061209.0,"value_count":2}'}
- match: {values.2.1: "2021-04-28T19:00:00.000Z"}
- match: {values.3.0: '{"min":802337.0,"max":803685.0,"sum":1606022.0,"value_count":2}'}
- match: {values.3.1: "2021-04-28T20:00:00.000Z"}