8 changes: 8 additions & 0 deletions docs/changelog/133601.yaml
@@ -0,0 +1,8 @@
pr: 133601
summary: Fix bug in topn
area: ES|QL
type: bug
issues:
- 133600
- 133574
- 133607
9 changes: 0 additions & 9 deletions muted-tests.yml
@@ -627,24 +627,15 @@ tests:
- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
method: test {p0=search/10_source_filtering/no filtering}
issue: https://github.com/elastic/elasticsearch/issues/133561
- class: org.elasticsearch.compute.lucene.LuceneTopNSourceOperatorTests
method: testShardDataPartitioning
issue: https://github.com/elastic/elasticsearch/issues/133574
- class: org.elasticsearch.compute.data.BasicBlockTests
method: testIntBlock
issue: https://github.com/elastic/elasticsearch/issues/133596
- class: org.elasticsearch.xpack.logsdb.patternedtext.PatternedTextFieldMapperTests
method: testSyntheticSourceMany
issue: https://github.com/elastic/elasticsearch/issues/133598
- class: org.elasticsearch.compute.lucene.LuceneTopNSourceOperatorTests
method: testWithCranky
issue: https://github.com/elastic/elasticsearch/issues/133600
- class: org.elasticsearch.compute.data.BasicBlockTests
method: testDoubleBlock
issue: https://github.com/elastic/elasticsearch/issues/133606
- class: org.elasticsearch.compute.lucene.LuceneTopNSourceOperatorTests
method: testShardDataPartitioningWithCranky
issue: https://github.com/elastic/elasticsearch/issues/133607
- class: org.elasticsearch.compute.data.BasicBlockTests
method: testBooleanBlock
issue: https://github.com/elastic/elasticsearch/issues/133608
@@ -108,6 +108,12 @@ public String describe() {
* Collected docs. {@code null} until we {@link #emit(boolean)}.
*/
private ScoreDoc[] scoreDocs;

/**
* {@link ShardRefCounted} for collected docs.
*/
private ShardRefCounted shardRefCounted;

/**
* The offset in {@link #scoreDocs} of the next page.
*/
@@ -142,6 +148,7 @@ public boolean isFinished() {
public void finish() {
doneCollecting = true;
scoreDocs = null;
shardRefCounted = null;
assert isFinished();
}

@@ -202,6 +209,8 @@ private Page emit(boolean startEmitting) {
offset = 0;
if (perShardCollector != null) {
scoreDocs = perShardCollector.collector.topDocs().scoreDocs;
int shardId = perShardCollector.shardContext.index();
shardRefCounted = new ShardRefCounted.Single(shardId, shardContextCounters.get(shardId));
} else {
scoreDocs = new ScoreDoc[0];
}
@@ -239,7 +248,6 @@ private Page emit(boolean startEmitting) {
shard = blockFactory.newConstantIntBlockWith(shardId, size);
segments = currentSegmentBuilder.build();
docs = currentDocsBuilder.build();
ShardRefCounted shardRefCounted = new ShardRefCounted.Single(shardId, shardContextCounters.get(shardId));
docBlock = new DocVector(shardRefCounted, shard.asVector(), segments, docs, null).asBlock();
shard = null;
segments = null;
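Taken together, the Java hunks (in what appears to be LuceneTopNSourceOperator, judging by the unmuted LuceneTopNSourceOperatorTests entries) move the ShardRefCounted from a per-page local variable into a field that is captured once alongside scoreDocs and nulled with it in finish(). Below is a minimal, self-contained sketch of that pattern, not the operator's real API: ShardRef, startEmitting, and emitPage are hypothetical stand-ins used only to illustrate capturing the shard reference once for the lifetime of the collected docs.

import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;

// Sketch only: hold the shard's ref count in a field for as long as the
// collected docs live, instead of building a fresh reference for every
// emitted page. ShardRef, startEmitting, and emitPage are hypothetical.
public class TopNEmitSketch {

    /** Hypothetical stand-in for a ref-counted shard context. */
    static final class ShardRef {
        final int shardId;
        private final AtomicInteger refs = new AtomicInteger(1);

        ShardRef(int shardId) { this.shardId = shardId; }
        void incRef() { refs.incrementAndGet(); }
        void decRef() { refs.decrementAndGet(); }
        int refCount() { return refs.get(); }
    }

    private int[] scoreDocs;   // stands in for ScoreDoc[]
    private ShardRef shardRef; // captured once, like the new shardRefCounted field
    private int offset;

    /** Capture the docs and the shard reference together, once. */
    void startEmitting(int[] collected, ShardRef ref) {
        scoreDocs = collected;
        shardRef = ref;
        shardRef.incRef(); // keep the shard alive for every page built from scoreDocs
        offset = 0;
    }

    /** Build one page, reusing the captured reference rather than creating a new one. */
    int[] emitPage(int pageSize) {
        int end = Math.min(offset + pageSize, scoreDocs.length);
        int[] page = Arrays.copyOfRange(scoreDocs, offset, end);
        offset = end;
        return page; // the real operator wraps pages in a DocVector holding shardRefCounted
    }

    /** Clear both together, mirroring finish() nulling scoreDocs and shardRefCounted. */
    void finish() {
        scoreDocs = null;
        if (shardRef != null) {
            shardRef.decRef();
            shardRef = null;
        }
    }

    public static void main(String[] args) {
        TopNEmitSketch op = new TopNEmitSketch();
        ShardRef ref = new ShardRef(0);
        op.startEmitting(new int[] { 5, 3, 9, 1 }, ref);
        System.out.println("page 1: " + Arrays.toString(op.emitPage(2)));
        System.out.println("page 2: " + Arrays.toString(op.emitPage(2)));
        op.finish();
        System.out.println("refs after finish: " + ref.refCount());
    }
}

The point of capturing once is symmetry on the release path: whatever nulls scoreDocs also drops the shard reference, so a partially emitted top-N result cannot outlive, or double-release, the shard context it points into.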