Commit ae089f8

Rename
1 parent 60b3314 commit ae089f8

File tree

1 file changed: +20 -8 lines changed

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/read/ValuesFromSingleReader.java

Lines changed: 20 additions & 8 deletions
@@ -82,7 +82,7 @@ protected void load(Block[] target, int offset) throws IOException {
     }
 
     private void loadFromSingleLeaf(long jumboBytes, Block[] target, ValuesReaderDocs docs, int offset) throws IOException {
-        int firstDoc = docs.get(0);
+        int firstDoc = docs.get(offset);
         operator.positionFieldWork(shard, segment, firstDoc);
         StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS;
         LeafReaderContext ctx = operator.ctx(shard, segment);
@@ -112,8 +112,8 @@ private void loadFromSingleLeaf(long jumboBytes, Block[] target, ValuesReaderDoc
             loadFromRowStrideReaders(jumboBytes, target, storedFieldsSpec, rowStrideReaders, ctx, docs, offset);
         }
         for (ColumnAtATimeWork r : columnAtATimeReaders) {
-            target[r.offset] = (Block) r.reader.read(loaderBlockFactory, docs, offset);
-            operator.sanityCheckBlock(r.reader, docs.count(), target[r.offset], r.offset);
+            target[r.idx] = (Block) r.reader.read(loaderBlockFactory, docs, offset);
+            operator.sanityCheckBlock(r.reader, docs.count(), target[r.idx], r.idx);
         }
         if (log.isDebugEnabled()) {
             long total = 0;
@@ -171,13 +171,13 @@ private void loadFromRowStrideReaders(
            log.trace("{}: bytes loaded {}/{}", p, estimated, jumboBytes);
        }
        for (RowStrideReaderWork work : rowStrideReaders) {
-            target[work.offset] = work.build();
-            operator.sanityCheckBlock(work.reader, p - offset, target[work.offset], work.offset);
+            target[work.idx] = work.build();
+            operator.sanityCheckBlock(work.reader, p - offset, target[work.idx], work.idx);
        }
        if (log.isDebugEnabled()) {
            long actual = 0;
            for (RowStrideReaderWork work : rowStrideReaders) {
-                actual += target[work.offset].ramBytesUsed();
+                actual += target[work.idx].ramBytesUsed();
            }
            log.debug("loaded {} positions row stride estimated/actual {}/{} bytes", p - offset, estimated, actual);
        }
@@ -197,9 +197,21 @@ private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs, double st
         return range * storedFieldsSequentialProportion <= count;
     }
 
-    private record ColumnAtATimeWork(BlockLoader.ColumnAtATimeReader reader, int offset) {}
+    /**
+     * Work for building a column-at-a-time.
+     * @param reader reads the values
+     * @param idx destination in array of {@linkplain Block}s we build
+     */
+    private record ColumnAtATimeWork(BlockLoader.ColumnAtATimeReader reader, int idx) {}
 
-    private record RowStrideReaderWork(BlockLoader.RowStrideReader reader, Block.Builder builder, BlockLoader loader, int offset)
+    /**
+     * Work for
+     * @param reader
+     * @param builder
+     * @param loader
+     * @param idx
+     */
+    private record RowStrideReaderWork(BlockLoader.RowStrideReader reader, Block.Builder builder, BlockLoader loader, int idx)
         implements
             Releasable {
         void read(int doc, BlockLoaderStoredFieldsFromLeafLoader storedFields) throws IOException {
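
Besides renaming the record components from offset to idx, the diff changes docs.get(0) to docs.get(offset) when picking the doc used to position field work. Below is a minimal, self-contained Java sketch of why that distinction matters; the plain List, its values, and the offset are hypothetical stand-ins for ValuesReaderDocs, not the actual Elasticsearch API.

// Hypothetical sketch: a plain List stands in for ValuesReaderDocs.
import java.util.List;

public class FirstDocSketch {
    public static void main(String[] args) {
        List<Integer> docs = List.of(3, 7, 11, 15); // sorted doc ids for one leaf
        int offset = 2;                             // this load starts at docs[2]

        int before = docs.get(0);      // 3  - first doc of the whole list
        int after = docs.get(offset);  // 11 - first doc actually loaded here

        // Positioning field work on `before` when offset > 0 targets a doc that
        // is not part of this load; `after` is the first doc the readers visit.
        System.out.println("docs.get(0)=" + before + ", docs.get(offset)=" + after);
    }
}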
