Commit 69ab144

Revert "LUCENE-9935: Enable bulk-merge for term vectors with index sort (#140)"
This reverts commit 54fb21e.
1 parent 54fb21e commit 69ab144

File tree

3 files changed (+145, -275 lines)

lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsReader.java

Lines changed: 4 additions & 25 deletions
@@ -91,7 +91,6 @@ public final class Lucene90CompressingTermVectorsReader extends TermVectorsReader
   private final long numDirtyChunks; // number of incomplete compressed blocks written
   private final long numDirtyDocs; // cumulative number of docs in incomplete chunks
   private final long maxPointer; // end of the data section
-  private BlockState blockState = new BlockState(-1, -1, 0);
 
   // used by clone
   private Lucene90CompressingTermVectorsReader(Lucene90CompressingTermVectorsReader reader) {
@@ -311,45 +310,25 @@ private static RandomAccessInput slice(IndexInput in) throws IOException {
     return new ByteBuffersDataInput(Collections.singletonList(ByteBuffer.wrap(bytes)));
   }
 
-  /** Checks if a given docID was loaded in the current block state. */
-  boolean isLoaded(int docID) {
-    return blockState.docBase <= docID && docID < blockState.docBase + blockState.chunkDocs;
-  }
-
-  private static class BlockState {
-    final long startPointer;
-    final int docBase;
-    final int chunkDocs;
-
-    BlockState(long startPointer, int docBase, int chunkDocs) {
-      this.startPointer = startPointer;
-      this.docBase = docBase;
-      this.chunkDocs = chunkDocs;
-    }
-  }
-
   @Override
   public Fields get(int doc) throws IOException {
     ensureOpen();
 
     // seek to the right place
-    final long startPointer;
-    if (isLoaded(doc)) {
-      startPointer = blockState.startPointer; // avoid searching the start pointer
-    } else {
-      startPointer = indexReader.getStartPointer(doc);
+    {
+      final long startPointer = indexReader.getStartPointer(doc);
+      vectorsStream.seek(startPointer);
     }
 
     // decode
     // - docBase: first doc ID of the chunk
     // - chunkDocs: number of docs of the chunk
     final int docBase = vectorsStream.readVInt();
-    final int chunkDocs = vectorsStream.readVInt() >>> 1;
+    final int chunkDocs = vectorsStream.readVInt();
     if (doc < docBase || doc >= docBase + chunkDocs || docBase + chunkDocs > numDocs) {
       throw new CorruptIndexException(
           "docBase=" + docBase + ",chunkDocs=" + chunkDocs + ",doc=" + doc, vectorsStream);
     }
-    this.blockState = new BlockState(startPointer, docBase, chunkDocs);
 
     final int skip; // number of fields to skip
     final int numFields; // number of fields of the document we're looking for
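For readers skimming the diff: this revert removes the BlockState cache that LUCENE-9935 added to Lucene90CompressingTermVectorsReader, restoring the unconditional indexReader.getStartPointer(doc) lookup in get(doc), and it also restores chunkDocs to a plain vInt read instead of a vInt whose low bit is discarded. The sketch below is not Lucene code; it only contrasts the two start-pointer strategies using simplified, hypothetical names.

// Minimal sketch, NOT the actual Lucene implementation: simplified stand-in names
// used only to contrast the two lookup strategies visible in this diff.
class StartPointerLookupSketch {

  // What the removed BlockState cached: the chunk most recently decoded.
  private long cachedStartPointer = -1;
  private int cachedDocBase = -1;
  private int cachedChunkDocs = 0;

  // Stand-in for indexReader.getStartPointer(doc): locates the chunk holding docID
  // by searching the term vectors index (details elided in this sketch).
  private long searchStartPointer(int docID) {
    return 0L; // placeholder
  }

  // Pre-revert behavior (removed by this commit): reuse the cached pointer when
  // docID falls inside the currently loaded chunk, i.e. the old isLoaded(docID) check.
  long startPointerWithCache(int docID) {
    if (cachedDocBase <= docID && docID < cachedDocBase + cachedChunkDocs) {
      return cachedStartPointer; // avoid searching the start pointer
    }
    return searchStartPointer(docID);
  }

  // Post-revert behavior (restored by this commit): always search the index.
  long startPointerWithoutCache(int docID) {
    return searchStartPointer(docID);
  }
}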
