@@ -117,7 +117,8 @@ private Lucene90CompressingTermVectorsReader(Lucene90CompressingTermVectorsReade
117117 this .decompressor = reader .decompressor .clone ();
118118 this .chunkSize = reader .chunkSize ;
119119 this .numDocs = reader .numDocs ;
120- this .reader = new BlockPackedReaderIterator (vectorsStream , packedIntsVersion , PACKED_BLOCK_SIZE , 0 );
120+ this .reader =
121+ new BlockPackedReaderIterator (vectorsStream , packedIntsVersion , PACKED_BLOCK_SIZE , 0 );
121122 this .version = reader .version ;
122123 this .numChunks = reader .numChunks ;
123124 this .numDirtyChunks = reader .numDirtyChunks ;
@@ -146,13 +147,18 @@ public Lucene90CompressingTermVectorsReader(
146147 ChecksumIndexInput metaIn = null ;
147148 try {
148149 // Open the data file
149- final String vectorsStreamFN = IndexFileNames .segmentFileName (segment , segmentSuffix , VECTORS_EXTENSION );
150- vectorsStream = d .openInput (vectorsStreamFN , context .withHints (FileTypeHint .DATA , DataAccessHint .RANDOM ));
151- version = CodecUtil .checkIndexHeader (
152- vectorsStream , formatName , VERSION_START , VERSION_CURRENT , si .getId (), segmentSuffix );
153- assert CodecUtil .indexHeaderLength (formatName , segmentSuffix ) == vectorsStream .getFilePointer ();
154-
155- final String metaStreamFN = IndexFileNames .segmentFileName (segment , segmentSuffix , VECTORS_META_EXTENSION );
150+ final String vectorsStreamFN =
151+ IndexFileNames .segmentFileName (segment , segmentSuffix , VECTORS_EXTENSION );
152+ vectorsStream =
153+ d .openInput (vectorsStreamFN , context .withHints (FileTypeHint .DATA , DataAccessHint .RANDOM ));
154+ version =
155+ CodecUtil .checkIndexHeader (
156+ vectorsStream , formatName , VERSION_START , VERSION_CURRENT , si .getId (), segmentSuffix );
157+ assert CodecUtil .indexHeaderLength (formatName , segmentSuffix )
158+ == vectorsStream .getFilePointer ();
159+
160+ final String metaStreamFN =
161+ IndexFileNames .segmentFileName (segment , segmentSuffix , VECTORS_META_EXTENSION );
156162 metaIn = d .openChecksumInput (metaStreamFN );
157163 CodecUtil .checkIndexHeader (
158164 metaIn ,
@@ -173,15 +179,16 @@ public Lucene90CompressingTermVectorsReader(
173179 // such as file truncation.
174180 CodecUtil .retrieveChecksum (vectorsStream );
175181
176- FieldsIndexReader fieldsIndexReader = new FieldsIndexReader (
177- d ,
178- si .name ,
179- segmentSuffix ,
180- VECTORS_INDEX_EXTENSION ,
181- VECTORS_INDEX_CODEC_NAME ,
182- si .getId (),
183- metaIn ,
184- context );
182+ FieldsIndexReader fieldsIndexReader =
183+ new FieldsIndexReader (
184+ d ,
185+ si .name ,
186+ segmentSuffix ,
187+ VECTORS_INDEX_EXTENSION ,
188+ VECTORS_INDEX_CODEC_NAME ,
189+ si .getId (),
190+ metaIn ,
191+ context );
185192
186193 this .indexReader = fieldsIndexReader ;
187194 this .maxPointer = fieldsIndexReader .getMaxPointer ();
@@ -216,7 +223,8 @@ public Lucene90CompressingTermVectorsReader(
216223 }
217224
218225 decompressor = compressionMode .newDecompressor ();
219- this .reader = new BlockPackedReaderIterator (vectorsStream , packedIntsVersion , PACKED_BLOCK_SIZE , 0 );
226+ this .reader =
227+ new BlockPackedReaderIterator (vectorsStream , packedIntsVersion , PACKED_BLOCK_SIZE , 0 );
220228
221229 CodecUtil .checkFooter (metaIn , null );
222230 metaIn .close ();
@@ -335,8 +343,7 @@ boolean isLoaded(int docID) {
335343 return blockState .docBase <= docID && docID < blockState .docBase + blockState .chunkDocs ;
336344 }
337345
338- private record BlockState (long startPointer , int docBase , int chunkDocs ) {
339- }
346+ private record BlockState (long startPointer , int docBase , int chunkDocs ) {}
340347
341348 @ Override
342349 public void prefetch (int docID ) throws IOException {
@@ -416,13 +423,14 @@ public Fields get(int doc) throws IOException {
416423 totalDistinctFields += vectorsStream .readVInt ();
417424 }
418425 ++totalDistinctFields ;
419- final PackedInts .ReaderIterator it = PackedInts .getReaderIteratorNoHeader (
420- vectorsStream ,
421- PackedInts .Format .PACKED ,
422- packedIntsVersion ,
423- totalDistinctFields ,
424- bitsPerFieldNum ,
425- 1 );
426+ final PackedInts .ReaderIterator it =
427+ PackedInts .getReaderIteratorNoHeader (
428+ vectorsStream ,
429+ PackedInts .Format .PACKED ,
430+ packedIntsVersion ,
431+ totalDistinctFields ,
432+ bitsPerFieldNum ,
433+ 1 );
426434 fieldNums = new int [totalDistinctFields ];
427435 for (int i = 0 ; i < totalDistinctFields ; ++i ) {
428436 fieldNums [i ] = (int ) it .next ();
@@ -490,7 +498,7 @@ public Fields get(int doc) throws IOException {
490498 final int termCount = (int ) numTerms .get (skip + i );
491499 final int [] fieldPrefixLengths = new int [termCount ];
492500 prefixLengths [i ] = fieldPrefixLengths ;
493- for (int j = 0 ; j < termCount ;) {
501+ for (int j = 0 ; j < termCount ; ) {
494502 final LongsRef next = reader .next (termCount - j );
495503 for (int k = 0 ; k < next .length ; ++k ) {
496504 fieldPrefixLengths [j ++] = (int ) next .longs [next .offset + k ];
@@ -511,7 +519,7 @@ public Fields get(int doc) throws IOException {
511519 final int termCount = (int ) numTerms .get (skip + i );
512520 final int [] fieldSuffixLengths = new int [termCount ];
513521 suffixLengths [i ] = fieldSuffixLengths ;
514- for (int j = 0 ; j < termCount ;) {
522+ for (int j = 0 ; j < termCount ; ) {
515523 final LongsRef next = reader .next (termCount - j );
516524 for (int k = 0 ; k < next .length ; ++k ) {
517525 fieldSuffixLengths [j ++] = (int ) next .longs [next .offset + k ];
@@ -532,7 +540,7 @@ public Fields get(int doc) throws IOException {
532540 final int [] termFreqs = new int [totalTerms ];
533541 {
534542 reader .reset (vectorsStream , totalTerms );
535- for (int i = 0 ; i < totalTerms ;) {
543+ for (int i = 0 ; i < totalTerms ; ) {
536544 final LongsRef next = reader .next (totalTerms - i );
537545 for (int k = 0 ; k < next .length ; ++k ) {
538546 termFreqs [i ++] = 1 + (int ) next .longs [next .offset + k ];
@@ -563,15 +571,16 @@ public Fields get(int doc) throws IOException {
563571 final int [][] positionIndex = positionIndex (skip , numFields , numTerms , termFreqs );
564572 final int [][] positions , startOffsets , lengths ;
565573 if (totalPositions > 0 ) {
566- positions = readPositions (
567- skip ,
568- numFields ,
569- flags ,
570- numTerms ,
571- termFreqs ,
572- POSITIONS ,
573- totalPositions ,
574- positionIndex );
574+ positions =
575+ readPositions (
576+ skip ,
577+ numFields ,
578+ flags ,
579+ numTerms ,
580+ termFreqs ,
581+ POSITIONS ,
582+ totalPositions ,
583+ positionIndex );
575584 } else {
576585 positions = new int [numFields ][];
577586 }
@@ -582,10 +591,12 @@ public Fields get(int doc) throws IOException {
582591 for (int i = 0 ; i < charsPerTerm .length ; ++i ) {
583592 charsPerTerm [i ] = Float .intBitsToFloat (vectorsStream .readInt ());
584593 }
585- startOffsets = readPositions (
586- skip , numFields , flags , numTerms , termFreqs , OFFSETS , totalOffsets , positionIndex );
587- lengths = readPositions (
588- skip , numFields , flags , numTerms , termFreqs , OFFSETS , totalOffsets , positionIndex );
594+ startOffsets =
595+ readPositions (
596+ skip , numFields , flags , numTerms , termFreqs , OFFSETS , totalOffsets , positionIndex );
597+ lengths =
598+ readPositions (
599+ skip , numFields , flags , numTerms , termFreqs , OFFSETS , totalOffsets , positionIndex );
589600
590601 for (int i = 0 ; i < numFields ; ++i ) {
591602 final int [] fStartOffsets = startOffsets [i ];
@@ -707,7 +718,8 @@ public Fields get(int doc) throws IOException {
707718 docLen + payloadLen ,
708719 suffixBytes );
709720 suffixBytes .length = docLen ;
710- final BytesRef payloadBytes = new BytesRef (suffixBytes .bytes , suffixBytes .offset + docLen , payloadLen );
721+ final BytesRef payloadBytes =
722+ new BytesRef (suffixBytes .bytes , suffixBytes .offset + docLen , payloadLen );
711723
712724 final int [] fieldFlags = new int [numFields ];
713725 for (int i = 0 ; i < numFields ; ++i ) {
@@ -809,7 +821,7 @@ private int[][] readPositions(
809821 final int totalFreq = positionIndex [i ][termCount ];
810822 final int [] fieldPositions = new int [totalFreq ];
811823 positions [i ] = fieldPositions ;
812- for (int j = 0 ; j < totalFreq ;) {
824+ for (int j = 0 ; j < totalFreq ; ) {
813825 final LongsRef nextPositions = reader .next (totalFreq - j );
814826 for (int k = 0 ; k < nextPositions .length ; ++k ) {
815827 fieldPositions [j ++] = (int ) nextPositions .longs [nextPositions .offset + k ];