import { createApiError } from "../error";
import type { CredentialsParams } from "../types/public";
import { checkCredentials } from "./checkCredentials";
+import { combineUint8Arrays } from "./combineUint8Arrays";
import { decompress as lz4_decompress } from "../vendor/lz4js";
import { RangeList } from "./RangeList";
@@ -201,7 +202,7 @@ export class XetBlob extends Blob {
			rangeList.add(term.range.start, term.range.end);
		}
		const listener = this.listener;
-		const log = this.internalLogging ? (...args: unknown[]) => console.log(...args) : () => {};
+		const log = this.internalLogging ? (...args: unknown[]) => console.log(...args) : () => {};

		async function* readData(
			reconstructionInfo: ReconstructionInfo,
@@ -327,14 +328,11 @@ export class XetBlob extends Blob {
				totalFetchBytes += result.value.byteLength;

				if (leftoverBytes) {
-					const leftoverBytesLength: number = leftoverBytes.length;
-					const combinedBytes = new Uint8Array(leftoverBytesLength + result.value.length);
-					combinedBytes.set(leftoverBytes);
-					combinedBytes.set(result.value, leftoverBytesLength);
-					result.value = combinedBytes;
+					result.value = combineUint8Arrays(leftoverBytes, result.value);
+					leftoverBytes = undefined;
				}

-				while (totalBytesRead < maxBytes && result.value.byteLength) {
+				while (totalBytesRead < maxBytes && result.value?.byteLength) {
					if (result.value.byteLength < 8) {
						// We need 8 bytes to parse the chunk header
						leftoverBytes = result.value;
@@ -361,8 +359,7 @@ export class XetBlob extends Blob {
						chunkHeader.compression_scheme !== XetChunkCompressionScheme.ByteGroupingLZ4
					) {
						throw new Error(
-							`Unsupported compression scheme ${
-								compressionSchemeLabels[chunkHeader.compression_scheme] ?? chunkHeader.compression_scheme
+							`Unsupported compression scheme ${compressionSchemeLabels[chunkHeader.compression_scheme] ?? chunkHeader.compression_scheme
							}`
						);
					}
@@ -379,13 +376,13 @@ export class XetBlob extends Blob {
						chunkHeader.compression_scheme === XetChunkCompressionScheme.LZ4
							? lz4_decompress(result.value.slice(0, chunkHeader.compressed_length), chunkHeader.uncompressed_length)
							: chunkHeader.compression_scheme === XetChunkCompressionScheme.ByteGroupingLZ4
-							  ? bg4_regroup_bytes(
-									lz4_decompress(
-										result.value.slice(0, chunkHeader.compressed_length),
-										chunkHeader.uncompressed_length
-									)
-							  )
-							  : result.value.slice(0, chunkHeader.compressed_length);
+								? bg4_regroup_bytes(
+										lz4_decompress(
+											result.value.slice(0, chunkHeader.compressed_length),
+											chunkHeader.uncompressed_length
+										)
+								  )
+								: result.value.slice(0, chunkHeader.compressed_length);

					const range = ranges.find((range) => chunkIndex >= range.start && chunkIndex < range.end);
					const shouldYield = chunkIndex >= term.range.start && chunkIndex < term.range.end;
@@ -439,8 +436,7 @@ export class XetBlob extends Blob {
					log("done", done, "total read", totalBytesRead, maxBytes, totalFetchBytes);
					log("failed to fetch all data for term", term.hash);
					throw new Error(
-						`Failed to fetch all data for term ${term.hash}, fetched ${totalFetchBytes} bytes out of ${
-							fetchInfo.url_range.end - fetchInfo.url_range.start + 1
+						`Failed to fetch all data for term ${term.hash}, fetched ${totalFetchBytes} bytes out of ${fetchInfo.url_range.end - fetchInfo.url_range.start + 1
						}`
					);
				}
@@ -651,8 +647,8 @@ async function getAccessToken(
		headers: {
			...(initialAccessToken
				? {
-						Authorization: `Bearer ${initialAccessToken}`,
-				  }
+						Authorization: `Bearer ${initialAccessToken}`,
+					}
				: {}),
		},
	});
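
For context, a minimal sketch of what the newly imported `combineUint8Arrays` helper presumably does, based on the inlined concatenation it replaces above; the actual implementation in `./combineUint8Arrays` may differ:

```ts
// Hypothetical sketch — mirrors the removed inline logic: copy both arrays
// back to back into one freshly allocated buffer.
export function combineUint8Arrays(a: Uint8Array, b: Uint8Array): Uint8Array {
	const combined = new Uint8Array(a.length + b.length);
	combined.set(a);
	combined.set(b, a.length);
	return combined;
}
```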