
Commit 8ad4f35

manzt and ilan-gold committed
chore: apply AbortController changes
Co-authored-by: ilan-gold <[email protected]>
1 parent 216214d commit 8ad4f35

File tree

4 files changed: +64 -77 lines changed

README.md

Lines changed: 4 additions & 0 deletions
@@ -355,7 +355,11 @@ const multiTiff = await fromUrls(
 
 Geotiff.js supports the use of [`AbortController`s](https://developer.mozilla.org/en-US/docs/Web/API/AbortController). Calls to `getRasters`, `readRGB` and `getTileOrStrip` will throw an `Error` with name `AbortSignal` similar to the browser's `fetch` behavior.
 
+You need to set the `cacheSize` parameter to `0` to enable this feature due to cache consistency issues - otherwise, once the cache becomes full,
+it will lose its consistency.
+
 ```javascript
+const tiff = await fromUrl(source, { cacheSize: 0 });
 const abortController = new AbortController();
 const { signal } = abortController;
 abortController.abort();
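The hunk above cuts off mid-snippet; for context, a rough sketch of how the README example typically continues. The `readRasters({ signal })` call and the `try`/`catch` are illustrative, based on the error name described in the paragraph above, and are not part of this commit's diff:

```javascript
// Illustrative continuation only - not part of this commit.
const tiff = await fromUrl(source, { cacheSize: 0 });
const abortController = new AbortController();
const { signal } = abortController;
abortController.abort();
try {
  const data = await tiff.readRasters({ signal });
} catch (err) {
  // the README text above states aborted calls throw an Error named `AbortSignal`
  if (err.name === 'AbortSignal') {
    console.log('read was aborted');
  }
}
```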

src/geotiffimage.js

Lines changed: 2 additions & 3 deletions
@@ -475,9 +475,7 @@ class GeoTIFFImage {
           if (this.planarConfiguration === 2) {
             bytesPerPixel = this.getSampleByteSize(sampleIndex);
           }
-          const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder, signal);
-          promises.push(promise);
-          promise.then((tile) => {
+          const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder, signal).then((tile) => {
             const buffer = tile.data;
             const dataView = new DataView(buffer);
             const blockHeight = this.getBlockHeight(tile.y);
@@ -511,6 +509,7 @@ class GeoTIFFImage {
               }
             }
           });
+          promises.push(promise);
         }
       }
     }
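One way to read this change: the promise pushed into `promises` now also covers the per-tile copy done in the `.then()` callback, so a rejection (for example an abort) or an error thrown while copying surfaces where the promises are awaited instead of on a detached `.then()` branch. A standalone sketch with made-up names, not geotiff.js internals:

```javascript
// `fetchTile` stands in for getTileOrStrip; here it always rejects with an abort.
async function fetchTile() {
  const err = new Error('aborted');
  err.name = 'AbortError';
  throw err;
}

const promises = [];
const promise = fetchTile().then((tile) => {
  // the per-tile copy into the output raster would happen here
  return tile;
});
promises.push(promise);

const results = await Promise.allSettled(promises);
console.log(results[0].status); // 'rejected': the failure is visible to whoever awaits `promises`
```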

src/source/blockedsource.js

Lines changed: 57 additions & 73 deletions
@@ -55,6 +55,8 @@ export class BlockedSource extends BaseSource {
 
     // set of blockIds missing for the current requests
     this.blockIdsToFetch = new Set();
+
+    this.abortedBlockIds = new Set();
   }
 
   get fileSize() {
@@ -66,9 +68,9 @@ export class BlockedSource extends BaseSource {
    * @param {basesource/Slice[]} slices
    */
   async fetch(slices, signal) {
-    const cachedBlocks = new Map();
-    const blockRequests = new Map();
-    const missingBlockIds = new Set();
+    const blockRequests = [];
+    const missingBlockIds = [];
+    const allBlockIds = [];
 
     for (const { offset, length } of slices) {
       let top = offset + length;
@@ -80,92 +82,68 @@ export class BlockedSource extends BaseSource {
 
       const firstBlockOffset = Math.floor(offset / this.blockSize) * this.blockSize;
 
-      // chunk the current slice into blocks
       for (let current = firstBlockOffset; current < top; current += this.blockSize) {
-        // check if the block is cached, being requested or still missing
         const blockId = Math.floor(current / this.blockSize);
-
-        if (this.blockCache.has(blockId)) {
-          cachedBlocks.set(blockId, this.blockCache.get(blockId));
-        } else if (this.blockRequests.has(blockId)) {
-          blockRequests.set(blockId, this.blockRequests.get(blockId));
-        } else if (this.blockIdsToFetch.has(blockId)) {
-          missingBlockIds.add(blockId);
-        } else {
+        if (!this.blockCache.has(blockId) && !this.blockRequests.has(blockId)) {
           this.blockIdsToFetch.add(blockId);
-          missingBlockIds.add(blockId);
+          missingBlockIds.push(blockId);
+        }
+        if (this.blockRequests.has(blockId)) {
+          blockRequests.push(this.blockRequests.get(blockId));
         }
+        allBlockIds.push(blockId);
       }
     }
 
     // allow additional block requests to accumulate
     await wait();
     this.fetchBlocks(signal);
 
+    // Gather all of the new requests that this fetch call is contributing to `fetch`.
+    const missingRequests = [];
     for (const blockId of missingBlockIds) {
-      const block = this.blockRequests.get(blockId);
-      const cachedBlock = this.blockCache.get(blockId);
-
-      if (block) {
-        blockRequests.set(blockId, block);
-      } else if (cachedBlock) {
-        cachedBlocks.set(blockId, cachedBlock);
-      } else {
-        throw new Error(`Block ${blockId} is not in the block requests`);
+      // The requested missing block could already be in the cache
+      // instead of having its request still be outstanding.
+      if (this.blockRequests.has(blockId)) {
+        missingRequests.push(this.blockRequests.get(blockId));
       }
     }
 
-    // actually await all pending requests
-    let results = await Promise.allSettled(Array.from(blockRequests.values()));
-
-    // perform retries if a block was interrupted by a previous signal
-    if (results.some((result) => result.status === 'rejected')) {
-      const retriedBlockRequests = new Set();
-      for (const [blockId, result] of zip(blockRequests.keys(), results)) {
-        const { rejected, reason } = result;
-        if (rejected) {
-          // push some blocks back to the to-fetch list if they were
-          // aborted, but only when a different signal was used
-          if (reason.name === 'AbortError' && reason.signal !== signal) {
-            this.blockIdsToFetch.add(blockId);
-            retriedBlockRequests.add(blockId);
-          }
+    // Actually await all pending requests that are needed for this `fetch`.
+    await Promise.allSettled(blockRequests.values());
+    await Promise.allSettled(missingRequests.values());
+
+    // Perform retries if a block was interrupted by a previous signal
+    const abortedBlockRequests = [];
+    const abortedBlockIds = allBlockIds
+      .filter((id) => this.abortedBlockIds.has(id) || !this.blockCache.has(id));
+    abortedBlockIds.forEach((id) => this.blockIdsToFetch.add(id));
+    // start the retry of some blocks if required
+    if (abortedBlockIds.length > 0 && signal && !signal.aborted) {
+      this.fetchBlocks(null);
+      for (const blockId of abortedBlockIds) {
+        const block = this.blockRequests.get(blockId);
+        if (!block) {
+          throw new Error(`Block ${blockId} is not in the block requests`);
         }
+        abortedBlockRequests.push(block);
       }
-
-      // start the retry of some blocks if required
-      if (this.blockIdsToFetch.length > 0) {
-        this.fetchBlocks(signal);
-        for (const blockId of retriedBlockRequests) {
-          const block = this.blockRequests.get(blockId);
-          if (!block) {
-            throw new Error(`Block ${blockId} is not in the block requests`);
-          }
-          blockRequests.set(blockId, block);
-        }
-        results = await Promise.allSettled(Array.from(blockRequests.values()));
-      }
+      await Promise.allSettled(Array.from(abortedBlockRequests.values()));
     }
 
-    // throw an error (either abort error or AggregateError if no abort was done)
-    if (results.some((result) => result.status === 'rejected')) {
-      if (signal && signal.aborted) {
-        throw new AbortError('Request was aborted');
-      }
-      throw new AggregateError(
-        results.filter((result) => result.status === 'rejected').map((result) => result.reason),
-        'Request failed',
-      );
+    // throw an abort error
+    if (signal && signal.aborted) {
+      throw new AbortError('Request was aborted');
     }
 
-    // extract the actual block responses
-    const values = results.map((result) => result.value);
+    const blocks = allBlockIds.map((id) => this.blockCache.get(id));
+    const failedBlocks = blocks.filter((i) => !i);
+    if (failedBlocks.length) {
+      throw new AggregateError(failedBlocks, 'Request failed');
+    }
 
     // create a final Map, with all required blocks for this request to satisfy
-    const requiredBlocks = new Map(zip(Array.from(blockRequests.keys()), values));
-    for (const [blockId, block] of cachedBlocks) {
-      requiredBlocks.set(blockId, block);
-    }
+    const requiredBlocks = new Map(zip(allBlockIds, blocks));
 
     // TODO: satisfy each slice
     return this.readSliceData(slices, requiredBlocks);
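A rough usage-level sketch of the case the new retry path handles: two overlapping `fetch` calls with different signals share in-flight blocks, and blocks aborted through one signal are re-fetched for the caller whose signal is still live. The `source`, offsets and block size below are placeholders, not values from this commit:

```javascript
// `source` is any BaseSource-compatible reader; BlockedSource is internal to geotiff.js,
// so the import path is illustrative.
import { BlockedSource } from './blockedsource.js';

const blocked = new BlockedSource(source, { blockSize: 65536, cacheSize: 0 });

const first = new AbortController();
const second = new AbortController();

// Both requests overlap, so they share some underlying blocks.
const a = blocked.fetch([{ offset: 0, length: 131072 }], first.signal);
const b = blocked.fetch([{ offset: 65536, length: 131072 }], second.signal);

first.abort(); // aborted blocks are evicted from the cache and recorded in abortedBlockIds

await a.catch(() => { /* the first request rejects with an abort error */ });
const data = await b; // the second call re-fetches the aborted blocks and still resolves
```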
@@ -188,7 +166,7 @@ export class BlockedSource extends BaseSource {
 
       for (const blockId of group.blockIds) {
         // make an async IIFE for each block
-        const blockRequest = (async () => {
+        this.blockRequests.set(blockId, (async () => {
           try {
             const response = (await groupRequests)[groupIndex];
             const blockOffset = blockId * this.blockSize;
@@ -199,21 +177,24 @@ export class BlockedSource extends BaseSource {
               blockOffset,
               data.byteLength,
               data,
+              blockId,
             );
             this.blockCache.set(blockId, block);
-            return block;
+            this.abortedBlockIds.delete(blockId);
           } catch (err) {
             if (err.name === 'AbortError') {
               // store the signal here, we need it to determine later if an
               // error was caused by this signal
               err.signal = signal;
+              this.blockCache.del(blockId);
+              this.abortedBlockIds.add(blockId);
+            } else {
+              throw err;
             }
-            throw err;
           } finally {
             this.blockRequests.delete(blockId);
           }
-        })();
-        this.blockRequests.set(blockId, blockRequest);
+        })());
       }
     }
     this.blockIdsToFetch.clear();
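The `this.blockRequests.set(blockId, (async () => { ... })())` line follows a common promise-deduplication pattern: the promise of an immediately-invoked async function is stored in a map keyed by block id, so concurrent callers share one in-flight request and the entry is dropped once it settles. A generic sketch of that pattern (names are illustrative, not geotiff.js code):

```javascript
// Share one in-flight request per key across concurrent callers.
const inflight = new Map();

function loadBlock(blockId, doFetch) {
  if (!inflight.has(blockId)) {
    inflight.set(blockId, (async () => {
      try {
        return await doFetch(blockId);
      } finally {
        inflight.delete(blockId); // drop the entry once the request settles
      }
    })());
  }
  return inflight.get(blockId);
}

// Two concurrent loads of block 7 share a single doFetch call.
const fetchOnce = async (id) => ({ id, data: new ArrayBuffer(8) });
await Promise.all([loadBlock(7, fetchOnce), loadBlock(7, fetchOnce)]);
```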
@@ -265,9 +246,12 @@ export class BlockedSource extends BaseSource {
    */
   readSliceData(slices, blocks) {
     return slices.map((slice) => {
-      const top = slice.offset + slice.length;
+      let top = slice.offset + slice.length;
+      if (this.fileSize !== null) {
+        top = Math.min(this.fileSize, top);
+      }
       const blockIdLow = Math.floor(slice.offset / this.blockSize);
-      const blockIdHigh = Math.floor((slice.offset + slice.length) / this.blockSize);
+      const blockIdHigh = Math.floor(top / this.blockSize);
       const sliceData = new ArrayBuffer(slice.length);
       const sliceView = new Uint8Array(sliceData);
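A small worked example of why `top` is clamped (numbers are illustrative, not from this commit): with a 64-byte block size and a 1000-byte file, a slice at offset 950 with length 100 previously required block 16 (`Math.floor(1050 / 64)`), which lies entirely past the end of the file; clamping to `fileSize` keeps the highest required block at 15.

```javascript
// Illustrative numbers only.
const blockSize = 64;
const fileSize = 1000;
const slice = { offset: 950, length: 100 };

let top = slice.offset + slice.length;           // 1050, past the end of the file
top = Math.min(fileSize, top);                   // clamped to 1000
const blockIdHigh = Math.floor(top / blockSize); // 15 instead of 16
console.log(blockIdHigh);
```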

src/source/remote.js

Lines changed: 1 addition & 1 deletion
@@ -154,7 +154,7 @@ function maybeWrapInBlockedSource(source, { blockSize, cacheSize }) {
   if (blockSize === null) {
     return source;
   }
-  return new BlockedSource(source, blockSize, cacheSize);
+  return new BlockedSource(source, { blockSize, cacheSize });
 }
 
 export function makeFetchSource(url, { headers = {}, credentials, maxRanges = 0, allowFullFile = false, ...blockOptions } = {}) {
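For reference, a hedged sketch of how these options reach the blocked source from the public API: only `cacheSize: 0` is confirmed by the README change above; treating `blockSize` as another pass-through of `makeFetchSource`'s `...blockOptions` is an assumption, and the URL is a placeholder.

```javascript
import { fromUrl } from 'geotiff';

// Sketch only: option names other than cacheSize are assumptions.
const tiff = await fromUrl('https://example.com/example.tif', {
  blockSize: 65536, // assumed to be forwarded through ...blockOptions into BlockedSource
  cacheSize: 0,     // as recommended by the README change above when using AbortController
});
```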
