Skip to content

Commit b385ce2

Browse files
committed
Add events pagination updates
1 parent c01c221 commit b385ce2

File tree

3 files changed

+49
-10
lines changed

3 files changed

+49
-10
lines changed

.changelog/6.feature.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add USD ROSE prices on the day of the transactions

src/fetchEvents.js

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,9 @@ export const fetchEvents = async (NEXUS_API, address, year, before, after, layer
296296
const { items: events, wasClipped: eventsClipped } = await paginatedFetch(
297297
`${NEXUS_API}/${layer}/events`,
298298
{ after, before, rel: address },
299-
"events"
299+
"events",
300+
1000,
301+
(count, page) => setProgress(`Fetching events... (${count} items, page ${page})`)
300302
);
301303

302304
if (eventsClipped) {
@@ -309,7 +311,9 @@ export const fetchEvents = async (NEXUS_API, address, year, before, after, layer
309311
const { items: txItems, wasClipped: txClipped } = await paginatedFetch(
310312
`${NEXUS_API}/${layer}/transactions`,
311313
{ after, before, rel: address },
312-
"transactions"
314+
"transactions",
315+
1000,
316+
(count, page) => setProgress(`Fetching transactions... (${count} items, page ${page})`)
313317
);
314318
transactions = txItems;
315319

src/utils.js

Lines changed: 42 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,29 @@ import axios from "axios";
22

33
/** Resolve after `ms` milliseconds — a promisified `setTimeout`. */
const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
44

/**
 * Run an async operation, retrying with exponential backoff on failure.
 *
 * Makes up to `maxRetries + 1` total attempts. The delay before each retry
 * doubles: baseDelay, 2*baseDelay, 4*baseDelay, ...
 *
 * @param {function} fetchFn - Async function to execute
 * @param {number} maxRetries - Maximum number of retries (default 3)
 * @param {number} baseDelay - Base delay in ms for exponential backoff (default 1000)
 * @returns {Promise<*>} - Result of the first successful call
 * @throws The error from the final failed attempt
 */
const fetchWithRetry = async (fetchFn, maxRetries = 3, baseDelay = 1000) => {
  let attempt = 0;
  while (true) {
    try {
      return await fetchFn();
    } catch (error) {
      // Out of retries: surface the most recent failure to the caller.
      if (attempt >= maxRetries) {
        throw error;
      }
      // Back off exponentially before the next attempt.
      await sleep(baseDelay * 2 ** attempt);
      attempt += 1;
    }
  }
};
27+
528
/**
629
* Paginated fetch helper that handles limit/offset pagination.
730
* Stops when items.length < limit or offset >= total_count.
@@ -11,21 +34,26 @@ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
1134
* @param {object} params - Query parameters (limit will be set automatically)
1235
* @param {string} itemsKey - The key in response.data containing the items array
1336
* @param {number} limit - Page size (default 1000)
37+
* @param {function} onProgress - Optional callback for progress updates (called with items count)
1438
* @returns {Promise<{items: Array, wasClipped: boolean}>}
1539
*/
16-
export const paginatedFetch = async (url, params, itemsKey, limit = 1000) => {
40+
export const paginatedFetch = async (url, params, itemsKey, limit = 1000, onProgress = null) => {
1741
let items = [];
1842
let offset = 0;
1943
let wasClipped = false;
44+
let pageNum = 1;
2045

2146
while (true) {
22-
const response = await axios.get(url, {
23-
params: {
24-
...params,
25-
limit,
26-
offset,
27-
},
28-
});
47+
const currentOffset = offset;
48+
const response = await fetchWithRetry(() =>
49+
axios.get(url, {
50+
params: {
51+
...params,
52+
limit,
53+
offset: currentOffset,
54+
},
55+
})
56+
);
2957

3058
const pageItems = response.data[itemsKey] || [];
3159
items = [...items, ...pageItems];
@@ -35,6 +63,11 @@ export const paginatedFetch = async (url, params, itemsKey, limit = 1000) => {
3563
wasClipped = true;
3664
}
3765

66+
// Report progress if callback provided
67+
if (onProgress) {
68+
onProgress(items.length, pageNum);
69+
}
70+
3871
// Break if we got fewer than the limit (last page)
3972
// Note: When is_total_count_clipped is true, total_count is capped (often at 1000),
4073
// so we can't rely on offset >= total_count to know we're done.
@@ -44,6 +77,7 @@ export const paginatedFetch = async (url, params, itemsKey, limit = 1000) => {
4477
}
4578

4679
offset += pageItems.length;
80+
pageNum++;
4781
await sleep(100);
4882
}
4983

0 commit comments

Comments
 (0)