Skip to content

Commit 0a1f954

Browse files
fix: prevent token tx history screen from fetching duplicated requests and showing duplicated txs (#832)
1 parent 11197e6 commit 0a1f954

File tree

3 files changed

+120
-16
lines changed

3 files changed

+120
-16
lines changed

src/reducers/reducer.js

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -889,6 +889,13 @@ const onNewTx = (state, action) => {
889889
*/
890890
const onUpdateTokenHistory = (state, action) => {
891891
const { token, newHistory } = action.payload;
892+
const existingData = get(state.tokensHistory, `${token}.data`, []);
893+
894+
// Create a Set of existing txIds for efficient lookup
895+
const existingTxIds = new Set(existingData.map((tx) => tx.txId));
896+
897+
// Filter out any transactions that already exist to prevent duplicates
898+
const uniqueNewHistory = newHistory.filter((tx) => !existingTxIds.has(tx.txId));
892899

893900
return {
894901
...state,
@@ -897,8 +904,8 @@ const onUpdateTokenHistory = (state, action) => {
897904
[token]: {
898905
...state.tokensHistory[token],
899906
data: [
900-
...get(state.tokensHistory, `${token}.data`, []),
901-
...newHistory,
907+
...existingData,
908+
...uniqueNewHistory,
902909
]
903910
}
904911
},

src/sagas/tokens.js

Lines changed: 89 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,15 @@ const BALANCE_FETCH_MAX_RETRIES = 3;
6363
*/
6464
const pendingBalanceRequests = new Map();
6565

66+
/**
67+
* Map to track pending history fetch requests per tokenId.
68+
* This enables request deduplication and force upgrade capability.
69+
*
70+
* Structure: Map<tokenId, { force: boolean }>
71+
* - force: whether the pending request should force a fresh fetch
72+
*/
73+
const pendingHistoryRequests = new Map();
74+
6675
/**
6776
* This saga will create a channel to queue TOKEN_FETCH_BALANCE_REQUESTED actions and
6877
* consumers that will run in parallel consuming those actions.
@@ -208,8 +217,83 @@ function* fetchTokenBalance(action) {
208217
}
209218
}
210219

220+
/**
221+
* This saga will create a channel to queue TOKEN_FETCH_HISTORY_REQUESTED actions and
222+
* consumers that will run in parallel consuming those actions.
223+
*
224+
* 1. Request deduplication: Only one fetch per tokenId at a time
225+
* 2. Force upgrade: If a force=true request arrives while a force=false is pending,
226+
* the pending request is upgraded to force=true
227+
* 3. Race condition prevention: Prevents multiple consumers from processing the same tokenId
228+
*/
229+
function* fetchTokenHistoryQueue() {
230+
const fetchTokenHistoryChannel = yield call(channel);
231+
232+
// Fork CONCURRENT_FETCH_REQUESTS threads to download token history
233+
for (let i = 0; i < CONCURRENT_FETCH_REQUESTS; i += 1) {
234+
yield fork(fetchTokenHistoryConsumer, fetchTokenHistoryChannel);
235+
}
236+
237+
while (true) {
238+
const action = yield take(types.TOKEN_FETCH_HISTORY_REQUESTED);
239+
const { tokenId, force } = action;
240+
241+
// Check if there's already a pending request for this tokenId
242+
if (pendingHistoryRequests.has(tokenId)) {
243+
const pending = pendingHistoryRequests.get(tokenId);
244+
245+
// Upgrade to force=true if the new request has force=true
246+
if (force && !pending.force) {
247+
log.debug(`Upgrading pending history request for ${tokenId} to force=true`);
248+
pending.force = true;
249+
}
250+
251+
// Skip queueing duplicate request - the existing one will handle it
252+
log.debug(`Skipping duplicate history request for ${tokenId}, pending request exists`);
253+
continue;
254+
}
255+
256+
// Create new pending entry and queue the request
257+
pendingHistoryRequests.set(tokenId, { force });
258+
yield put(fetchTokenHistoryChannel, action);
259+
}
260+
}
261+
262+
/**
263+
* This saga will consume the fetchTokenHistoryChannel for TOKEN_FETCH_HISTORY_REQUEST actions
264+
* and wait until the TOKEN_FETCH_HISTORY_SUCCESS action is dispatched with the specific tokenId
265+
*/
266+
function* fetchTokenHistoryConsumer(fetchTokenHistoryChannel) {
267+
while (true) {
268+
const action = yield take(fetchTokenHistoryChannel);
269+
270+
yield fork(fetchTokenHistory, action);
271+
// Wait until the success action is dispatched before consuming another action
272+
yield take(
273+
specificTypeAndPayload([
274+
types.TOKEN_FETCH_HISTORY_SUCCESS,
275+
types.TOKEN_FETCH_HISTORY_FAILED,
276+
], {
277+
tokenId: action.tokenId,
278+
}),
279+
);
280+
}
281+
}
282+
283+
/**
284+
* Fetches the history for a specific token.
285+
*
286+
* 1. Checks pendingHistoryRequests for the latest force value (supports force upgrade)
287+
* 2. Properly cleans up pending request tracking on completion
288+
*
289+
* @param {Object} action - The action containing tokenId and force flag
290+
*/
211291
function* fetchTokenHistory(action) {
212-
const { tokenId, force } = action;
292+
const { tokenId } = action;
293+
294+
// Get the current force value from pending requests (may have been upgraded)
295+
const pendingRequest = pendingHistoryRequests.get(tokenId);
296+
const force = pendingRequest?.force ?? action.force;
213297

214298
try {
215299
const wallet = yield select((state) => state.wallet);
@@ -230,6 +314,9 @@ function* fetchTokenHistory(action) {
230314
} catch (e) {
231315
log.error('Error while fetching token history.', e);
232316
yield put(tokenFetchHistoryFailed(tokenId));
317+
} finally {
318+
// Clean up pending request tracking
319+
pendingHistoryRequests.delete(tokenId);
233320
}
234321
}
235322

@@ -374,8 +461,8 @@ export function* fetchTokenData(tokenId, force = false) {
374461
export function* saga() {
375462
yield all([
376463
fork(fetchTokenBalanceQueue),
464+
fork(fetchTokenHistoryQueue),
377465
fork(fetchTokenMetadataQueue),
378-
takeEvery(types.TOKEN_FETCH_HISTORY_REQUESTED, fetchTokenHistory),
379466
takeEvery(types.NEW_TOKEN, routeTokenChange),
380467
takeEvery(types.SET_TOKENS, routeTokenChange),
381468
]);

src/screens/MainScreen.js

Lines changed: 22 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -238,6 +238,9 @@ class MainScreen extends React.Component {
238238
class TxHistoryView extends React.Component {
239239
state = { loading: false, canLoadMore: true };
240240

241+
// Synchronous flag to prevent race conditions with setState
242+
isLoadingMore = false;
243+
241244
renderItem = ({ item, index }) => {
242245
const isFirst = (index === 0);
243246
const isLast = (index === (this.props.txList.length - 1));
@@ -263,23 +266,30 @@ class TxHistoryView extends React.Component {
263266
};
264267

265268
loadMoreHistory = async () => {
266-
if (!this.state.canLoadMore) {
267-
// Already loaded all history
269+
if (!this.state.canLoadMore || this.isLoadingMore) {
270+
// Already loaded all history or currently loading
268271
return;
269272
}
270273

274+
// Set synchronous flag immediately to prevent race conditions
275+
this.isLoadingMore = true;
271276
this.setState({ loading: true });
272-
const newHistory = await fetchMoreHistory(
273-
this.props.wallet,
274-
this.props.token.uid,
275-
this.props.txList
276-
);
277277

278-
if (newHistory.length) {
279-
this.props.updateTokenHistory(this.props.token.uid, newHistory);
280-
this.setState({ loading: false });
281-
} else {
282-
this.setState({ canLoadMore: false, loading: false });
278+
try {
279+
const newHistory = await fetchMoreHistory(
280+
this.props.wallet,
281+
this.props.token.uid,
282+
this.props.txList
283+
);
284+
285+
if (newHistory.length) {
286+
this.props.updateTokenHistory(this.props.token.uid, newHistory);
287+
this.setState({ loading: false });
288+
} else {
289+
this.setState({ canLoadMore: false, loading: false });
290+
}
291+
} finally {
292+
this.isLoadingMore = false;
283293
}
284294
}
285295

0 commit comments

Comments (0)