Skip to content

Commit af0471e

Browse files
committed
feat: request batching
1 parent 4a3a487 commit af0471e

File tree

3 files changed

+218
-10
lines changed

3 files changed

+218
-10
lines changed
Lines changed: 197 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,197 @@
1+
import type {BaseQueryFn} from '@reduxjs/toolkit/query';
2+
3+
import type {FetchData, PaginatedTableData, SortParams} from './types';
4+
5+
/** Parameters describing one page ("chunk") request of a paginated table. */
interface PaginatedTableParams<T, F> {
    // Zero-based index of the first row to fetch.
    offset: number;
    // Function that performs the actual data fetch for this table.
    fetchData: FetchData<T, F>;
    // Table-specific filters; part of the batch key, so requests with
    // different filters are never merged.
    filters: F;
    // Number of rows per chunk; part of the batch key.
    limit: number;
    // Optional sort order; part of the batch key.
    sortParams?: SortParams;
    // Ids of the columns to request; part of the batch key.
    columnsIds: string[];
    // Identifies the table; requests for different tables are never merged.
    tableName: string;
}
14+
15+
/** A pending chunk request waiting in the batcher's queue. */
interface QueuedRequest<T, F> {
    params: PaginatedTableParams<T, F>;
    // Settles the caller's promise. Errors are also delivered here as
    // `{error}` (RTK Query queryFn style), not via `reject`.
    resolve: (result: {data: PaginatedTableData<T>} | {error: unknown}) => void;
    // NOTE(review): never invoked by RequestBatcher — failures are resolved
    // as `{error}` instead. Kept for interface completeness.
    reject: (error: unknown) => void;
    // Abort signal of the original caller, if any.
    signal?: AbortSignal;
}
21+
22+
/** A run of consecutive queued requests that can be served by one fetch. */
interface BatchGroup<T, F> {
    // Requests sorted by ascending offset, strictly consecutive: each
    // offset equals the previous offset plus the shared `limit`.
    requests: QueuedRequest<T, F>[];
    // Shared batch key (table/filters/sort/columns/limit) of all requests.
    batchKey: string;
    // Offset of the first request in the run.
    minOffset: number;
    // Offset of the last request in the run.
    maxOffset: number;
    // Combined row count to fetch in one go: requests.length * limit.
    totalLimit: number;
}
29+
30+
class RequestBatcher {
31+
private requestQueue = new Map<string, QueuedRequest<any, any>[]>();
32+
private batchTimeout: NodeJS.Timeout | null = null;
33+
private readonly BATCH_DELAY = 50; // ms
34+
35+
queueRequest<T, F>(
36+
params: PaginatedTableParams<T, F>,
37+
signal?: AbortSignal,
38+
): Promise<{data: PaginatedTableData<T>} | {error: unknown}> {
39+
return new Promise((resolve, reject) => {
40+
const batchKey = this.createBatchKey(params);
41+
42+
if (!this.requestQueue.has(batchKey)) {
43+
this.requestQueue.set(batchKey, []);
44+
}
45+
46+
this.requestQueue.get(batchKey)!.push({
47+
params,
48+
resolve,
49+
reject,
50+
signal,
51+
});
52+
53+
// Reset the batch timeout
54+
if (this.batchTimeout) {
55+
clearTimeout(this.batchTimeout);
56+
}
57+
58+
this.batchTimeout = setTimeout(() => {
59+
this.processBatch();
60+
}, this.BATCH_DELAY);
61+
});
62+
}
63+
64+
private createBatchKey<T, F>(params: PaginatedTableParams<T, F>): string {
65+
return JSON.stringify({
66+
tableName: params.tableName,
67+
filters: params.filters,
68+
sortParams: params.sortParams,
69+
columnsIds: params.columnsIds,
70+
limit: params.limit,
71+
});
72+
}
73+
74+
private groupConsecutiveRequests<T, F>(requests: QueuedRequest<T, F>[]): BatchGroup<T, F>[] {
75+
if (requests.length === 0) {
76+
return [];
77+
}
78+
79+
const sorted = requests.sort((a, b) => a.params.offset - b.params.offset);
80+
const groups: BatchGroup<T, F>[] = [];
81+
let currentGroup: QueuedRequest<T, F>[] = [sorted[0]];
82+
83+
const limit = sorted[0].params.limit;
84+
85+
for (let i = 1; i < sorted.length; i++) {
86+
const expectedOffset = currentGroup[currentGroup.length - 1].params.offset + limit;
87+
88+
if (sorted[i].params.offset === expectedOffset) {
89+
// Consecutive request
90+
currentGroup.push(sorted[i]);
91+
} else {
92+
// Non-consecutive, create a new group
93+
groups.push(this.createBatchGroup(currentGroup));
94+
currentGroup = [sorted[i]];
95+
}
96+
}
97+
98+
// Add the last group
99+
groups.push(this.createBatchGroup(currentGroup));
100+
101+
return groups;
102+
}
103+
104+
private createBatchGroup<T, F>(requests: QueuedRequest<T, F>[]): BatchGroup<T, F> {
105+
const minOffset = Math.min(...requests.map((r) => r.params.offset));
106+
const maxOffset = Math.max(...requests.map((r) => r.params.offset));
107+
const limit = requests[0].params.limit;
108+
const totalLimit = requests.length * limit;
109+
110+
return {
111+
requests,
112+
batchKey: this.createBatchKey(requests[0].params),
113+
minOffset,
114+
maxOffset,
115+
totalLimit,
116+
};
117+
}
118+
119+
private async executeBatch<T, F>(group: BatchGroup<T, F>): Promise<void> {
120+
const firstRequest = group.requests[0];
121+
const batchParams = {
122+
...firstRequest.params,
123+
offset: group.minOffset,
124+
limit: group.totalLimit,
125+
};
126+
127+
try {
128+
const response = await firstRequest.params.fetchData({
129+
limit: batchParams.limit,
130+
offset: batchParams.offset,
131+
filters: batchParams.filters,
132+
sortParams: batchParams.sortParams,
133+
columnsIds: batchParams.columnsIds,
134+
signal: firstRequest.signal,
135+
});
136+
137+
// Split the response data among individual requests
138+
this.splitAndDistributeResponse(group, response);
139+
} catch (error) {
140+
// If batch fails, reject all requests in the group
141+
group.requests.forEach((request) => {
142+
request.resolve({error});
143+
});
144+
}
145+
}
146+
147+
private splitAndDistributeResponse<T, F>(
148+
group: BatchGroup<T, F>,
149+
batchResponse: PaginatedTableData<T>,
150+
): void {
151+
const limit = group.requests[0].params.limit;
152+
153+
group.requests.forEach((request, index) => {
154+
const startIndex = index * limit;
155+
const endIndex = startIndex + limit;
156+
const chunkData = batchResponse.data.slice(startIndex, endIndex);
157+
158+
const chunkResponse: PaginatedTableData<T> = {
159+
data: chunkData,
160+
total: batchResponse.total,
161+
found: batchResponse.found,
162+
};
163+
164+
request.resolve({data: chunkResponse});
165+
});
166+
}
167+
168+
private async processBatch(): Promise<void> {
169+
const allQueues = Array.from(this.requestQueue.entries());
170+
this.requestQueue.clear();
171+
this.batchTimeout = null;
172+
173+
for (const [_batchKey, requests] of allQueues) {
174+
const groups = this.groupConsecutiveRequests(requests);
175+
176+
// Execute each group (consecutive chunks) as a separate batch
177+
await Promise.all(groups.map((group) => this.executeBatch(group)));
178+
}
179+
}
180+
}
181+
182+
// Singleton instance shared by all paginated tables in the app, so chunk
// requests from one table can be merged regardless of call site.
export const requestBatcher = new RequestBatcher();
184+
185+
// Enhanced base query that uses batching
186+
export const createBatchedBaseQuery = <T, F>(originalBaseQuery: BaseQueryFn): BaseQueryFn => {
187+
return async (args, api, extraOptions) => {
188+
// Check if this is a fetchTableChunk request
189+
if (typeof args === 'object' && args && 'fetchData' in args) {
190+
const params = args as PaginatedTableParams<T, F>;
191+
return await requestBatcher.queueRequest(params);
192+
}
193+
194+
// For non-batchable requests, use original base query
195+
return originalBaseQuery(args, api, extraOptions);
196+
};
197+
};

src/components/PaginatedTable/useScrollBasedChunks.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
2222

2323
// Bad performance in Safari - reduce overscan counts
2424
const DEFAULT_RENDER_OVERSCAN = isSafari ? 1 : 2;
25-
const DEFAULT_FETCH_OVERSCAN = isSafari ? 2 : 4;
25+
const DEFAULT_FETCH_OVERSCAN = 4;
2626

2727
export const useScrollBasedChunks = ({
2828
scrollContainerRef,

src/store/reducers/tableData.ts

Lines changed: 20 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import type {BaseQueryFn, EndpointBuilder} from '@reduxjs/toolkit/query';
22

33
import type {FetchData, PaginatedTableData, SortParams} from '../../components/PaginatedTable';
4+
import {requestBatcher} from '../../components/PaginatedTable/requestBatcher';
45

56
import {api} from './api';
67

@@ -18,19 +19,29 @@ function endpoints<T, F>(build: EndpointBuilder<BaseQueryFn, string, string>) {
1819
return {
1920
fetchTableChunk: build.query<PaginatedTableData<T>, PaginatedTableParams<T, F>>({
2021
queryFn: async (
21-
{offset, limit, sortParams, filters, columnsIds, fetchData},
22+
{offset, limit, sortParams, filters, columnsIds, fetchData, tableName},
2223
{signal},
2324
) => {
2425
try {
25-
const response = await fetchData({
26-
limit,
27-
offset,
28-
filters,
29-
sortParams,
30-
columnsIds,
26+
// Use the request batcher for potential merging
27+
const result = await requestBatcher.queueRequest(
28+
{
29+
offset,
30+
limit,
31+
sortParams,
32+
filters,
33+
columnsIds,
34+
fetchData,
35+
tableName,
36+
},
3137
signal,
32-
});
33-
return {data: response};
38+
);
39+
40+
if ('error' in result) {
41+
return {error: result.error};
42+
}
43+
44+
return result;
3445
} catch (error) {
3546
return {error: error};
3647
}

0 commit comments

Comments
 (0)