
Commit 22d6a1c

chore(events): Clamp batchSize between [min, max] (#177)
* chore(events): Clamp batchSize between [min, max]
* fix tests
* lint
1 parent 9a64d7d commit 22d6a1c
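
The change itself is a plain clamp: requested batch sizes below the new MIN_BATCH_SIZE (100) are raised to the minimum, and sizes above MAX_BATCH_SIZE (10_000) are capped at the maximum. A minimal standalone sketch of that pattern, using the bounds from the diffs below (the `clampBatchSize` helper name is illustrative only and not part of the commit):

```ts
// Illustrative sketch only: the bounds match the constants added in
// src/events/batch-event-processor.ts; `clampBatchSize` is a hypothetical helper.
const MIN_BATCH_SIZE = 100;
const MAX_BATCH_SIZE = 10_000;

function clampBatchSize(requested: number): number {
  // Lower-bound first, then upper-bound: out-of-range values are pulled back
  // into [MIN_BATCH_SIZE, MAX_BATCH_SIZE]; in-range values pass through unchanged.
  return Math.max(MIN_BATCH_SIZE, Math.min(MAX_BATCH_SIZE, requested));
}

console.log(clampBatchSize(2));           // 100    (clamped up to the minimum)
console.log(clampBatchSize(100_000_000)); // 10_000 (clamped down to the maximum)
console.log(clampBatchSize(1_000));       // 1_000  (within bounds, unchanged)
```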

File tree

4 files changed, +37 -7 lines changed


src/events/batch-event-processor.spec.ts

Lines changed: 23 additions & 0 deletions
```diff
@@ -7,6 +7,10 @@ describe('BatchEventProcessor', () => {
     it('should return a batch and remove items from the queue', () => {
       const eventQueue = new ArrayBackedNamedEventQueue<Event>('test-queue');
       const processor = new BatchEventProcessor(eventQueue, 2);
+      // force batch size to 2 for testing
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
+      processor['batchSize'] = 2;
       expect(processor.isEmpty()).toBeTruthy();
       expect(processor.nextBatch()).toHaveLength(0);
       const timestamp = new Date().getTime();
@@ -24,4 +28,23 @@ describe('BatchEventProcessor', () => {
       expect(processor.isEmpty()).toBeTruthy();
     });
   });
+
+  describe('batchSize', () => {
+    const queue = new ArrayBackedNamedEventQueue<Event>('test-queue');
+
+    it('should clamp batch size to min', () => {
+      const processor = new BatchEventProcessor(queue, 2);
+      expect(processor['batchSize']).toBe(100);
+    });
+
+    it('should clamp batch size to max', () => {
+      const processor = new BatchEventProcessor(queue, 100_000_000);
+      expect(processor['batchSize']).toBe(10_000);
+    });
+
+    it('should set batch size if within bounds', () => {
+      const processor = new BatchEventProcessor(queue, 1_000);
+      expect(processor['batchSize']).toBe(1_000);
+    });
+  });
 });
```

src/events/batch-event-processor.ts

Lines changed: 9 additions & 4 deletions
```diff
@@ -1,11 +1,16 @@
 import Event from './event';
 import NamedEventQueue from './named-event-queue';
 
+const MIN_BATCH_SIZE = 100;
+const MAX_BATCH_SIZE = 10_000;
+
 export default class BatchEventProcessor {
-  constructor(
-    private readonly eventQueue: NamedEventQueue<Event>,
-    private readonly batchSize: number,
-  ) {}
+  private readonly batchSize: number;
+
+  constructor(private readonly eventQueue: NamedEventQueue<Event>, batchSize: number) {
+    // clamp batch size between min and max
+    this.batchSize = Math.max(MIN_BATCH_SIZE, Math.min(MAX_BATCH_SIZE, batchSize));
+  }
 
   nextBatch(): Event[] {
     return this.eventQueue.splice(this.batchSize);
```
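
With this constructor, the caller's requested size no longer takes effect outside [100, 10_000]. A short usage sketch mirroring the new spec cases; the `ArrayBackedNamedEventQueue` import path is assumed, since the spec only shows the class name:

```ts
import Event from './event';
import BatchEventProcessor from './batch-event-processor';
// Import path assumed for illustration; only the class name appears in the spec.
import ArrayBackedNamedEventQueue from './array-backed-named-event-queue';

const queue = new ArrayBackedNamedEventQueue<Event>('example-queue');

// Requested sizes are clamped in the constructor, matching the new spec expectations.
const tooSmall = new BatchEventProcessor(queue, 2);           // effective batchSize: 100
const tooLarge = new BatchEventProcessor(queue, 100_000_000); // effective batchSize: 10_000
const inRange = new BatchEventProcessor(queue, 1_000);        // effective batchSize: 1_000
```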

src/events/default-event-dispatcher.spec.ts

Lines changed: 4 additions & 0 deletions
```diff
@@ -32,6 +32,10 @@ const createDispatcher = (
   };
   const config = { ...defaultConfig, ...configOverrides };
   const batchProcessor = new BatchEventProcessor(eventQueue, batchSize);
+  // force batch size to 2 for testing
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  batchProcessor['batchSize'] = 2;
   const dispatcher = new DefaultEventDispatcher(
     batchProcessor,
     configOverrides.networkStatusListener || mockNetworkStatusListener,
```

src/events/default-event-dispatcher.ts

Lines changed: 1 addition & 3 deletions
```diff
@@ -25,9 +25,7 @@ export type EventDispatcherConfig = {
   maxRetries?: number;
 };
 
-// TODO: Have more realistic default batch size based on average event payload size once we have
-// more concrete data.
-export const DEFAULT_EVENT_DISPATCHER_BATCH_SIZE = 100;
+export const DEFAULT_EVENT_DISPATCHER_BATCH_SIZE = 1_000;
 export const DEFAULT_EVENT_DISPATCHER_CONFIG: Omit<
   EventDispatcherConfig,
   'ingestionUrl' | 'sdkKey'
```
