Skip to content

Commit 0693e62

Browse files
authored
feat: Added @confluentinc/kafka-javascript to benchmarks and added handleBackPressure option. (#127)
* feat: Added @confluentinc/kafka-javascript to benchmarks and added handleBackPressure option. Signed-off-by: Paolo Insogna <[email protected]>
1 parent c073961 commit 0693e62

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

43 files changed

+1560
-884
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ jobs:
4242
- name: Install dependencies
4343
run: pnpm install --frozen-lockfile
4444
- name: Start Kafka (${{ matrix.confluent-kafka-version }}) Cluster
45-
run: docker compose -f docker/compose.yml up -d --wait
45+
run: docker compose up -d --wait
4646
env:
4747
KAFKA_VERSION: ${{ matrix.confluent-kafka-version }}
4848
- name: Run Tests

BENCHMARKS.md

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
# Producer (Single)
2+
3+
```
4+
╔═══════════════════════════════════════════════╤═════════╤═══════════════╤═══════════╤══════════════════════════╗
5+
║ Slower tests │ Samples │ Result │ Tolerance │ Difference with previous ║
6+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
7+
║ node-rdkafka │ 100 │ 68.30 op/sec │ ± 67.58 % │ ║
8+
║ @confluentinc/kafka-javascript (node-rdkafka) │ 100 │ 220.26 op/sec │ ± 1.24 % │ + 222.47 % ║
9+
║ @confluentinc/kafka-javascript (KafkaJS) │ 100 │ 250.59 op/sec │ ± 1.25 % │ + 13.77 % ║
10+
║ KafkaJS │ 100 │ 383.82 op/sec │ ± 3.91 % │ + 53.17 % ║
11+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
12+
║ Fastest test │ Samples │ Result │ Tolerance │ Difference with previous ║
13+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
14+
║ @platformatic/kafka │ 100 │ 582.59 op/sec │ ± 3.97 % │ + 51.79 % ║
15+
╚═══════════════════════════════════════════════╧═════════╧═══════════════╧═══════════╧══════════════════════════╝
16+
```
17+
18+
# Producer (Batch)
19+
20+
```
21+
╔═══════════════════════════════════════════════╤═════════╤═══════════════╤═══════════╤══════════════════════════╗
22+
║ Slower tests │ Samples │ Result │ Tolerance │ Difference with previous ║
23+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
24+
║ node-rdkafka │ 100 │ 86.92 op/sec │ ± 86.84 % │ ║
25+
║ @confluentinc/kafka-javascript (KafkaJS) │ 100 │ 218.23 op/sec │ ± 3.89 % │ + 151.06 % ║
26+
║ KafkaJS │ 100 │ 285.14 op/sec │ ± 4.67 % │ + 30.66 % ║
27+
║ @platformatic/kafka │ 100 │ 336.80 op/sec │ ± 5.46 % │ + 18.12 % ║
28+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
29+
║ Fastest test │ Samples │ Result │ Tolerance │ Difference with previous ║
30+
╟───────────────────────────────────────────────┼─────────┼───────────────┼───────────┼──────────────────────────╢
31+
║ @confluentinc/kafka-javascript (node-rdkafka) │ 100 │ 594.68 op/sec │ ± 2.26 % │ + 76.57 % ║
32+
╚═══════════════════════════════════════════════╧═════════╧═══════════════╧═══════════╧══════════════════════════╝
33+
```
34+
35+
# Consumer
36+
37+
```
38+
╔════════════════════════════════════════════════════════╤═════════╤══════════════════╤═══════════╤══════════════════════════╗
39+
║ Slower tests │ Samples │ Result │ Tolerance │ Difference with previous ║
40+
╟────────────────────────────────────────────────────────┼─────────┼──────────────────┼───────────┼──────────────────────────╢
41+
║ @confluentinc/kafka-javascript (node-rdkafka, stream) │ 10000 │ 23245.80 op/sec │ ± 43.73 % │ ║
42+
║ node-rdkafka (stream) │ 10000 │ 25933.93 op/sec │ ± 32.86 % │ + 11.56 % ║
43+
║ @confluentinc/kafka-javascript (node-rdkafka, evented) │ 10000 │ 41766.69 op/sec │ ± 77.85 % │ + 61.05 % ║
44+
║ @confluentinc/kafka-javascript (KafkaJS) │ 10000 │ 49387.87 op/sec │ ± 63.30 % │ + 18.25 % ║
45+
║ node-rdkafka (evented) │ 10000 │ 55369.02 op/sec │ ± 77.81 % │ + 12.11 % ║
46+
║ KafkaJS │ 10000 │ 172692.11 op/sec │ ± 52.70 % │ + 211.89 % ║
47+
╟────────────────────────────────────────────────────────┼─────────┼──────────────────┼───────────┼──────────────────────────╢
48+
║ Fastest test │ Samples │ Result │ Tolerance │ Difference with previous ║
49+
╟────────────────────────────────────────────────────────┼─────────┼──────────────────┼───────────┼──────────────────────────╢
50+
║ @platformatic/kafka │ 10008 │ 338994.74 op/sec │ ± 38.21 % │ + 96.30 % ║
51+
╚════════════════════════════════════════════════════════╧═════════╧══════════════════╧═══════════╧══════════════════════════╝
52+
```

benchmarks/consumer.ts

Lines changed: 157 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
1+
import ConfluentRDKafka, { KafkaJS as ConfluentKafka } from '@confluentinc/kafka-javascript'
12
import RDKafka from '@platformatic/rdkafka'
23
import { printResults, Tracker, type Result } from 'cronometro'
34
import { Kafka as KafkaJS, logLevel } from 'kafkajs'
45
import { randomUUID } from 'node:crypto'
56
import { Consumer, MessagesStreamModes, PromiseWithResolvers } from '../src/index.ts'
67
import { brokers, topic } from './utils/definitions.ts'
78

8-
const iterations = 10000
9+
const iterations = 100000
10+
const maxBytes = 2048
911

1012
function rdkafkaEvented (): Promise<Result> {
1113
const { promise, resolve, reject } = PromiseWithResolvers<Result>()
@@ -18,7 +20,7 @@ function rdkafkaEvented (): Promise<Result> {
1820
'metadata.broker.list': brokers.join(','),
1921
'enable.auto.commit': false,
2022
'fetch.min.bytes': 1,
21-
'fetch.message.max.bytes': 200,
23+
'fetch.message.max.bytes': maxBytes,
2224
'fetch.wait.max.ms': 10
2325
},
2426
{ 'auto.offset.reset': 'earliest' }
@@ -78,7 +80,106 @@ function rdkafkaStream (): Promise<Result> {
7880
'metadata.broker.list': brokers.join(','),
7981
'enable.auto.commit': false,
8082
'fetch.min.bytes': 1,
81-
'fetch.message.max.bytes': 200,
83+
'fetch.message.max.bytes': maxBytes,
84+
'fetch.wait.max.ms': 10
85+
},
86+
{ 'auto.offset.reset': 'earliest' },
87+
{ topics: [topic], waitInterval: 0, highWaterMark: 1024, objectMode: true }
88+
)
89+
90+
let i = 0
91+
let last = process.hrtime.bigint()
92+
stream.on('data', () => {
93+
i++
94+
tracker.track(last)
95+
last = process.hrtime.bigint()
96+
97+
if (i === iterations) {
98+
stream.removeAllListeners('data')
99+
stream.pause()
100+
101+
stream.destroy()
102+
resolve(tracker.results)
103+
}
104+
})
105+
106+
stream.on('error', reject)
107+
108+
return promise
109+
}
110+
111+
function confluentRdkafkaEvented (): Promise<Result> {
112+
const { promise, resolve, reject } = PromiseWithResolvers<Result>()
113+
const tracker = new Tracker()
114+
115+
const consumer = new ConfluentRDKafka.KafkaConsumer(
116+
{
117+
'client.id': 'benchmarks',
118+
'group.id': randomUUID(),
119+
'metadata.broker.list': brokers.join(','),
120+
'enable.auto.commit': false,
121+
'fetch.min.bytes': 1,
122+
'fetch.message.max.bytes': maxBytes,
123+
'fetch.wait.max.ms': 10
124+
},
125+
{ 'auto.offset.reset': 'earliest' }
126+
)
127+
128+
let i = 0
129+
let last = process.hrtime.bigint()
130+
consumer.on('data', () => {
131+
i++
132+
tracker.track(last)
133+
last = process.hrtime.bigint()
134+
135+
if (i === iterations) {
136+
consumer.removeAllListeners('data')
137+
consumer.pause([
138+
{
139+
topic,
140+
partition: 0
141+
},
142+
{
143+
topic,
144+
partition: 1
145+
},
146+
{
147+
topic,
148+
partition: 2
149+
}
150+
])
151+
152+
setTimeout(() => {
153+
consumer.disconnect()
154+
resolve(tracker.results)
155+
}, 100)
156+
}
157+
})
158+
159+
consumer.on('ready', () => {
160+
consumer.subscribe([topic])
161+
consumer.consume()
162+
})
163+
164+
consumer.on('event.error', reject)
165+
166+
consumer.connect()
167+
168+
return promise
169+
}
170+
171+
function confluentRdkafkaStream (): Promise<Result> {
172+
const { promise, resolve, reject } = PromiseWithResolvers<Result>()
173+
const tracker = new Tracker()
174+
175+
const stream = ConfluentRDKafka.KafkaConsumer.createReadStream(
176+
{
177+
'client.id': 'benchmarks',
178+
'group.id': randomUUID(),
179+
'metadata.broker.list': brokers.join(','),
180+
'enable.auto.commit': false,
181+
'fetch.min.bytes': 1,
182+
'fetch.message.max.bytes': maxBytes,
82183
'fetch.wait.max.ms': 10
83184
},
84185
{ 'auto.offset.reset': 'earliest' },
@@ -111,7 +212,7 @@ async function kafkajs (): Promise<Result> {
111212
const tracker = new Tracker()
112213

113214
const client = new KafkaJS({ clientId: 'benchmarks', brokers, logLevel: logLevel.ERROR })
114-
const consumer = client.consumer({ groupId: randomUUID(), maxWaitTimeInMs: 10, maxBytes: 200 })
215+
const consumer = client.consumer({ groupId: randomUUID(), maxWaitTimeInMs: 10, maxBytes })
115216

116217
await consumer.connect()
117218
await consumer.subscribe({ topics: [topic], fromBeginning: true })
@@ -123,13 +224,54 @@ async function kafkajs (): Promise<Result> {
123224
await consumer.run({
124225
autoCommit: false,
125226
partitionsConsumedConcurrently: 1,
126-
async eachMessage ({ pause }) {
227+
async eachMessage () {
228+
i++
229+
tracker.track(last)
230+
last = process.hrtime.bigint()
231+
232+
if (i === iterations) {
233+
consumer.disconnect()
234+
resolve(tracker.results)
235+
}
236+
}
237+
})
238+
239+
return promise
240+
}
241+
242+
async function confluentKafkaJS (): Promise<Result> {
243+
const { promise, resolve } = PromiseWithResolvers<Result>()
244+
const tracker = new Tracker()
245+
246+
const client = new ConfluentKafka.Kafka({
247+
kafkaJS: {
248+
clientId: 'benchmarks',
249+
brokers
250+
}
251+
})
252+
const consumer = client.consumer({
253+
kafkaJS: {
254+
groupId: randomUUID(),
255+
maxWaitTimeInMs: 10,
256+
maxBytes,
257+
fromBeginning: true,
258+
autoCommit: false
259+
}
260+
})
261+
262+
await consumer.connect()
263+
await consumer.subscribe({ topics: [topic] })
264+
265+
let i = 0
266+
let last = process.hrtime.bigint()
267+
await consumer.run({
268+
partitionsConsumedConcurrently: 1,
269+
async eachMessage () {
127270
i++
128271
tracker.track(last)
129272
last = process.hrtime.bigint()
130273

131274
if (i === iterations) {
132-
pause()
133275
consumer.disconnect()
134276
resolve(tracker.results)
135277
}
@@ -148,7 +290,7 @@ async function platformaticKafka (): Promise<Result> {
148290
groupId: randomUUID(),
149291
bootstrapBrokers: brokers,
150292
minBytes: 1,
151-
maxBytes: 200,
293+
maxBytes,
152294
maxWaitTime: 10,
153295
autocommit: false
154296
})
@@ -163,8 +305,10 @@ async function platformaticKafka (): Promise<Result> {
163305
last = process.hrtime.bigint()
164306

165307
if (i === iterations) {
166-
consumer.close(true, () => {
167-
resolve(tracker.results)
308+
process.nextTick(() => {
309+
consumer.close(true, () => {
310+
resolve(tracker.results)
311+
})
168312
})
169313
}
170314
})
@@ -177,7 +321,10 @@ async function platformaticKafka (): Promise<Result> {
177321
const results = {
178322
'node-rdkafka (evented)': await rdkafkaEvented(),
179323
'node-rdkafka (stream)': await rdkafkaStream(),
180-
kafkajs: await kafkajs(),
324+
'@confluentinc/kafka-javascript (node-rdkafka, evented)': await confluentRdkafkaEvented(),
325+
'@confluentinc/kafka-javascript (node-rdkafka, stream)': await confluentRdkafkaStream(),
326+
KafkaJS: await kafkajs(),
327+
'@confluentinc/kafka-javascript (KafkaJS)': await confluentKafkaJS(),
181328
'@platformatic/kafka': await platformaticKafka()
182329
}
183330

benchmarks/crc32c.ts

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
import cronometro from 'cronometro'
2+
// @ts-ignore - Not explicitly exported by kafkajs
3+
import kafkaJsCRC32 from 'kafkajs/src/protocol/recordBatch/crc32C/crc32C.js'
4+
import { randomBytes } from 'node:crypto'
5+
import { jsCRC32C, loadNativeCRC32C } from '../src/index.ts'
6+
7+
const nativeCRC32C = loadNativeCRC32C()!
8+
9+
await cronometro(
10+
{
11+
kafkajs () {
12+
const value = randomBytes(1024)
13+
return kafkaJsCRC32(value)
14+
},
15+
'@platformatic/kafka (JS)' () {
16+
const value = randomBytes(1024)
17+
return jsCRC32C(value)
18+
},
19+
'@platformatic/kafka (Native)' () {
20+
const value = randomBytes(1024)
21+
return nativeCRC32C(value)
22+
}
23+
},
24+
{ print: { compare: true, compareMode: 'previous' } }
25+
)

benchmarks/murmur2.ts

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,16 @@ import kafkaJsMurmur2 from 'kafkajs/src/producer/partitioners/default/murmur2.js
44
import { randomBytes } from 'node:crypto'
55
import { murmur2 } from '../src/index.ts'
66

7-
await cronometro({
8-
kafkajs () {
9-
const value = randomBytes(16)
10-
return kafkaJsMurmur2(value)
7+
await cronometro(
8+
{
9+
kafkajs () {
10+
const value = randomBytes(16)
11+
return kafkaJsMurmur2(value)
12+
},
13+
'@platformatic/kafka' () {
14+
const value = randomBytes(16)
15+
return murmur2(value)
16+
}
1117
},
12-
'@platformatic/kafka' () {
13-
const value = randomBytes(16)
14-
return murmur2(value)
15-
}
16-
})
18+
{ print: { compare: true, compareMode: 'previous' } }
19+
)

0 commit comments

Comments (0)