Skip to content

Commit 16d9547

Browse files
committed
Update performance example with more cases
1 parent 82b57d5 commit 16d9547

File tree

3 files changed

+155
-17
lines changed

3 files changed

+155
-17
lines changed

examples/performance/package.json

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
{
2+
"name": "performance",
3+
"version": "1.0.0",
4+
"main": "performance-primitives.js",
5+
"scripts": {
6+
"test": "echo \"Error: no test specified\" && exit 1"
7+
},
8+
"keywords": [],
9+
"author": "",
10+
"license": "ISC",
11+
"description": "",
12+
"dependencies": {
13+
"@confluentinc/kafka-javascript": "file:../..",
14+
"kafkajs": "^2.2.4"
15+
}
16+
}
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
/**
 * Entry point for the performance examples.
 *
 * Selects which benchmark(s) to run from CLI flags (--producer, --consumer,
 * --ctp, --all) and reads its configuration from environment variables,
 * falling back to the defaults below. Each run* primitive resolves to the
 * measured throughput in MB/s, which is logged to stdout.
 */
const { runProducer, runConsumer, runConsumeTransformProduce } = require('./performance-primitives');

const { CompressionTypes } = require('../../').KafkaJS;

const brokers = process.env.KAFKA_BROKERS || 'localhost:9092';
const topic = process.env.KAFKA_TOPIC || 'test-topic';
const topic2 = process.env.KAFKA_TOPIC2 || 'test-topic2';
const messageCount = process.env.MESSAGE_COUNT ? +process.env.MESSAGE_COUNT : 1000000;
const messageSize = process.env.MESSAGE_SIZE ? +process.env.MESSAGE_SIZE : 256;
const batchSize = process.env.BATCH_SIZE ? +process.env.BATCH_SIZE : 100;
const compression = process.env.COMPRESSION || CompressionTypes.NONE;
const warmupMessages = process.env.WARMUP_MESSAGES ? +process.env.WARMUP_MESSAGES : (batchSize * 10);

(async function () {
    const producer = process.argv.includes('--producer');
    const consumer = process.argv.includes('--consumer');
    const ctp = process.argv.includes('--ctp');
    const all = process.argv.includes('--all');

    if (producer || all) {
        console.log("=== Running Basic Producer Performance Test:");
        console.log(`  Brokers: ${brokers}`);
        console.log(`  Topic: ${topic}`);
        console.log(`  Message Count: ${messageCount}`);
        console.log(`  Message Size: ${messageSize}`);
        console.log(`  Batch Size: ${batchSize}`);
        console.log(`  Compression: ${compression}`);
        console.log(`  Warmup Messages: ${warmupMessages}`);
        const producerRate = await runProducer(brokers, topic, batchSize, warmupMessages, messageCount, messageSize, compression);
        console.log("=== Producer Rate: ", producerRate);
    }

    if (consumer || all) {
        // If user runs this without --producer then they are responsible for seeding the topic.
        console.log("=== Running Basic Consumer Performance Test:");
        console.log(`  Brokers: ${brokers}`);
        console.log(`  Topic: ${topic}`);
        console.log(`  Message Count: ${messageCount}`);
        const consumerRate = await runConsumer(brokers, topic, messageCount);
        console.log("=== Consumer Rate: ", consumerRate);
    }

    if (ctp || all) {
        console.log("=== Running Consume-Transform-Produce Performance Test:");
        console.log(`  Brokers: ${brokers}`);
        console.log(`  ConsumeTopic: ${topic}`);
        console.log(`  ProduceTopic: ${topic2}`);
        console.log(`  Message Count: ${messageCount}`);
        // Seed the topic with messages
        await runProducer(brokers, topic, batchSize, warmupMessages, messageCount, messageSize, compression);
        const ctpRate = await runConsumeTransformProduce(brokers, topic, topic2, messageCount);
        console.log("=== Consume-Transform-Produce Rate: ", ctpRate);
    }
})().catch((err) => {
    // Without this handler a failing benchmark would terminate as an
    // unhandled promise rejection instead of reporting the error and a
    // non-zero exit status.
    console.error(err);
    process.exitCode = 1;
});

examples/performance/performance-promisified.js renamed to examples/performance/performance-primitives.js

Lines changed: 84 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,13 @@
1-
const { Kafka, CompressionTypes, ErrorCodes } = require('../../').KafkaJS;
1+
const { Kafka, ErrorCodes } = require('../../').KafkaJS;
22
const { randomBytes } = require('crypto');
33
const { hrtime } = require('process');
44

5+
/* Benchmark primitives shared with the example's entry-point script;
 * each resolves to the measured throughput (MB/s per the rate
 * computations in the bodies below). */
module.exports = {
    runProducer,
    runConsumer,
    runConsumeTransformProduce,
};
10+
511
async function runProducer(brokers, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression) {
612
let totalMessagesSent = 0;
713
let totalBytesSent = 0;
@@ -61,7 +67,6 @@ async function runProducer(brokers, topic, batchSize, warmupMessages, totalMessa
6167
}
6268
await Promise.all(promises);
6369
}
64-
console.log({messagesDispatched, totalMessageCnt})
6570
let elapsed = hrtime(startTime);
6671
let durationNanos = elapsed[0] * 1e9 + elapsed[1];
6772
let rate = (totalBytesSent / durationNanos) * 1e9 / (1024 * 1024); /* MB/s */
@@ -81,7 +86,7 @@ async function runConsumer(brokers, topic, totalMessageCnt) {
8186
'group.id': 'test-group' + Math.random(),
8287
'enable.auto.commit': false,
8388
'auto.offset.reset': 'earliest',
84-
});
89+
});
8590
await consumer.connect();
8691
await consumer.subscribe({ topic });
8792

@@ -101,8 +106,8 @@ async function runConsumer(brokers, topic, totalMessageCnt) {
101106
rate = (totalMessageSize / durationNanos) * 1e9 / (1024 * 1024); /* MB/s */
102107
console.log(`Recvd ${messagesReceived} messages, ${totalMessageSize} bytes; rate is ${rate} MB/s`);
103108
consumer.pause([{ topic }]);
104-
// } else if (messagesReceived % 100 == 0) {
105-
// console.log(`Recvd ${messagesReceived} messages, ${totalMessageSize} bytes`);
109+
// } else if (messagesReceived % 100 == 0) {
110+
// console.log(`Recvd ${messagesReceived} messages, ${totalMessageSize} bytes`);
106111
}
107112
}
108113
});
@@ -135,15 +140,77 @@ async function runConsumer(brokers, topic, totalMessageCnt) {
135140
return rate;
136141
}
137142

138-
const brokers = process.env.KAFKA_BROKERS || 'localhost:9092';
139-
const topic = process.env.KAFKA_TOPIC || 'test-topic';
140-
const messageCount = process.env.MESSAGE_COUNT ? +process.env.MESSAGE_COUNT : 1000000;
141-
const messageSize = process.env.MESSAGE_SIZE ? +process.env.MESSAGE_SIZE : 256;
142-
const batchSize = process.env.BATCH_SIZE ? +process.env.BATCH_SIZE : 100;
143-
const compression = process.env.COMPRESSION || CompressionTypes.NONE;
144-
const warmupMessages = process.env.WARMUP_MESSAGES ? +process.env.WARMUP_MESSAGES : (batchSize * 10);
145-
146-
runProducer(brokers, topic, batchSize, warmupMessages, messageCount, messageSize, compression).then(async (producerRate) => {
147-
const consumerRate = await runConsumer(brokers, topic, messageCount);
148-
console.log(producerRate, consumerRate);
149-
});
143+
/**
 * Measures consume-transform-produce throughput: every message read from
 * `consumeTopic` is immediately re-sent to `produceTopic`. The first message
 * is treated as a warm-up; timing starts when the consumer is resumed and
 * stops once `totalMessageCnt` messages have been processed.
 * Resolves to the measured rate in MB/s.
 */
async function runConsumeTransformProduce(brokers, consumeTopic, produceTopic, totalMessageCnt) {
    const kafka = new Kafka({
        'client.id': 'kafka-test-performance',
        'metadata.broker.list': brokers,
    });

    const producer = kafka.producer({
        /* We want things to be flushed immediately as we'll be awaiting this. */
        'linger.ms': 0
    });
    await producer.connect();

    const consumer = kafka.consumer({
        'group.id': 'test-group' + Math.random(),
        'enable.auto.commit': false,
        'auto.offset.reset': 'earliest',
    });
    await consumer.connect();
    await consumer.subscribe({ topic: consumeTopic });

    // Resolve once `condition()` turns true, polling every `intervalMs`.
    const waitUntil = (condition, intervalMs) => new Promise((resolve) => {
        const timer = setInterval(() => {
            if (condition()) {
                clearInterval(timer);
                resolve();
            }
        }, intervalMs);
    });

    let received = 0;
    let bytesProcessed = 0;
    let startTime;
    let rate;

    consumer.run({
        eachMessage: async ({ topic, partition, message }) => {
            await producer.send({
                topic: produceTopic,
                messages: [{ value: message.value }],
            });
            received++;
            bytesProcessed += message.value.length;
            if (received === 1) {
                /* First delivery is the warm-up; pause until the timed phase begins. */
                consumer.pause([{ topic }]);
            } else if (received === totalMessageCnt) {
                const elapsed = hrtime(startTime);
                const durationNanos = elapsed[0] * 1e9 + elapsed[1];
                rate = (bytesProcessed / durationNanos) * 1e9 / (1024 * 1024); /* MB/s */
                console.log(`Recvd, transformed and sent ${received} messages, ${bytesProcessed} bytes; rate is ${rate} MB/s`);
                consumer.pause([{ topic }]);
            }
        }
    });

    // Wait until the first message is received
    await waitUntil(() => received > 0, 100);

    console.log("Starting consume-transform-produce.");

    /* Exclude the warm-up message's bytes from the measurement. */
    bytesProcessed = 0;
    startTime = hrtime();
    consumer.resume([{ topic: consumeTopic }]);
    await waitUntil(() => received >= totalMessageCnt, 1000);

    await consumer.disconnect();
    await producer.disconnect();
    return rate;
}

0 commit comments

Comments
 (0)