Skip to content

Commit a8c288d

Browse files
authored
upgrade examples to es6 (#1029)
1 parent 8429f0c commit a8c288d

File tree

1 file changed

+35
-35
lines changed

1 file changed

+35
-35
lines changed

README.md

Lines changed: 35 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ npm install node-rdkafka
9292
To use the module, you must `require` it.
9393

9494
```js
95-
var Kafka = require('node-rdkafka');
95+
const Kafka = require('node-rdkafka');
9696
```
9797

9898
## Configuration
@@ -140,7 +140,7 @@ console.log(Kafka.librdkafkaVersion);
140140
A `Producer` sends messages to Kafka. The `Producer` constructor takes a configuration object, as shown in the following example:
141141

142142
```js
143-
var producer = new Kafka.Producer({
143+
const producer = new Kafka.Producer({
144144
'metadata.broker.list': 'kafka-host1:9092,kafka-host2:9092'
145145
});
146146
```
@@ -150,7 +150,7 @@ A `Producer` requires only `metadata.broker.list` (the Kafka brokers) to be crea
150150
The following example illustrates a list with several `librdkafka` options set.
151151

152152
```js
153-
var producer = new Kafka.Producer({
153+
const producer = new Kafka.Producer({
154154
'client.id': 'kafka',
155155
'metadata.broker.list': 'localhost:9092',
156156
'compression.codec': 'gzip',
@@ -171,14 +171,14 @@ You can easily use the `Producer` as a writable stream immediately after creatio
171171
```js
172172
// Our producer with its Kafka brokers
173173
// This call returns a new writable stream to our topic 'topic-name'
174-
var stream = Kafka.Producer.createWriteStream({
174+
const stream = Kafka.Producer.createWriteStream({
175175
'metadata.broker.list': 'kafka-host1:9092,kafka-host2:9092'
176176
}, {}, {
177177
topic: 'topic-name'
178178
});
179179

180180
// Writes a message to the stream
181-
var queuedSuccess = stream.write(Buffer.from('Awesome message'));
181+
const queuedSuccess = stream.write(Buffer.from('Awesome message'));
182182

183183
if (queuedSuccess) {
184184
console.log('We queued our message!');
@@ -190,7 +190,7 @@ if (queuedSuccess) {
190190

191191
// NOTE: MAKE SURE TO LISTEN TO THIS IF YOU WANT THE STREAM TO BE DURABLE
192192
// Otherwise, any error will bubble up as an uncaught exception.
193-
stream.on('error', function (err) {
193+
stream.on('error', (err) => {
194194
// Here's where we'll know if something went wrong sending to Kafka
195195
console.error('Error in our kafka stream');
196196
console.error(err);
@@ -205,7 +205,7 @@ The Standard API is more performant, particularly when handling high volumes of
205205
However, it requires more manual setup to use. The following example illustrates its use:
206206

207207
```js
208-
var producer = new Kafka.Producer({
208+
const producer = new Kafka.Producer({
209209
'metadata.broker.list': 'localhost:9092',
210210
'dr_cb': true
211211
});
@@ -214,7 +214,7 @@ var producer = new Kafka.Producer({
214214
producer.connect();
215215

216216
// Wait for the ready event before proceeding
217-
producer.on('ready', function() {
217+
producer.on('ready', () => {
218218
try {
219219
producer.produce(
220220
// Topic to send the message to
@@ -239,7 +239,7 @@ producer.on('ready', function() {
239239
});
240240

241241
// Any errors we encounter, including connection errors
242-
producer.on('event.error', function(err) {
242+
producer.on('event.error', (err) => {
243243
console.error('Error from producer');
244244
console.error(err);
245245
})
@@ -279,7 +279,7 @@ Some configuration properties that end in `_cb` indicate that an event should be
279279
The following example illustrates an event:
280280

281281
```js
282-
var producer = new Kafka.Producer({
282+
const producer = new Kafka.Producer({
283283
'client.id': 'my-client', // Specifies an identifier to use to help trace activity in Kafka
284284
'metadata.broker.list': 'localhost:9092', // Connect to a Kafka instance on localhost
285285
'dr_cb': true // Specifies that we want a delivery-report event to be generated
@@ -288,7 +288,7 @@ var producer = new Kafka.Producer({
288288
// Poll for events every 100 ms
289289
producer.setPollInterval(100);
290290

291-
producer.on('delivery-report', function(err, report) {
291+
producer.on('delivery-report', (err, report) => {
292292
// Report of delivery statistics here:
293293
//
294294
console.log(report);
@@ -313,7 +313,7 @@ The following table describes types of events.
313313
The higher level producer is a variant of the producer which can propagate callbacks to you upon message delivery.
314314

315315
```js
316-
var producer = new Kafka.HighLevelProducer({
316+
const producer = new Kafka.HighLevelProducer({
317317
'metadata.broker.list': 'localhost:9092',
318318
});
319319
```
@@ -330,7 +330,7 @@ producer.produce(topicName, null, Buffer.from('alliance4ever'), null, Date.now()
330330
Additionally you can add serializers to modify the value of a produce for a key or value before it is sent over to Kafka.
331331

332332
```js
333-
producer.setValueSerializer(function(value) {
333+
producer.setValueSerializer((value) => {
334334
return Buffer.from(JSON.stringify(value));
335335
});
336336
```
@@ -342,7 +342,7 @@ Otherwise the behavior of the class should be exactly the same.
342342
To read messages from Kafka, you use a `KafkaConsumer`. You instantiate a `KafkaConsumer` object as follows:
343343

344344
```js
345-
var consumer = new Kafka.KafkaConsumer({
345+
const consumer = new Kafka.KafkaConsumer({
346346
'group.id': 'kafka',
347347
'metadata.broker.list': 'localhost:9092',
348348
}, {});
@@ -357,10 +357,10 @@ The `group.id` and `metadata.broker.list` properties are required for a consumer
357357
Rebalancing is managed internally by `librdkafka` by default. If you would like to override this functionality, you may provide your own logic as a rebalance callback.
358358

359359
```js
360-
var consumer = new Kafka.KafkaConsumer({
360+
const consumer = new Kafka.KafkaConsumer({
361361
'group.id': 'kafka',
362362
'metadata.broker.list': 'localhost:9092',
363-
'rebalance_cb': function(err, assignment) {
363+
'rebalance_cb': (err, assignment) => {
364364

365365
if (err.code === Kafka.CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
366366
// Note: this can throw when you are disconnected. Take care and wrap it in
@@ -385,10 +385,10 @@ var consumer = new Kafka.KafkaConsumer({
385385
When you commit in `node-rdkafka`, the standard way is to queue the commit request up with the next `librdkafka` request to the broker. When doing this, there isn't a way to know the result of the commit. Luckily there is another callback you can listen to to get this information
386386

387387
```js
388-
var consumer = new Kafka.KafkaConsumer({
388+
const consumer = new Kafka.KafkaConsumer({
389389
'group.id': 'kafka',
390390
'metadata.broker.list': 'localhost:9092',
391-
'offset_commit_cb': function(err, topicPartitions) {
391+
'offset_commit_cb': (err, topicPartitions) => {
392392

393393
if (err) {
394394
// There was an error committing
@@ -426,11 +426,11 @@ The stream API is the easiest way to consume messages. The following example ill
426426

427427
```js
428428
// Read from the librdtesting-01 topic... note that this creates a new stream on each call!
429-
var stream = KafkaConsumer.createReadStream(globalConfig, topicConfig, {
429+
const stream = KafkaConsumer.createReadStream(globalConfig, topicConfig, {
430430
topics: ['librdtesting-01']
431431
});
432432

433-
stream.on('data', function(message) {
433+
stream.on('data', (message) => {
434434
console.log('Got message');
435435
console.log(message.value.toString());
436436
});
@@ -455,15 +455,15 @@ The following example illustrates flowing mode:
455455
consumer.connect();
456456

457457
consumer
458-
.on('ready', function() {
458+
.on('ready', () => {
459459
consumer.subscribe(['librdtesting-01']);
460460

461461
// Consume from the librdtesting-01 topic. This is what determines
462462
// the mode we are running in. By not specifying a callback (or specifying
463463
// only a callback) we get messages as soon as they are available.
464464
consumer.consume();
465465
})
466-
.on('data', function(data) {
466+
.on('data', (data) => {
467467
// Output the actual message contents
468468
console.log(data.value.toString());
469469
});
@@ -474,17 +474,17 @@ The following example illustrates non-flowing mode:
474474
consumer.connect();
475475

476476
consumer
477-
.on('ready', function() {
477+
.on('ready', () => {
478478
// Subscribe to the librdtesting-01 topic
479479
// This makes subsequent consumes read from that topic.
480480
consumer.subscribe(['librdtesting-01']);
481481

482482
// Read one message every 1000 milliseconds
483-
setInterval(function() {
483+
setInterval(() => {
484484
consumer.consume(1);
485485
}, 1000);
486486
})
487-
.on('data', function(data) {
487+
.on('data', (data) => {
488488
console.log('Message found! Contents below.');
489489
console.log(data.value.toString());
490490
});
@@ -524,15 +524,15 @@ The following table lists events for this API.
524524
Some times you find yourself in the situation where you need to know the latest (and earliest) offset for one of your topics. Connected producers and consumers both allow you to query for these through `queryWaterMarkOffsets` like follows:
525525

526526
```js
527-
var timeout = 5000, partition = 0;
528-
consumer.queryWatermarkOffsets('my-topic', partition, timeout, function(err, offsets) {
529-
var high = offsets.highOffset;
530-
var low = offsets.lowOffset;
527+
const timeout = 5000, partition = 0;
528+
consumer.queryWatermarkOffsets('my-topic', partition, timeout, (err, offsets) => {
529+
const high = offsets.highOffset;
530+
const low = offsets.lowOffset;
531531
});
532532

533-
producer.queryWatermarkOffsets('my-topic', partition, timeout, function(err, offsets) {
534-
var high = offsets.highOffset;
535-
var low = offsets.lowOffset;
533+
producer.queryWatermarkOffsets('my-topic', partition, timeout, (err, offsets) => {
534+
const high = offsets.highOffset;
535+
const low = offsets.lowOffset;
536536
});
537537

538538
An error will be returned if the client was not connected or the request timed out within the specified interval.
@@ -578,12 +578,12 @@ When fetching metadata for a specific topic, if a topic reference does not exist
578578
Please see the documentation on `Client.getMetadata` if you want to set configuration parameters, e.g. `acks`, on a topic to produce messages to.
579579

580580
```js
581-
var opts = {
581+
const opts = {
582582
topic: 'librdtesting-01',
583583
timeout: 10000
584584
};
585585

586-
producer.getMetadata(opts, function(err, metadata) {
586+
producer.getMetadata(opts, (err, metadata) => {
587587
if (err) {
588588
console.error('Error getting metadata');
589589
console.error(err);
@@ -616,7 +616,7 @@ client.createTopic({
616616
topic: topicName,
617617
num_partitions: 1,
618618
replication_factor: 1
619-
}, function(err) {
619+
}, (err) => {
620620
// Done!
621621
});
622622
```

0 commit comments

Comments (0)