Skip to content

Commit 9bef0d8

Browse files
committed
Add binding level debug logging and client name to logs
* Add binding level debug logging and client name to logs * Add C++ changes for binding-level logging
1 parent 52a6ccb commit 9bef0d8

File tree

9 files changed

+152
-31
lines changed

9 files changed

+152
-31
lines changed

lib/kafkajs/_admin.js

Lines changed: 35 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,12 @@ const { kafkaJSToRdKafkaConfig,
33
createKafkaJsErrorFromLibRdKafkaError,
44
DefaultLogger,
55
CompatibilityErrorMessages,
6+
createBindingMessageMetadata,
67
logLevel,
8+
checkAllowedKeys,
79
loggerTrampoline,
8-
checkAllowedKeys } = require('./_common');
10+
severityToLogLevel,
11+
} = require('./_common');
912
const error = require('./_error');
1013

1114
/**
@@ -57,6 +60,20 @@ class Admin {
5760
*/
5861
#connectPromiseFunc = null;
5962

63+
/**
64+
* The client name used by the admin client for logging - determined by librdkafka
65+
* using a combination of clientId and an integer.
66+
* @type {string|undefined}
67+
*/
68+
#clientName = undefined;
69+
70+
/**
71+
* Convenience function to create the metadata object needed for logging.
72+
*/
73+
#createAdminBindingMessageMetadata() {
74+
return createBindingMessageMetadata(this.#clientName);
75+
}
76+
6077
/**
6178
* @constructor
6279
* @param {import("../../types/kafkajs").AdminConstructorConfig} config
@@ -82,6 +99,17 @@ class Admin {
8299
}
83100

84101
const rdKafkaConfig = kafkaJSToRdKafkaConfig(kjsConfig);
102+
103+
/* Set the logger */
104+
if (Object.hasOwn(kjsConfig, 'logger')) {
105+
this.#logger = kjsConfig.logger;
106+
}
107+
108+
/* Set the log level - INFO for compatibility with kafkaJS, or DEBUG if that is turned
109+
* on using the logLevel property. rdKafkaConfig.log_level is guaranteed to be set if we're
110+
* here, and containing the correct value. */
111+
this.#logger.setLogLevel(severityToLogLevel[rdKafkaConfig.log_level]);
112+
85113
return rdKafkaConfig;
86114
}
87115

@@ -121,7 +149,7 @@ class Admin {
121149
if (this.#state === AdminState.CONNECTING) {
122150
this.#connectPromiseFunc['reject'](err);
123151
} else {
124-
this.#logger.error(err);
152+
this.#logger.error(`Error: ${err.message}`, this.#createAdminBindingMessageMetadata());
125153
}
126154
}
127155

@@ -145,8 +173,11 @@ class Admin {
145173
this.#internalClient = RdKafka.AdminClient.create(config, {
146174
'error': this.#errorCb.bind(this),
147175
'ready': this.#readyCb.bind(this),
148-
'event.log': (msg) => loggerTrampoline(msg, this.#logger)
176+
'event.log': (msg) => loggerTrampoline(msg, this.#logger),
149177
});
178+
179+
this.#clientName = this.#internalClient.name;
180+
this.#logger.info("Admin client connected", this.#createAdminBindingMessageMetadata());
150181
} catch (err) {
151182
reject(createKafkaJsErrorFromLibRdKafkaError(err));
152183
}
@@ -174,6 +205,7 @@ class Admin {
174205
/* AdminClient disconnect for node-rdkafka is synchronous. */
175206
this.#internalClient.disconnect();
176207
this.#state = AdminState.DISCONNECTED;
208+
this.#logger.info("Admin client disconnected", this.#createAdminBindingMessageMetadata());
177209
resolve();
178210
} catch (err) {
179211
reject(createKafkaJsErrorFromLibRdKafkaError(err));

lib/kafkajs/_common.js

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,20 @@ class DefaultLogger {
113113
}
114114
}
115115

116+
/**
117+
* Convenience function to create a new object to be used as metadata for log messages.
118+
* Returned object is intended to be used immediately and not stored.
119+
*
120+
* @param {string|undefined} clientName
121+
*/
122+
function createBindingMessageMetadata(clientName) {
123+
return {
124+
name: clientName,
125+
fac: 'BINDING',
126+
timestamp: Date.now(),
127+
};
128+
}
129+
116130
/**
117131
* Trampoline for user defined logger, if any.
118132
* @param {{severity: number, fac: string, message: string, name?: string}} msg
@@ -128,16 +142,16 @@ function loggerTrampoline(msg, logger) {
128142
case logLevel.NOTHING:
129143
break;
130144
case logLevel.ERROR:
131-
logger.error(msg.message, { fac: msg.fac, timestamp: Date.now() });
145+
logger.error(msg.message, { fac: msg.fac, timestamp: Date.now(), name: msg.name });
132146
break;
133147
case logLevel.WARN:
134-
logger.warn(msg.message, { fac: msg.fac, timestamp: Date.now() });
148+
logger.warn(msg.message, { fac: msg.fac, timestamp: Date.now(), name: msg.name });
135149
break;
136150
case logLevel.INFO:
137-
logger.info(msg.message, { fac: msg.fac, timestamp: Date.now() });
151+
logger.info(msg.message, { fac: msg.fac, timestamp: Date.now(), name: msg.name });
138152
break;
139153
case logLevel.DEBUG:
140-
logger.debug(msg.message, { fac: msg.fac, timestamp: Date.now() });
154+
logger.debug(msg.message, { fac: msg.fac, timestamp: Date.now(), name: msg.name });
141155
break;
142156
default:
143157
throw new error.KafkaJSError("Invalid logLevel", {
@@ -706,6 +720,7 @@ module.exports = {
706720
topicPartitionOffsetMetadataToKafkaJS,
707721
createKafkaJsErrorFromLibRdKafkaError,
708722
convertToRdKafkaHeaders,
723+
createBindingMessageMetadata,
709724
notImplemented,
710725
logLevel,
711726
loggerTrampoline,

lib/kafkajs/_consumer.js

Lines changed: 54 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ const {
66
topicPartitionOffsetToRdKafka,
77
topicPartitionOffsetMetadataToRdKafka,
88
topicPartitionOffsetMetadataToKafkaJS,
9+
createBindingMessageMetadata,
910
createKafkaJsErrorFromLibRdKafkaError,
1011
notImplemented,
1112
loggerTrampoline,
@@ -171,6 +172,20 @@ class Consumer {
171172
*/
172173
#topicPartitionToBatchPayload = new Map();
173174

175+
/**
176+
* The client name used by the consumer for logging - determined by librdkafka
177+
* using a combination of clientId and an integer.
178+
* @type {string|undefined}
179+
*/
180+
#clientName = undefined;
181+
182+
/**
183+
* Convenience function to create the metadata object needed for logging.
184+
*/
185+
#createConsumerBindingMessageMetadata() {
186+
return createBindingMessageMetadata(this.#clientName);
187+
}
188+
174189
/**
175190
* @constructor
176191
* @param {import("../../types/kafkajs").ConsumerConfig} kJSConfig
@@ -222,8 +237,8 @@ class Consumer {
222237
}
223238

224239
/* TODO: we should cry more about this and render the consumer unusable. */
225-
await Promise.all(seekPromises).catch(err => this.#logger.error("Seek error. This is effectively a fatal error:" + err));
226-
240+
await Promise.all(seekPromises)
241+
.catch(err => this.#logger.error(`Seek error. This is effectively a fatal error: ${err}`, this.#createConsumerBindingMessageMetadata()));
227242

228243
/* Clear the cache and stored offsets.
229244
* We need to do this only if topicPartitions = null (global cache expiry).
@@ -242,6 +257,9 @@ class Consumer {
242257
#rebalanceCallback(err, assignment) {
243258
err = LibrdKafkaError.create(err);
244259
const userSpecifiedRebalanceCb = this.#userConfig['rebalance_cb'];
260+
this.#logger.info(
261+
`Received rebalance event with message: '${err.message}' and ${assignment.length} partition(s)`,
262+
this.#createConsumerBindingMessageMetadata());
245263

246264
let assignmentFnCalled = false;
247265
function assignmentFn(userAssignment) {
@@ -559,6 +577,9 @@ class Consumer {
559577
/* Slight optimization for cases where the size of messages in our subscription is less than the cache size. */
560578
this.#internalClient.setDefaultIsTimeoutOnlyForFirstMessage(true);
561579

580+
this.#clientName = this.#internalClient.name;
581+
this.#logger.info('Consumer connected', this.#createConsumerBindingMessageMetadata());
582+
562583
// Resolve the promise.
563584
this.#connectPromiseFunc['resolve']();
564585
}
@@ -571,7 +592,7 @@ class Consumer {
571592
if (this.#state === ConsumerState.CONNECTING) {
572593
this.#connectPromiseFunc['reject'](err);
573594
} else {
574-
this.#logger.error(err);
595+
this.#logger.error(err, this.#createConsumerBindingMessageMetadata());
575596
}
576597
}
577598

@@ -660,8 +681,7 @@ class Consumer {
660681
this.#lastConsumedOffsets.set(key, offset + 1);
661682
} catch (e) {
662683
/* Not much we can do, except log the error. */
663-
if (this.#logger)
664-
this.#logger.error(`Consumer encountered error while storing offset. Error details: ${e}:${e.stack}`);
684+
this.#logger.error(`Consumer encountered error while storing offset. Error details: ${e}:${e.stack}`, this.#createConsumerBindingMessageMetadata());
665685
}
666686
}
667687

@@ -782,6 +802,7 @@ class Consumer {
782802
}
783803

784804
this.#fetchInProgress = true;
805+
this.#logger.debug(`Attempting to fetch ${this.#messageCache.maxSize} messages to the message cache`, this.#createConsumerBindingMessageMetadata());
785806
return new Promise((resolve, reject) => {
786807
this.#internalClient.consume(this.#messageCache.maxSize, (err, messages) => {
787808
this.#fetchInProgress = false;
@@ -828,6 +849,7 @@ class Consumer {
828849
}
829850

830851
this.#fetchInProgress = true;
852+
this.#logger.debug(`Attempting to fetch ${this.#messageCache.maxSize} messages to the message cache`, this.#createConsumerBindingMessageMetadata());
831853
return new Promise((resolve, reject) => {
832854
this.#internalClient.consume(this.#messageCache.maxSize, (err, messages) => {
833855
this.#fetchInProgress = false;
@@ -968,6 +990,7 @@ class Consumer {
968990
});
969991

970992
this.#storedSubscriptions = subscription.replace ? topics : this.#storedSubscriptions.concat(topics);
993+
this.#logger.debug(`${subscription.replace ? 'Replacing' : 'Adding'} topics [${topics.join(', ')}] to subscription`, this.#createConsumerBindingMessageMetadata());
971994
this.#internalClient.subscribe(this.#storedSubscriptions);
972995
}
973996

@@ -1043,12 +1066,12 @@ class Consumer {
10431066
* However, we don't do this inside the catch, but just outside it. This is because throwing an
10441067
* error is not the only case where we might want to seek back.
10451068
*
1046-
* So - do nothing but a debug log, but at this point eachMessageProcessed is false.
1069+
* So - do nothing but a log, but at this point eachMessageProcessed is false.
1070+
* TODO: log error only if error type is not KafkaJSError and if no pause() has been called, else log debug.
10471071
*/
1048-
this.#logger.debug(`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`);
1049-
1050-
/* TODO: log error if error type is not KafkaJSError and if no pause() has been called */
1051-
this.#logger.error(`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`);
1072+
this.#logger.error(
1073+
`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`,
1074+
this.#createConsumerBindingMessageMetadata());
10521075
}
10531076

10541077
/* If the message is unprocessed, due to an error, or because the user has not resolved it, we seek back. */
@@ -1067,8 +1090,7 @@ class Consumer {
10671090
this.#lastConsumedOffsets.set(partitionKey(m), Number(m.offset) + 1);
10681091
} catch (e) {
10691092
/* Not much we can do, except log the error. */
1070-
if (this.#logger)
1071-
this.#logger.error(`Consumer encountered error while storing offset. Error details: ${JSON.stringify(e)}`);
1093+
this.#logger.error(`Consumer encountered error while storing offset. Error details: ${JSON.stringify(e)}`, this.#createConsumerBindingMessageMetadata());
10721094
}
10731095
}
10741096

@@ -1123,13 +1145,13 @@ class Consumer {
11231145
* if the user has not called `resolveOffset` manually in case of using eachBatch without
11241146
* eachBatchAutoResolve being set.
11251147
*
1126-
* So - do nothing but a debug log, but at this point eachMessageProcessed needs to be false unless
1148+
* So - do nothing but a log, but at this point eachMessageProcessed needs to be false unless
11271149
* the user has explicitly marked it as true.
1150+
* TODO: log error only if error type is not KafkaJSError and if no pause() has been called, else log debug.
11281151
*/
1129-
this.#logger.debug(`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`);
1130-
1131-
/* TODO: log error if error type is not KafkaJSError and if no pause() has been called */
1132-
this.#logger.error(`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`);
1152+
this.#logger.error(
1153+
`Consumer encountered error while processing message. Error details: ${e}: ${e.stack}. The same message may be reprocessed.`,
1154+
this.#createConsumerBindingMessageMetadata());
11331155

11341156
/* The value of eachBatchAutoResolve is not important. The only place where a message is marked processed
11351157
* despite an error is if the user says so, and the user can use resolveOffset for both the possible
@@ -1177,6 +1199,7 @@ class Consumer {
11771199
/* Invalidate the message cache if needed */
11781200
const locallyStale = this.#messageCache.popLocallyStale();
11791201
if (this.#messageCache.isStale()) {
1202+
this.#logger.debug("Scheduling worker termination", this.#createConsumerBindingMessageMetadata());
11801203
this.#workerTerminationScheduled = true;
11811204
break;
11821205
} else if (locallyStale.length !== 0) {
@@ -1191,7 +1214,7 @@ class Consumer {
11911214
/* Since this error cannot be exposed to the user in the current situation, just log and retry.
11921215
* This is due to restartOnFailure being set to always true. */
11931216
if (this.#logger)
1194-
this.#logger.error(`Consumer encountered error while consuming. Retrying. Error details: ${e} : ${e.stack}`);
1217+
this.#logger.error(`Consumer encountered error while consuming. Retrying. Error details: ${e} : ${e.stack}`, this.#createConsumerBindingMessageMetadata());
11951218
});
11961219

11971220
nextIdx = -1;
@@ -1229,14 +1252,15 @@ class Consumer {
12291252
while (!this.#disconnectStarted) {
12301253
this.#workerTerminationScheduled = false;
12311254
const workersToSpawn = Math.max(1, Math.min(this.#concurrency, this.#partitionCount));
1255+
this.#logger.debug(`Spawning ${workersToSpawn} workers`, this.#createConsumerBindingMessageMetadata());
12321256
this.#workers =
12331257
Array(workersToSpawn)
12341258
.fill()
12351259
.map((_, i) =>
12361260
this.#worker(config, perMessageProcessor.bind(this), fetcher.bind(this))
12371261
.catch(e => {
12381262
if (this.#logger)
1239-
this.#logger.error(`Worker ${i} encountered an error: ${e}:${e.stack}`);
1263+
this.#logger.error(`Worker ${i} encountered an error: ${e}:${e.stack}`, this.#createConsumerBindingMessageMetadata());
12401264
}));
12411265

12421266
/* Best we can do is log errors on worker issues - handled by the catch block above. */
@@ -1427,7 +1451,10 @@ class Consumer {
14271451
/* It's assumed that topicPartition is already assigned, and thus can be seeked to and committed to.
14281452
* Errors are logged to detect bugs in the internal code. */
14291453
/* TODO: is it worth awaiting seeks to finish? */
1430-
this.#internalClient.seek(topicPartitionOffset, 0, err => err ? this.#logger.error(err) : null);
1454+
this.#internalClient.seek(topicPartitionOffset, 0, err => {
1455+
if (err)
1456+
this.#logger.error(`Error while calling seek from within seekInternal: ${err}`, this.#createConsumerBindingMessageMetadata());
1457+
});
14311458
offsetsToCommit.push({
14321459
topic: topicPartition.topic,
14331460
partition: topicPartition.partition,
@@ -1539,6 +1566,8 @@ class Consumer {
15391566
throw new error.KafkaJSError('Pause can only be called while connected.', { code: error.ErrorCodes.ERR__STATE });
15401567
}
15411568

1569+
this.#logger.debug(`Pausing ${topics.length} topics`, this.#createConsumerBindingMessageMetadata());
1570+
15421571
const toppars = [];
15431572
for (let topic of topics) {
15441573
if (typeof topic.topic !== 'string') {
@@ -1612,6 +1641,8 @@ class Consumer {
16121641
throw new error.KafkaJSError('Resume can only be called while connected.', { code: error.ErrorCodes.ERR__STATE });
16131642
}
16141643

1644+
this.#logger.debug(`Resuming ${topics.length} topics`, this.#createConsumerBindingMessageMetadata());
1645+
16151646
const toppars = [];
16161647
for (let topic of topics) {
16171648
if (typeof topic.topic !== 'string') {
@@ -1677,6 +1708,8 @@ class Consumer {
16771708

16781709
this.#disconnectStarted = true;
16791710
this.#workerTerminationScheduled = true;
1711+
1712+
this.#logger.debug("Signalling disconnection attempt to workers", this.#createConsumerBindingMessageMetadata());
16801713
while (!(await acquireOrLog(this.#lock, this.#logger))); /* Just retry... */
16811714

16821715
this.#state = ConsumerState.DISCONNECTING;
@@ -1691,6 +1724,7 @@ class Consumer {
16911724
return;
16921725
}
16931726
this.#state = ConsumerState.DISCONNECTED;
1727+
this.#logger.info("Consumer disconnected", this.#createConsumerBindingMessageMetadata());
16941728
resolve();
16951729
};
16961730
this.#internalClient.disconnect(cb);

0 commit comments

Comments
 (0)