@@ -97,11 +97,11 @@ function createReplacementErrorMessage(cOrP, fnCall, property, propertyVal, repl
97
97
}
98
98
return `'${ property } ' is not supported as a property to '${ fnCall } ', but must be passed to the ${ cOrP } during creation.\n` +
99
99
`Before: \n` +
100
- `\tconst ${ cOrP } = kafka.${ cOrP } ({ ... });\n` +
100
+ `\tconst ${ cOrP } = kafka.${ cOrP } ({ kafkaJs: { ... }, });\n` +
101
101
`\tawait ${ cOrP } .connect();\n` +
102
102
`\t${ cOrP } .${ fnCall } ({ ${ propertyVal } , ... });\n` +
103
103
`After: \n` +
104
- `\tconst ${ cOrP } = kafka.${ cOrP } ({ ${ replacementVal } , ... });\n` +
104
+ `\tconst ${ cOrP } = kafka.${ cOrP } ({ kafkaJs: { ${ replacementVal } , ... }, });\n` +
105
105
`\tawait ${ cOrP } .connect();\n` +
106
106
`\t${ cOrP } .${ fnCall } ({ ... });\n` +
107
107
( isLK ? `For more details on what can be used inside the rdKafka block, see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md\n` : '' ) ;
@@ -179,24 +179,28 @@ const CompatibilityErrorMessages = Object.freeze({
179
179
/**
180
180
* Converts the common configuration from KafkaJS to a format that can be used by node-rdkafka.
181
181
* @param {object } config
182
- * @returns {{globalConfig: import(" ../../types/config").ConsumerGlobalConfig|import("../../types/config").ProducerTopicConfig, topicConfig: import(" ../../types/config").ConsumerTopicConfig|import("../../types/config").ProducerTopicConfig} }
182
+ * @returns {import(' ../../types/config').ProducerGlobalConfig | import(' ../../types/config').ConsumerGlobalConfig } the converted configuration
183
183
* @throws {error.KafkaJSError } if the configuration is invalid.
184
184
* The error code will be ERR__INVALID_ARG in case of invalid arguments or features that are not supported.
185
185
* The error code will be ERR__NOT_IMPLEMENTED in case of features that are not yet implemented.
186
186
*/
187
- async function kafkaJSToRdKafkaConfig ( config ) {
188
- const globalConfig = { } ;
189
- const topicConfig = { } ;
190
-
191
- if ( ! Array . isArray ( config [ "brokers" ] ) ) {
192
- throw new error . KafkaJSError ( CompatibilityErrorMessages . brokerString ( ) , {
193
- code : error . ErrorCodes . ERR__INVALID_ARG ,
194
- } ) ;
187
+ function kafkaJSToRdKafkaConfig ( config ) {
188
+ /* Since the kafkaJs block is specified, we operate in
189
+ * kafkaJs compatibility mode. That means we change the defaults to
190
+ * match the kafkaJs defaults. */
191
+ const rdkafkaConfig = { } ;
192
+
193
+ if ( Object . hasOwn ( config , "brokers" ) ) {
194
+ if ( ! Array . isArray ( config [ "brokers" ] ) ) {
195
+ throw new error . KafkaJSError ( CompatibilityErrorMessages . brokerString ( ) , {
196
+ code : error . ErrorCodes . ERR__INVALID_ARG ,
197
+ } ) ;
198
+ }
199
+ rdkafkaConfig [ "bootstrap.servers" ] = config [ "brokers" ] . join ( "," ) ;
195
200
}
196
- globalConfig [ "bootstrap.servers" ] = config [ "brokers" ] . join ( "," ) ;
197
201
198
202
if ( Object . hasOwn ( config , "clientId" ) ) {
199
- globalConfig [ "client.id" ] = config . clientId ;
203
+ rdkafkaConfig [ "client.id" ] = config . clientId ;
200
204
}
201
205
202
206
let withSASL = false ;
@@ -224,18 +228,18 @@ async function kafkaJSToRdKafkaConfig(config) {
224
228
} ) ;
225
229
}
226
230
227
- globalConfig [ "sasl.mechanism" ] = mechanism ;
228
- globalConfig [ "sasl.username" ] = sasl . username ;
229
- globalConfig [ "sasl.password" ] = sasl . password ;
231
+ rdkafkaConfig [ "sasl.mechanism" ] = mechanism ;
232
+ rdkafkaConfig [ "sasl.username" ] = sasl . username ;
233
+ rdkafkaConfig [ "sasl.password" ] = sasl . password ;
230
234
withSASL = true ;
231
235
}
232
236
233
237
if ( Object . hasOwn ( config , "ssl" ) && config . ssl && withSASL ) {
234
- globalConfig [ "security.protocol" ] = "sasl_ssl" ;
238
+ rdkafkaConfig [ "security.protocol" ] = "sasl_ssl" ;
235
239
} else if ( withSASL ) {
236
- globalConfig [ "security.protocol" ] = "sasl_plaintext" ;
240
+ rdkafkaConfig [ "security.protocol" ] = "sasl_plaintext" ;
237
241
} else if ( Object . hasOwn ( config , "ssl" ) && config . ssl ) {
238
- globalConfig [ "security.protocol" ] = "ssl" ;
242
+ rdkafkaConfig [ "security.protocol" ] = "ssl" ;
239
243
}
240
244
241
245
/* TODO: add best-effort support for ssl besides just true/false */
@@ -246,14 +250,14 @@ async function kafkaJSToRdKafkaConfig(config) {
246
250
}
247
251
248
252
if ( Object . hasOwn ( config , "requestTimeout" ) ) {
249
- globalConfig [ "socket.timeout.ms" ] = config . requestTimeout ;
253
+ rdkafkaConfig [ "socket.timeout.ms" ] = config . requestTimeout ;
250
254
} else {
251
255
/* KafkaJS default */
252
- globalConfig [ "socket.timeout.ms" ] = 30000 ;
256
+ rdkafkaConfig [ "socket.timeout.ms" ] = 30000 ;
253
257
}
254
258
255
259
if ( Object . hasOwn ( config , "enforceRequestTimeout" ) && ! config . enforceRequestTimeout ) {
256
- globalConfig [ "socket.timeout.ms" ] = 300000 ;
260
+ rdkafkaConfig [ "socket.timeout.ms" ] = 300000 ;
257
261
}
258
262
259
263
const connectionTimeout = config . connectionTimeout ?? 1000 ;
@@ -262,14 +266,14 @@ async function kafkaJSToRdKafkaConfig(config) {
262
266
263
267
/* The minimum value for socket.connection.setup.timeout.ms is 1000. */
264
268
totalConnectionTimeout = Math . max ( totalConnectionTimeout , 1000 ) ;
265
- globalConfig [ "socket.connection.setup.timeout.ms" ] = totalConnectionTimeout ;
269
+ rdkafkaConfig [ "socket.connection.setup.timeout.ms" ] = totalConnectionTimeout ;
266
270
267
271
const retry = config . retry ?? { } ;
268
272
const { maxRetryTime, initialRetryTime, factor, multiplier, retries, restartOnFailure } = retry ;
269
273
270
- globalConfig [ "retry.backoff.max.ms" ] = maxRetryTime ?? 30000 ;
271
- globalConfig [ "retry.backoff.ms" ] = initialRetryTime ?? 300 ;
272
- globalConfig [ "retries" ] = retries ?? 5 ;
274
+ rdkafkaConfig [ "retry.backoff.max.ms" ] = maxRetryTime ?? 30000 ;
275
+ rdkafkaConfig [ "retry.backoff.ms" ] = initialRetryTime ?? 300 ;
276
+ rdkafkaConfig [ "retries" ] = retries ?? 5 ;
273
277
274
278
if ( ( typeof factor === 'number' ) || ( typeof multiplier === 'number' ) ) {
275
279
throw new error . KafkaJSError ( CompatibilityErrorMessages . retryFactorMultiplier ( ) , {
@@ -295,6 +299,7 @@ async function kafkaJSToRdKafkaConfig(config) {
295
299
} ) ;
296
300
}
297
301
302
+ rdkafkaConfig [ "log_level" ] = 6 /* LOG_INFO - default in KafkaJS compatibility mode. */ ;
298
303
if ( Object . hasOwn ( config , "logLevel" ) ) {
299
304
let setLevel = config . logLevel ;
300
305
@@ -303,19 +308,20 @@ async function kafkaJSToRdKafkaConfig(config) {
303
308
}
304
309
switch ( setLevel ) {
305
310
case logLevel . NOTHING :
306
- globalConfig [ "log_level" ] = 0 ; /* LOG_EMERG - we don't have a true log nothing yet */
311
+ rdkafkaConfig [ "log_level" ] = 0 ; /* LOG_EMERG - we don't have a true log nothing yet */
307
312
break ;
308
313
case logLevel . ERROR :
309
- globalConfig [ "log_level" ] = 3 /* LOG_ERR */ ;
314
+ rdkafkaConfig [ "log_level" ] = 3 /* LOG_ERR */ ;
310
315
break ;
311
316
case logLevel . WARN :
312
- globalConfig [ "log_level" ] = 4 /* LOG_WARNING */ ;
317
+ rdkafkaConfig [ "log_level" ] = 4 /* LOG_WARNING */ ;
313
318
break ;
314
319
case logLevel . INFO :
315
- globalConfig [ "log_level" ] = 6 /* LOG_INFO */ ;
320
+ rdkafkaConfig [ "log_level" ] = 6 /* LOG_INFO */ ;
316
321
break ;
317
322
case logLevel . DEBUG :
318
- globalConfig [ "debug" ] = "all" /* this will set librdkafka log_level to 7 */ ;
323
+ rdkafkaConfig [ "debug" ] = "all" /* Turn on debug logs for everything, otherwise this log level is not useful*/ ;
324
+ rdkafkaConfig [ "log_level" ] = 7 /* LOG_DEBUG */ ;
319
325
break ;
320
326
default :
321
327
throw new error . KafkaJSError ( CompatibilityErrorMessages . logLevelName ( setLevel ) , {
@@ -324,20 +330,7 @@ async function kafkaJSToRdKafkaConfig(config) {
324
330
}
325
331
}
326
332
327
- if ( config . rdKafka ) {
328
- if ( config . rdKafka . constructor === Function ) {
329
- await config . rdKafka ( globalConfig , topicConfig ) ;
330
- } else {
331
- Object . assign ( globalConfig , config . rdKafka . globalConfig ) ;
332
- Object . assign ( topicConfig , config . rdKafka . topicConfig ) ;
333
- }
334
- }
335
-
336
-
337
- if ( ! Object . hasOwn ( globalConfig , 'log_level' ) )
338
- globalConfig [ 'log_level' ] = Object . hasOwn ( globalConfig , 'debug' ) ? 7 /* LOG_DEBUG */ : 6 /* LOG_INFO */ ;
339
-
340
- return { globalConfig, topicConfig } ;
333
+ return rdkafkaConfig ;
341
334
}
342
335
343
336
function checkAllowedKeys ( allowedKeysSpecific , config ) {
@@ -421,7 +414,6 @@ function createKafkaJsErrorFromLibRdKafkaError(librdKafkaError) {
421
414
err = new error . KafkaJSError ( librdKafkaError , properties ) ;
422
415
}
423
416
424
- console . log ( "Converted err = " + JSON . stringify ( err , null , 2 ) + " librdkafka erro = " + JSON . stringify ( librdKafkaError , null , 2 ) ) ;
425
417
return err ;
426
418
}
427
419
0 commit comments