@@ -5,7 +5,7 @@ use clap::Parser;
 use crate::{
     data_storage::AvailableDataStorages,
     info_storage::AvailableInfoStorages,
-    notifiers::{Format, Hook},
+    notifiers::{impls::kafka_notifier::ExtraKafkaOptions, Format, Hook},
     protocol::extensions::Extensions,
 };

@@ -284,6 +284,91 @@ pub struct AMQPHooksOptions {
     pub auto_delete: bool,
 }

+#[derive(Parser, Debug, Clone)]
+pub struct KafkaHookOptions {
+    /// Kafka URLs.
+    /// List of brokers to connect to in the format `host:port`.
+    /// If you have multiple brokers, separate them with commas.
+    /// Corresponds to `bootstrap.servers` in Kafka configuration.
+    #[arg(
+        name = "hooks-kafka-urls",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_URLS",
+        use_value_delimiter = true
+    )]
+    pub urls: String,
+    /// Kafka producer `client.id`.
+    #[arg(
+        name = "hooks-kafka-client-id",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_CLIENT_ID"
+    )]
+    pub client_id: Option<String>,
+    /// Kafka topic. If specified, all events will be sent to this topic.
+    #[arg(name = "hooks-kafka-topic", long, env = "RUSTUS_HOOKS_KAFKA_TOPIC")]
+    pub topic: Option<String>,
+    /// Kafka topic prefix. If specified, the prefix is added to all topic names
+    /// and events are sent to separate topics.
+    #[arg(name = "hooks-kafka-prefix", long, env = "RUSTUS_HOOKS_KAFKA_PREFIX")]
+    pub prefix: Option<String>,
+    /// Kafka required acks.
+    /// This parameter configures how many replicas
+    /// must acknowledge the message.
+    ///
+    /// Corresponds to `request.required.acks` in Kafka configuration.
+    /// Possible values are:
+    /// * -1 - all replicas must acknowledge the message;
+    /// * 0 - no acknowledgment is required;
+    /// * ...1000 - the number of replicas that must acknowledge the message.
+    #[arg(
+        name = "hooks-kafka-required-acks",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_REQUIRED_ACKS"
+    )]
+    pub required_acks: Option<String>,
+
+    /// Compression codec.
+    /// This parameter is used to compress messages before sending them to Kafka.
+    /// Possible values are:
+    /// * none - no compression;
+    /// * gzip - gzip compression;
+    /// * snappy - snappy compression;
+    /// * lz4 - lz4 compression;
+    /// * zstd - zstd compression.
+    /// Corresponds to `compression.codec` in Kafka configuration.
+    #[arg(
+        name = "hooks-kafka-compression",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_COMPRESSION"
+    )]
+    pub compression: Option<String>,
+
+    /// Kafka idle timeout in seconds.
+    /// After this amount of time, an idle connection will be dropped.
+    /// Corresponds to `connections.max.idle.ms` in Kafka configuration.
+    #[arg(
+        name = "hooks-kafka-idle-timeout",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_IDLE_TIMEOUT"
+    )]
+    pub idle_timeout: Option<u64>,
+
+    /// Kafka send timeout in seconds.
+    /// If a message cannot be sent within this time, it is dropped.
+    #[arg(
+        name = "hooks-kafka-send-timeout",
+        long,
+        env = "RUSTUS_HOOKS_KAFKA_SEND_TIMEOUT"
+    )]
+    pub send_timeout: Option<u64>,
+
+    /// Extra options for Kafka.
+    /// This parameter is used to pass additional options to the Kafka producer.
+    /// All options must be in the format `key=value`, separated by semicolons.
+    /// Example: `key1=value1;key2=value2`.
+    pub extra_kafka_opts: Option<ExtraKafkaOptions>,
+}
+
 #[derive(Parser, Debug, Clone)]
 #[allow(clippy::struct_excessive_bools)]
 pub struct NotificationsOptions {
@@ -336,6 +421,9 @@ pub struct NotificationsOptions {

     #[command(flatten)]
     pub amqp_hook_opts: AMQPHooksOptions,
+
+    #[command(flatten)]
+    pub kafka_hook_opts: KafkaHookOptions,
 }

 #[derive(Debug, Parser, Clone)]
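
The doc comments above name the librdkafka settings each option corresponds to. Below is a minimal sketch of how a notifier might turn `KafkaHookOptions` into a producer configuration; it assumes the `rdkafka` crate and a hypothetical `build_client_config` helper, neither of which appears in this diff.

// Illustrative sketch only: not part of the diff above.
// Assumes the notifier uses the `rdkafka` crate and maps KafkaHookOptions
// fields onto the librdkafka settings named in the doc comments.
use rdkafka::ClientConfig;

fn build_client_config(opts: &KafkaHookOptions) -> ClientConfig {
    let mut cfg = ClientConfig::new();
    // Comma-separated broker list, e.g. "host1:9092,host2:9092".
    cfg.set("bootstrap.servers", opts.urls.as_str());
    if let Some(client_id) = &opts.client_id {
        cfg.set("client.id", client_id.as_str());
    }
    if let Some(acks) = &opts.required_acks {
        cfg.set("request.required.acks", acks.as_str());
    }
    if let Some(codec) = &opts.compression {
        cfg.set("compression.codec", codec.as_str());
    }
    if let Some(idle_secs) = opts.idle_timeout {
        // librdkafka expects milliseconds; the CLI option is in seconds.
        cfg.set("connections.max.idle.ms", (idle_secs * 1000).to_string());
    }
    // `topic`, `prefix`, `send_timeout`, and `extra_kafka_opts` would be
    // handled by the notifier itself rather than as producer settings.
    cfg
}

With the flags and environment variables defined above, the hook could then be enabled with, for example, `RUSTUS_HOOKS_KAFKA_URLS=broker1:9092,broker2:9092` plus either `RUSTUS_HOOKS_KAFKA_TOPIC` (single topic) or `RUSTUS_HOOKS_KAFKA_PREFIX` (per-event topics).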