@@ -12,7 +12,7 @@ import org.apache.kafka.common.serialization.ByteArraySerializer
import scala.collection.JavaConverters._

- class ConsumeMarkersActor(clients: KafkaClients, config: KmqConfig) extends Actor with StrictLogging {
+ class ConsumeMarkersActor(clients: KafkaClients, config: KmqConfig, extraConfig: Option[java.util.Map[String, Object]] = None) extends Actor with StrictLogging {

  private val OneSecond = 1000L

@@ -26,11 +26,22 @@ class ConsumeMarkersActor(clients: KafkaClients, config: KmqConfig) extends Acto
  private var commitMarkerOffsetsActor: ActorRef = _

  override def preStart(): Unit = {
-   markerConsumer = clients.createConsumer(config.getRedeliveryConsumerGroupId,
-     classOf[MarkerKey.MarkerKeyDeserializer],
-     classOf[MarkerValue.MarkerValueDeserializer])
-
-   producer = clients.createProducer(classOf[ByteArraySerializer], classOf[ByteArraySerializer])
+   markerConsumer = extraConfig match {
+     // extraConfig is not empty
+     case Some(cfg) => clients.createConsumer(config.getRedeliveryConsumerGroupId,
+       classOf[MarkerKey.MarkerKeyDeserializer],
+       classOf[MarkerValue.MarkerValueDeserializer], cfg)
+     // extraConfig is empty
+     case None => clients.createConsumer(config.getRedeliveryConsumerGroupId,
+       classOf[MarkerKey.MarkerKeyDeserializer],
+       classOf[MarkerValue.MarkerValueDeserializer])
+   }
+   producer = extraConfig match {
+     // extraConfig is not empty
+     case Some(cfg) => clients.createProducer(classOf[ByteArraySerializer], classOf[ByteArraySerializer], cfg)
+     // extraConfig is empty
+     case None => clients.createProducer(classOf[ByteArraySerializer], classOf[ByteArraySerializer])
+   }

    setupMarkerConsumer()
    setupOffsetCommitting()
@@ -62,7 +73,7 @@ class ConsumeMarkersActor(clients: KafkaClients, config: KmqConfig) extends Acto
  private def partitionAssigned(p: Partition, endOffset: Offset): Unit = {
    val redeliverActorProps = Props(
-     new RedeliverActor(p, new RetryingRedeliverer(new DefaultRedeliverer(p, producer, config, clients))))
+     new RedeliverActor(p, new RetryingRedeliverer(new DefaultRedeliverer(p, producer, config, clients, extraConfig))))
      .withDispatcher("kmq.redeliver-dispatcher")
    val redeliverActor = context.actorOf(
      redeliverActorProps,
@@ -75,7 +86,7 @@ class ConsumeMarkersActor(clients: KafkaClients, config: KmqConfig) extends Acto
  private def setupOffsetCommitting(): Unit = {
    commitMarkerOffsetsActor = context.actorOf(
-     Props(new CommitMarkerOffsetsActor(config.getMarkerTopic, clients)),
+     Props(new CommitMarkerOffsetsActor(config.getMarkerTopic, clients, extraConfig)),
      "commit-marker-offsets")

    commitMarkerOffsetsActor ! DoCommit
@@ -170,4 +181,4 @@ case object DoCommit
case class RedeliverMarkers(markers: List[MarkerKey])
case object DoRedeliver

- case object DoConsume
+ case object DoConsume
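
A minimal usage sketch of the new extraConfig parameter, assuming an ActorSystem `system`, a KafkaClients `clients` and a KmqConfig `config` already exist and the kmq and akka.actor.Props imports are in scope; the property keys and values below are illustrative only, not taken from this change:

import java.util.{Map => JMap}
import scala.collection.JavaConverters._

// Example-only properties; any extra Kafka consumer/producer settings could be passed here.
val extraProps: JMap[String, Object] = Map[String, Object](
  "security.protocol" -> "SSL",
  "ssl.truststore.location" -> "/path/to/truststore.jks"
).asJava

// The Option defaults to None, so existing callers are unaffected; Some(...) forwards the
// extra properties to the marker consumer, producer, redeliverer and offset-committing actor.
val consumeMarkers = system.actorOf(
  Props(new ConsumeMarkersActor(clients, config, Some(extraProps))),
  "consume-markers-actor")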