 
 package com.bakdata.kafka;
 
-import static com.bakdata.kafka.KafkaContainerHelper.DEFAULT_TOPIC_SETTINGS;
-import static com.bakdata.kafka.TestUtil.newKafkaCluster;
-import static java.util.Collections.emptyMap;
+import static com.bakdata.kafka.KafkaTest.POLL_TIMEOUT;
+import static com.bakdata.kafka.KafkaTest.newCluster;
 import static org.assertj.core.api.Assertions.assertThat;
 
-import com.bakdata.kafka.util.ImprovedAdminClient;
+import com.bakdata.kafka.SenderBuilder.SimpleProducerRecord;
 import com.ginsberg.junit.exit.ExpectSystemExitWithStatus;
 import java.time.Duration;
 import java.util.List;
 import java.util.regex.Pattern;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.common.serialization.Serdes.StringSerde;
-import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.kafka.streams.kstream.Consumed;
 import org.junit.jupiter.api.Test;
 import org.testcontainers.kafka.KafkaContainer;
@@ -214,7 +216,7 @@ public SerdeConfig defaultSerializationConfig() {
     @ExpectSystemExitWithStatus(1)
     void shouldExitWithErrorInTopology() throws InterruptedException {
         final String input = "input";
-        try (final KafkaContainer kafkaCluster = newKafkaCluster();
+        try (final KafkaContainer kafkaCluster = newCluster();
                 final KafkaStreamsApplication<?> app = new SimpleKafkaStreamsApplication<>(() -> new StreamsApp() {
                     @Override
                     public void buildTopology(final TopologyBuilder builder) {
@@ -240,8 +242,12 @@ public SerdeConfig defaultSerializationConfig() {
                     "--bootstrap-server", kafkaCluster.getBootstrapServers(),
                     "--input-topics", input
             );
-            new KafkaContainerHelper(kafkaCluster).send()
-                    .to(input, List.of(new KeyValue<>("foo", "bar")));
+            new KafkaTestClient(KafkaEndpointConfig.builder()
+                    .bootstrapServers(kafkaCluster.getBootstrapServers())
+                    .build()).send()
+                    .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
+                    .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
+                    .to(input, List.of(new SimpleProducerRecord<>("foo", "bar")));
             Thread.sleep(Duration.ofSeconds(10).toMillis());
         }
     }
@@ -251,7 +257,7 @@ public SerdeConfig defaultSerializationConfig() {
     void shouldExitWithSuccessCodeOnShutdown() {
         final String input = "input";
         final String output = "output";
-        try (final KafkaContainer kafkaCluster = newKafkaCluster();
+        try (final KafkaContainer kafkaCluster = newCluster();
                 final KafkaStreamsApplication<?> app = new SimpleKafkaStreamsApplication<>(() -> new StreamsApp() {
                     @Override
                     public void buildTopology(final TopologyBuilder builder) {
@@ -270,20 +276,24 @@ public SerdeConfig defaultSerializationConfig() {
                     }
                 })) {
             kafkaCluster.start();
-            final KafkaContainerHelper kafkaContainerHelper = new KafkaContainerHelper(kafkaCluster);
-            try (final ImprovedAdminClient admin = kafkaContainerHelper.admin()) {
-                admin.getTopicClient().createTopic(output, DEFAULT_TOPIC_SETTINGS, emptyMap());
-            }
+            final KafkaTestClient testClient = new KafkaTestClient(KafkaEndpointConfig.builder()
+                    .bootstrapServers(kafkaCluster.getBootstrapServers())
+                    .build());
+            testClient.createTopic(output);
 
             runApp(app,
                     "--bootstrap-server", kafkaCluster.getBootstrapServers(),
                     "--input-topics", input,
                     "--output-topic", output
             );
-            kafkaContainerHelper.send()
-                    .to(input, List.of(new KeyValue<>("foo", "bar")));
-            final List<ConsumerRecord<String, String>> keyValues = kafkaContainerHelper.read()
-                    .from(output, Duration.ofSeconds(10));
+            testClient.send()
+                    .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
+                    .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
+                    .to(input, List.of(new SimpleProducerRecord<>("foo", "bar")));
+            final List<ConsumerRecord<String, String>> keyValues = testClient.read()
+                    .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
+                    .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
+                    .from(output, POLL_TIMEOUT);
             assertThat(keyValues)
                     .hasSize(1)
                     .anySatisfy(kv -> {
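For reference, the new test-client flow introduced by this diff, pulled out of the test for readability. This is a minimal sketch that reuses only the calls visible above (KafkaTestClient, KafkaEndpointConfig, SenderBuilder.SimpleProducerRecord, KafkaTest.POLL_TIMEOUT); the class and method names in the sketch itself are illustrative, not part of the library.

package com.bakdata.kafka;

import static com.bakdata.kafka.KafkaTest.POLL_TIMEOUT;

import com.bakdata.kafka.SenderBuilder.SimpleProducerRecord;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

// Illustrative helper; assumes a broker reachable at bootstrapServers.
final class KafkaTestClientUsageSketch {

    static List<ConsumerRecord<String, String>> roundTrip(final String bootstrapServers, final String topic) {
        // KafkaTestClient is configured from a KafkaEndpointConfig instead of wrapping the container
        final KafkaTestClient testClient = new KafkaTestClient(KafkaEndpointConfig.builder()
                .bootstrapServers(bootstrapServers)
                .build());
        // replaces the ImprovedAdminClient + DEFAULT_TOPIC_SETTINGS dance for topic creation
        testClient.createTopic(topic);
        // serializers are now passed per send via with(...)
        testClient.send()
                .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
                .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
                .to(topic, List.of(new SimpleProducerRecord<>("foo", "bar")));
        // reading mirrors sending; POLL_TIMEOUT replaces the hard-coded Duration.ofSeconds(10)
        return testClient.read()
                .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
                .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
                .from(topic, POLL_TIMEOUT);
    }
}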