 import org.testcontainers.utility.DockerImageName;

 public class KafkaReadIT {
-  private static final String TOPIC_NAME = "topic-" + UUID.randomUUID();
+  private static final String[] TOPIC_NAMES = {
+    "topic-" + UUID.randomUUID(),
+    "topic-" + UUID.randomUUID()
+  };

-  private static final String OUTPUT_FILE_NAME_PREFIX = UUID.randomUUID().toString();
-  private static final String OUTPUT_FILE_NAME = OUTPUT_FILE_NAME_PREFIX + "-00000-of-00001.txt";
+  // The TextIO connector appends this suffix to the pipeline output file.
+  private static final String OUTPUT_FILE_SUFFIX = "-00000-of-00001.txt";

   private static KafkaContainer kafka;
   private static String bootstrapServer;
@@ -54,40 +57,61 @@ public void setUp() throws ExecutionException, InterruptedException {
     kafka.start();
     bootstrapServer = kafka.getBootstrapServers();

-    // Create a topic.
+    // Create topics.
     Properties properties = new Properties();
     properties.put("bootstrap.servers", bootstrapServer);
     AdminClient adminClient = AdminClient.create(properties);
-    var topic = new NewTopic(TOPIC_NAME, 1, (short) 1);
-    adminClient.createTopics(Arrays.asList(topic));
+    for (String topicName : TOPIC_NAMES) {
+      var topic = new NewTopic(topicName, 1, (short) 1);
+      adminClient.createTopics(Arrays.asList(topic));
+    }

-    // Send a message to the topic.
-    properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+    // Send messages to the topics.
+    properties.put("key.serializer", "org.apache.kafka.common.serialization.LongSerializer");
     properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
-    KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
-    ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, "key-0", "event-0");
-    Future future = producer.send(record);
-    future.get();
+    KafkaProducer<Long, String> producer = new KafkaProducer<>(properties);
+    for (String topicName : TOPIC_NAMES) {
+      var record = new ProducerRecord<>(topicName, 0L, topicName + "-event-0");
+      Future future = producer.send(record);
+      future.get();
+    }
   }

   @After
   public void tearDown() throws IOException {
     kafka.stop();
-    Files.deleteIfExists(Paths.get(OUTPUT_FILE_NAME));
+    for (String topicName : TOPIC_NAMES) {
+      Files.deleteIfExists(Paths.get(topicName + OUTPUT_FILE_SUFFIX));
+    }
   }

   @Test
   public void testApacheKafkaRead() throws IOException {
     PipelineResult.State state = KafkaRead.main(new String[] {
       "--runner=DirectRunner",
       "--bootstrapServer=" + bootstrapServer,
-      "--topic=" + TOPIC_NAME,
-      "--outputPath=" + OUTPUT_FILE_NAME_PREFIX
+      "--topic=" + TOPIC_NAMES[0],
+      "--outputPath=" + TOPIC_NAMES[0]  // Use the topic name as the output file name.
     });
     assertEquals(PipelineResult.State.DONE, state);
+    verifyOutput(TOPIC_NAMES[0]);
+  }
+
+  @Test
+  public void testApacheKafkaReadTopics() throws IOException {
+    PipelineResult.State state = KafkaReadTopics.main(new String[] {
+      "--runner=DirectRunner",
+      "--bootstrapServer=" + bootstrapServer,
+      "--topic1=" + TOPIC_NAMES[0],
+      "--topic2=" + TOPIC_NAMES[1]
+    });
+    assertEquals(PipelineResult.State.DONE, state);
+    verifyOutput(TOPIC_NAMES[0]);
+    verifyOutput(TOPIC_NAMES[1]);
+  }

-    // Verify the pipeline wrote the output.
-    String output = Files.readString(Paths.get(OUTPUT_FILE_NAME));
-    assertTrue(output.contains("event-0"));
+  private void verifyOutput(String topic) throws IOException {
+    String output = Files.readString(Paths.get(topic + OUTPUT_FILE_SUFFIX));
+    assertTrue(output.contains(topic + "-event-0"));
   }
 }
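
For context, the new testApacheKafkaReadTopics test drives a pipeline that reads from two Kafka topics and writes each topic's records to a text file named after the topic (hence the topic + OUTPUT_FILE_SUFFIX checks above). Below is a minimal sketch of what such a pipeline could look like, assuming Beam's KafkaIO and TextIO connectors; the class name KafkaReadTopicsSketch and the hard-coded option values are illustrative and are not taken from the sample under test.

import java.util.List;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.beam.sdk.transforms.Values;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class KafkaReadTopicsSketch {
  public static void main(String[] args) {
    // Illustrative stand-ins for the --bootstrapServer, --topic1, and --topic2 options.
    String bootstrapServer = "localhost:9092";
    List<String> topics = List.of("topic-a", "topic-b");

    Pipeline pipeline = Pipeline.create();
    for (String topic : topics) {
      pipeline
          // Bound the read so the DirectRunner can finish and report DONE.
          .apply("Read " + topic, KafkaIO.<Long, String>read()
              .withBootstrapServers(bootstrapServer)
              .withTopic(topic)
              .withKeyDeserializer(LongDeserializer.class)
              .withValueDeserializer(StringDeserializer.class)
              .withMaxNumRecords(1)
              .withoutMetadata())
          // Keep only the record values, e.g. "topic-a-event-0".
          .apply("Values " + topic, Values.create())
          // With one shard, TextIO writes <topic>-00000-of-00001.txt,
          // which matches OUTPUT_FILE_SUFFIX in the test.
          .apply("Write " + topic, TextIO.write()
              .to(topic)
              .withSuffix(".txt")
              .withNumShards(1));
    }
    pipeline.run().waitUntilFinish();
  }
}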