@@ -7,6 +7,7 @@ import datadog.trace.bootstrap.instrumentation.api.Tags
 import datadog.trace.common.writer.ListWriter
 import datadog.trace.core.DDSpan
 import datadog.trace.core.datastreams.StatsGroup
+import datadog.trace.test.util.Flaky
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.clients.consumer.KafkaConsumer
@@ -150,6 +151,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
     return true
   }
 
+  @Flaky
   def "test kafka produce and consume"() {
     setup:
     // Create and start a Kafka container using Testcontainers
@@ -292,6 +294,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
     kafkaContainer.stop()
   }
 
+  @Flaky
   def "test producing message too large"() {
     setup:
     // set a low max request size, so that we can crash it
@@ -314,6 +317,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
     producer.close()
   }
 
+  @Flaky
   def "test spring kafka template produce and consume"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)
@@ -457,6 +461,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
     kafkaContainer.stop()
   }
 
+  @Flaky
   def "test pass through tombstone"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)
@@ -528,6 +533,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
 
   }
 
+  @Flaky
   def "test records(TopicPartition) kafka consume"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)
@@ -588,6 +594,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
 
   }
 
+  @Flaky
   def "test records(TopicPartition).subList kafka consume"() {
     setup:
 
@@ -650,6 +657,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
 
   }
 
+  @Flaky
   def "test records(TopicPartition).forEach kafka consume"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)
@@ -711,6 +719,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
 
   }
 
+  @Flaky
   def "test iteration backwards over ConsumerRecords"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)
@@ -825,6 +834,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
 
   }
 
+  @Flaky
   def "test kafka client header propagation manual config"() {
     setup:
     KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC)