From b84dec93f120c045f76831cca2dc7aec978bf725 Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Fri, 29 Aug 2025 19:59:35 +0500 Subject: [PATCH 01/13] add task functions to kotlin module --- .../kotlin/test/controllers/TimeController.kt | 7 +- .../test/service/KafkaReadingService.kt | 76 ++++++++++++++++++- .../test/service/KafkaSendingService.kt | 10 ++- .../src/main/resources/application.yml | 8 ++ 4 files changed, 98 insertions(+), 3 deletions(-) diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt index a23d5cb7..df3b8b3f 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt @@ -33,9 +33,14 @@ class TimeController( logger.info { "Called method getNow. 
TraceId = $traceId" } val nowFromRemote = publicApiService.getZonedTime() val now = nowFromRemote ?: LocalDateTime.now(clock) - kafkaSendingService.sendNotification("Current time = $now") + val message = "Current time = $now" + kafkaSendingService.sendNotification(message) .thenRun { logger.info { "Awaiting acknowledgement from Kafka" } } .get() + + kafkaSendingService.sendNotificationToOtherTopic(message) + .thenRun { logger.info { "Awaiting acknowledgement from Kafka with batch" } } + .get() return now } } diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index 05c87676..bf232822 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -9,7 +9,10 @@ package io.github.mfvanek.spring.boot3.kotlin.test.service import io.github.oshai.kotlinlogging.KotlinLogging import io.github.oshai.kotlinlogging.withLoggingContext +import io.micrometer.tracing.Link import io.micrometer.tracing.Tracer +import io.micrometer.tracing.propagation.Propagator +import io.opentelemetry.context.propagation.TextMapGetter import org.apache.kafka.clients.consumer.ConsumerRecord import org.springframework.beans.factory.annotation.Value import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate @@ -27,7 +30,8 @@ class KafkaReadingService( @Value("\${app.tenant.name}") private val tenantName: String, private val tracer: Tracer, private val clock: Clock, - private val jdbcTemplate: NamedParameterJdbcTemplate + private val jdbcTemplate: NamedParameterJdbcTemplate, + private val propagator: Propagator ) { @KafkaListener(topics = ["\${spring.kafka.template.default-topic}"]) fun 
listen(message: ConsumerRecord, ack: Acknowledgment) { @@ -50,4 +54,74 @@ class KafkaReadingService( ) ) } + + @KafkaListener( + id = "\${spring.kafka.opentelemetry.additional-consumer-groupId}", + topics = ["\${spring.kafka.opentelemetry.additional-topic}"], + batch = "true" + ) + fun listenAdditional(records: List>, ack: Acknowledgment) { + // По умолчанию здесь не будет контекста трассировки. Вся обработка, связанная с трассировкой, должна быть выполнена вручную. + // Есть несколько вариантов: + // 1) создать новый спан и контекст трассировки для всех последующих операций; + // 2) брать контекст трассировки из каждой записи и использовать его на время обработки этой записи; + // 3) взять контекст трассировки из первой записи и использовать его для всех последующих операций. + // Рекомендуется использовать комбинацию вариантов 1 и 2. + + // Реализация варианта №1: вручную создать спан, что приведет к созданию контекста трассировки. + // Этого можно и не делать, но тогда логирование ниже и все последующие операции будут без traceId. + val batchSpan = tracer.startScopedSpan("batch-processing") + // val batchSpan = tracer.spanBuilder() + // .setParent(tracer.traceContextBuilder().build()) + // .start() + logger.info { "current span: ${tracer.currentSpan()}" } + try { + logger.info { + "Received from Kafka ${records.size} records" + } // Это сообщение будет в логах со своим собственным traceId, если создан спан выше + records.forEach { record -> + restoreContextAndProcessSingleRecordIfNeed(record, ack) + } + ack.acknowledge() + } catch (e: Throwable) { + batchSpan.error(e) + throw e + } finally { + batchSpan.end() + } + } + + private fun restoreContextAndProcessSingleRecordIfNeed(record: ConsumerRecord, ack: Acknowledgment) { + // Реализация варианта №2. + // Берём заголовок traceparent из записи и восстанавливаем контекст трассировки на основе него. + // В результате цепочка спанов продолжится. Все последующие вызовы пойдут с тем же самым traceId. 
+ // Если в записи из Кафки не будет заголовка traceparent, то будет использоваться текущий контекст трассировки (при его наличии). + val kafkaPropagatorGetter = Propagator.Getter> { carrier, _ -> + carrier.headers().find { it.key() == "traceparent" }?.value()?.decodeToString() + } + + val builder = propagator.extract(record, kafkaPropagatorGetter) + // val spanFromRecord = builder.name("processing-record-from-kafka").start() + val spanFromRecord = builder + .addLink(Link.NOOP) + .setParent(tracer.traceContextBuilder().build()) + .start() + try { + tracer.withSpan(spanFromRecord).use { + processSingleRecordIfNeed(record, ack) + } + } catch (e: Throwable) { + spanFromRecord.error(e) + throw e + } finally { + spanFromRecord.end() + } + } + + private fun processSingleRecordIfNeed(record: ConsumerRecord, ack: Acknowledgment) { + withLoggingContext("tenant.name" to tenantName) { + processMessage(record) + ack.acknowledge() + } + } } diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaSendingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaSendingService.kt index 7e965252..88f4289b 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaSendingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaSendingService.kt @@ -21,7 +21,8 @@ private val logger = KotlinLogging.logger {} @Service class KafkaSendingService( @Value("\${app.tenant.name}") private val tenantName: String, - private val kafkaTemplate: KafkaTemplate + private val kafkaTemplate: KafkaTemplate, + @Value("\${spring.kafka.template.additional-topic}") private val additionalTopic: String, ) { fun sendNotification(message: String): CompletableFuture> { withLoggingContext("tenant.name" to tenantName) { @@ -29,4 +30,11 @@ class KafkaSendingService( return 
kafkaTemplate.sendDefault(UUID.randomUUID(), message) } } + + fun sendNotificationToOtherTopic(message: String): CompletableFuture> { + withLoggingContext("tenant.name" to tenantName) { + logger.info { "Sending message \"$message\" to $additionalTopic of Kafka" } + return kafkaTemplate.send(additionalTopic, UUID.randomUUID(), message) + } + } } diff --git a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml index 5ffd1157..91d1b97e 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml @@ -33,14 +33,19 @@ spring: template: default-topic: open.telemetry.sb3.queue observation-enabled: true # Important!!! + additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + properties: + linger.ms: 100 + batch.size: 10000 listener: observation-enabled: true # Important!!! 
ack-mode: manual_immediate consumer: auto-offset-reset: earliest group-id: ${spring.kafka.template.default-topic}-group + additional-groupId: ${spring.kafka.template.additional-topic}-group client-id: open.telemetry.client bootstrap-servers: localhost:9092 security: @@ -49,6 +54,9 @@ spring: sasl: mechanism: PLAIN jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="${demo.kafka.opentelemetry.username}" password="${demo.kafka.opentelemetry.password}"; + opentelemetry: + additional-topic: open.telemetry.sb3.queue.additional + additional-consumer-groupId: open.telemetry.sb3.queue.additional-group jdbc: template: query-timeout: 1s From eb3073e130243206a0468eb2f5712e409888d8f9 Mon Sep 17 00:00:00 2001 From: Marina Zharinova Date: Sun, 31 Aug 2025 20:38:07 +0500 Subject: [PATCH 02/13] add span column, change unique constraint --- .../db/changelog/db.changelog-master.yaml | 4 +++ .../db/changelog/sql/add_span_column.sql | 7 +++++ .../sql/set_span_and_trace_unique.sql | 7 +++++ .../test/service/KafkaReadingService.kt | 31 +++---------------- .../src/main/resources/application.yml | 1 + 5 files changed, 24 insertions(+), 26 deletions(-) create mode 100644 db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql create mode 100644 db-migrations/src/main/resources/db/changelog/sql/set_span_and_trace_unique.sql diff --git a/db-migrations/src/main/resources/db/changelog/db.changelog-master.yaml b/db-migrations/src/main/resources/db/changelog/db.changelog-master.yaml index 95804e80..cb315ac1 100644 --- a/db-migrations/src/main/resources/db/changelog/db.changelog-master.yaml +++ b/db-migrations/src/main/resources/db/changelog/db.changelog-master.yaml @@ -3,3 +3,7 @@ databaseChangeLog: file: db/changelog/sql/schema.sql - include: file: db/changelog/sql/storage.sql + - include: + file: db/changelog/sql/add_span_column.sql + - include: + file: db/changelog/sql/set_span_and_trace_unique.sql diff --git 
a/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql b/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql new file mode 100644 index 00000000..b83dfa2d --- /dev/null +++ b/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql @@ -0,0 +1,7 @@ +--liquibase formatted sql + +--changeset marina.zharinova:2025.08.31:add span column +alter table otel_demo.storage add column span_id varchar(64); + +--changeset marina.zharinova:2025.08.31:comment on span_id +comment on column otel_demo.storage.span_id is 'SpanId of operation'; diff --git a/db-migrations/src/main/resources/db/changelog/sql/set_span_and_trace_unique.sql b/db-migrations/src/main/resources/db/changelog/sql/set_span_and_trace_unique.sql new file mode 100644 index 00000000..bc99e5d4 --- /dev/null +++ b/db-migrations/src/main/resources/db/changelog/sql/set_span_and_trace_unique.sql @@ -0,0 +1,7 @@ +--liquibase formatted sql + +--changeset ivan.vakhrushev:2025.08.31:remove unique from trace_id +alter table otel_demo.storage drop constraint storage_trace_id_key; + +--changeset marina.zharinova:2025.08.31:add constraint on trace_id with span_id +alter table otel_demo.storage add constraint trace_span_unique unique(trace_id, span_id); diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index bf232822..4a3449f1 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -9,10 +9,8 @@ package io.github.mfvanek.spring.boot3.kotlin.test.service import io.github.oshai.kotlinlogging.KotlinLogging import io.github.oshai.kotlinlogging.withLoggingContext -import 
io.micrometer.tracing.Link import io.micrometer.tracing.Tracer import io.micrometer.tracing.propagation.Propagator -import io.opentelemetry.context.propagation.TextMapGetter import org.apache.kafka.clients.consumer.ConsumerRecord import org.springframework.beans.factory.annotation.Value import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate @@ -44,12 +42,14 @@ class KafkaReadingService( private fun processMessage(message: ConsumerRecord) { val currentSpan = tracer.currentSpan() val traceId = currentSpan?.context()?.traceId().orEmpty() + val spanId = currentSpan?.context()?.spanId() logger.info { "Received record: ${message.value()} with traceId $traceId" } jdbcTemplate.update( - "insert into otel_demo.storage(message, trace_id, created_at) values(:msg, :traceId, :createdAt);", + "insert into otel_demo.storage(message, trace_id, span_id, created_at) values(:msg, :traceId, :currentSpan, :createdAt);", mapOf( "msg" to message.value(), "traceId" to traceId, + "currentSpan" to spanId, "createdAt" to LocalDateTime.now(clock) ) ) @@ -61,24 +61,12 @@ class KafkaReadingService( batch = "true" ) fun listenAdditional(records: List>, ack: Acknowledgment) { - // По умолчанию здесь не будет контекста трассировки. Вся обработка, связанная с трассировкой, должна быть выполнена вручную. - // Есть несколько вариантов: - // 1) создать новый спан и контекст трассировки для всех последующих операций; - // 2) брать контекст трассировки из каждой записи и использовать его на время обработки этой записи; - // 3) взять контекст трассировки из первой записи и использовать его для всех последующих операций. - // Рекомендуется использовать комбинацию вариантов 1 и 2. - - // Реализация варианта №1: вручную создать спан, что приведет к созданию контекста трассировки. - // Этого можно и не делать, но тогда логирование ниже и все последующие операции будут без traceId. 
val batchSpan = tracer.startScopedSpan("batch-processing") - // val batchSpan = tracer.spanBuilder() - // .setParent(tracer.traceContextBuilder().build()) - // .start() logger.info { "current span: ${tracer.currentSpan()}" } try { logger.info { "Received from Kafka ${records.size} records" - } // Это сообщение будет в логах со своим собственным traceId, если создан спан выше + } records.forEach { record -> restoreContextAndProcessSingleRecordIfNeed(record, ack) } @@ -92,20 +80,11 @@ class KafkaReadingService( } private fun restoreContextAndProcessSingleRecordIfNeed(record: ConsumerRecord, ack: Acknowledgment) { - // Реализация варианта №2. - // Берём заголовок traceparent из записи и восстанавливаем контекст трассировки на основе него. - // В результате цепочка спанов продолжится. Все последующие вызовы пойдут с тем же самым traceId. - // Если в записи из Кафки не будет заголовка traceparent, то будет использоваться текущий контекст трассировки (при его наличии). val kafkaPropagatorGetter = Propagator.Getter> { carrier, _ -> carrier.headers().find { it.key() == "traceparent" }?.value()?.decodeToString() } - val builder = propagator.extract(record, kafkaPropagatorGetter) - // val spanFromRecord = builder.name("processing-record-from-kafka").start() - val spanFromRecord = builder - .addLink(Link.NOOP) - .setParent(tracer.traceContextBuilder().build()) - .start() + val spanFromRecord = builder.name("processing-record-from-kafka").start() try { tracer.withSpan(spanFromRecord).use { processSingleRecordIfNeed(record, ack) diff --git a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml index 91d1b97e..56fca636 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml @@ -29,6 +29,7 @@ spring: url: 
jdbc:postgresql://localhost:6432/otel_demo_db?prepareThreshold=0&targetServerType=primary&hostRecheckSeconds=2&connectTimeout=1&socketTimeout=600 liquibase: change-log: classpath:/db/changelog/db.changelog-master.yaml + clear-checksums: true kafka: template: default-topic: open.telemetry.sb3.queue From 0c15d7ad316a0e0fcefd00c661c442fd17b05fa5 Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Sat, 13 Sep 2025 20:15:55 +0500 Subject: [PATCH 03/13] =?UTF-8?q?=D0=BF=D1=80=D0=B0=D0=B2=D0=BA=D0=B8=20?= =?UTF-8?q?=D0=BF=D0=BE=20=D0=B1=D0=B0=D1=82=D1=87=D0=B5=D0=B2=D0=BE=D0=B9?= =?UTF-8?q?=20=D0=BE=D1=82=D0=BF=D1=80=D0=B0=D0=B2=D0=BA=D0=B5=20=D0=B8=20?= =?UTF-8?q?=D1=82=D0=B5=D1=81=D1=82=D1=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config/checkstyle/checkstyle.xml | 2 +- .../kotlin/test/controllers/TimeController.kt | 2 +- .../test/service/KafkaReadingService.kt | 4 +- .../src/main/resources/application.yml | 1 - .../test/controllers/TimeControllerTest.kt | 17 +-- .../reactive/controllers/TimeController.java | 26 ++-- .../reactive/service/KafkaReadingService.java | 46 +++++++ .../reactive/service/KafkaSendingService.java | 11 ++ .../src/main/resources/application.yml | 5 + .../controllers/TimeControllerTest.java | 10 +- .../test/controllers/TimeController.java | 3 + .../test/service/KafkaReadingService.java | 51 +++++++- .../test/service/KafkaSendingService.java | 8 ++ .../src/main/resources/application.yml | 5 + .../test/controllers/HomeControllerTest.java | 30 +++++ .../test/controllers/TimeControllerTest.java | 13 +- .../boot3/test/service/KafkaTracingTest.java | 115 ++++++++++++++++++ .../test/service/PublicApiServiceTest.java | 12 ++ .../test/support/KafkaConsumerUtils.java | 4 +- .../spring/boot3/test/support/TestBase.java | 18 +++ 20 files changed, 346 insertions(+), 37 deletions(-) create mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/HomeControllerTest.java 
create mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml index 8566f59e..d82924e7 100644 --- a/config/checkstyle/checkstyle.xml +++ b/config/checkstyle/checkstyle.xml @@ -56,7 +56,7 @@ - + diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt index df3b8b3f..faaecfe6 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeController.kt @@ -25,7 +25,7 @@ class TimeController( private val kafkaSendingService: KafkaSendingService, private val publicApiService: PublicApiService ) { - + // http://localhost:8090/current-time @GetMapping(path = ["/current-time"]) fun getNow(): LocalDateTime { logger.trace { "tracer $tracer" } diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index 4a3449f1..7ede7eae 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -56,8 +56,8 @@ class KafkaReadingService( } @KafkaListener( - id = "\${spring.kafka.opentelemetry.additional-consumer-groupId}", - topics = ["\${spring.kafka.opentelemetry.additional-topic}"], + id = "\${spring.kafka.consumer.additional-groupId}", + topics = 
["\${spring.kafka.template.additional-topic}"], batch = "true" ) fun listenAdditional(records: List>, ack: Acknowledgment) { diff --git a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml index 56fca636..91d1b97e 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml @@ -29,7 +29,6 @@ spring: url: jdbc:postgresql://localhost:6432/otel_demo_db?prepareThreshold=0&targetServerType=primary&hostRecheckSeconds=2&connectTimeout=1&socketTimeout=600 liquibase: change-log: classpath:/db/changelog/db.changelog-master.yaml - clear-checksums: true kafka: template: default-topic: open.telemetry.sb3.queue diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt index 6f0797a6..6f818c76 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt @@ -91,12 +91,15 @@ class TimeControllerTest : TestBase() { assertThat(output.all) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\"") - val messageFromDb = namedParameterJdbcTemplate.queryForObject( - "select message from otel_demo.storage where trace_id = :traceId", - mapOf("traceId" to traceId), - String::class.java - ) - assertThat(messageFromDb).isEqualTo(received.value()) + val tracesFromDb = namedParameterJdbcTemplate + .query( + "select trace_id from otel_demo.storage where message like :message", + mapOf("message" to received.value()) + ) { rs, _ -> + rs.getString("trace_id") + } + 
assertThat(tracesFromDb.size).isEqualTo(2) + assertThat(tracesFromDb.stream().filter { it == traceId }).hasSize(2) } @Order(2) @@ -179,7 +182,7 @@ class TimeControllerTest : TestBase() { .await() .atMost(10, TimeUnit.SECONDS) .pollInterval(Duration.ofMillis(500L)) - .until { countRecordsInTable() >= 1L } + .until { countRecordsInTable() >= 2L } } } diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java index 8217ac43..cb265cfc 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java @@ -32,22 +32,26 @@ public class TimeController { private final KafkaSendingService kafkaSendingService; private final PublicApiService publicApiService; - // http://localhost:8080/current-time + // http://localhost:8081/current-time @GetMapping(path = "/current-time") public Mono getNow() { - log.trace("tracer {}", tracer); - - final String traceId = Optional.ofNullable(tracer.currentSpan()) - .map(Span::context) - .map(TraceContext::traceId) - .orElse(null); - log.info("Called method getNow. TraceId = {}", traceId); - - return publicApiService.getZonedTime() + return Mono.just(tracer) + .map(tracer -> { + log.trace("tracer {}", tracer); + return Optional.ofNullable(tracer.currentSpan()) + .map(Span::context) + .map(TraceContext::traceId) + .orElse(null); + }) + .doOnNext(traceId -> log.info("Called method getNow. 
TraceId = {}", traceId)) + .then(publicApiService.getZonedTime()) .defaultIfEmpty(LocalDateTime.now(clock)) .flatMap(now -> kafkaSendingService.sendNotification("Current time = " + now) .doOnSuccess(v -> log.info("Awaiting acknowledgement from Kafka")) .thenReturn(now) - ); + ) + .flatMap(now -> kafkaSendingService.sendNotificationToOtherTopic("Current time = " + now) + .doOnSuccess(v -> log.info("Awaiting acknowledgement from Kafka with batch")) + .thenReturn(now)); } } diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java index 2fddb203..4e7e19ab 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java @@ -7,8 +7,10 @@ package io.github.mfvanek.spring.boot3.reactive.service; +import io.micrometer.tracing.ScopedSpan; import io.micrometer.tracing.Span; import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.propagation.Propagator; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -21,6 +23,8 @@ import java.time.Clock; import java.time.LocalDateTime; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.UUID; @@ -34,9 +38,51 @@ public class KafkaReadingService { private final NamedParameterJdbcTemplate jdbcTemplate; @Value("${app.tenant.name}") private String tenantName; + private final Propagator propagator; @KafkaListener(topics = "${spring.kafka.template.default-topic}") public void listen(ConsumerRecord message, Acknowledgment ack) { + processSingleRecordIfNeed(message, ack); + } + + @KafkaListener( + id = 
"${spring.kafka.consumer.additional-groupId}", + topics = "${spring.kafka.template.additional-topic}", + batch = "true" + ) + public void listenAdditional(List> records, Acknowledgment ack) { + final ScopedSpan batchSpan = tracer.startScopedSpan("batch-processing"); + log.info("current span: {}", tracer.currentSpan()); + try { + log.info( + "Received from Kafka {} records", records.size() + ); + records.forEach(record -> + restoreContextAndProcessSingleRecordIfNeed(record, ack)); + ack.acknowledge(); + } catch (Exception e) { + batchSpan.error(e); + throw e; + } finally { + batchSpan.end(); + } + } + + private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record, Acknowledgment ack) { + final Propagator.Getter> kafkaPropagatorGetter = (carrier, key) -> carrier.headers().lastHeader("traceparent") == null ? null : new String(carrier.headers().lastHeader("traceparent").value(), java.nio.charset.StandardCharsets.UTF_8); + final Span.Builder builder = propagator.extract(record, kafkaPropagatorGetter); + final Span spanFromRecord = builder.name("processing-record-from-kafka").start(); + try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { + processSingleRecordIfNeed(record, ack); + } catch (Exception e) { + spanFromRecord.error(e); + throw e; + } finally { + spanFromRecord.end(); + } + } + + private void processSingleRecordIfNeed(ConsumerRecord message, Acknowledgment ack) { try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { final Span currentSpan = tracer.currentSpan(); final String traceId = currentSpan != null ? 
currentSpan.context().traceId() : ""; diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaSendingService.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaSendingService.java index c254adb2..9f608187 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaSendingService.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaSendingService.java @@ -28,6 +28,7 @@ public class KafkaSendingService { private final KafkaTemplate kafkaTemplate; @Value("${app.tenant.name}") private String tenantName; + @Value("${spring.kafka.template.additional-topic}") private String additionalTopic; public Mono> sendNotification(@Nonnull final String message) { return Mono.deferContextual(contextView -> { @@ -38,4 +39,14 @@ public Mono> sendNotification(@Nonnull final String mes } }); } + + public Mono> sendNotificationToOtherTopic(@Nonnull final String message) { + return Mono.deferContextual(contextView -> { + try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { + log.info("Sending message \"{}\" to {} of Kafka", message, additionalTopic); + return Mono.fromFuture(() -> kafkaTemplate.send(additionalTopic, UUID.randomUUID(), message)) + .subscribeOn(Schedulers.boundedElastic()); + } + }); + } } diff --git a/spring-boot-3-demo-app-reactive/src/main/resources/application.yml b/spring-boot-3-demo-app-reactive/src/main/resources/application.yml index 138f52b1..a3904d66 100644 --- a/spring-boot-3-demo-app-reactive/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-reactive/src/main/resources/application.yml @@ -26,14 +26,19 @@ spring: template: default-topic: open.telemetry.sb3.queue observation-enabled: true # Important!!! 
+ additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + properties: + linger.ms: 100 + batch.size: 10000 listener: observation-enabled: true # Important!!! ack-mode: manual_immediate consumer: auto-offset-reset: earliest group-id: ${spring.kafka.template.default-topic}-group + additional-groupId: ${spring.kafka.template.additional-topic}-group client-id: open.telemetry.client bootstrap-servers: localhost:9092 security: diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java index b57913df..4a31ccc7 100644 --- a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java @@ -101,10 +101,10 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int assertThat(output.getAll()) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\""); - final String messageFromDb = namedParameterJdbcTemplate.queryForObject("select message from otel_demo.storage where trace_id = :traceId", - Map.of("traceId", traceId), String.class); - assertThat(messageFromDb) - .isEqualTo(received.value()); + final List tracesFromDb = namedParameterJdbcTemplate.query("select trace_id from otel_demo.storage where message like :message", + Map.of("message", received.value()), (rs, rowNum) -> rs.getString("trace_id")); + assertThat(tracesFromDb.size()).isEqualTo(2); + assertThat(tracesFromDb.stream().filter(it -> it.equals(traceId))).hasSize(2); } @Order(2) @@ -167,6 +167,6 @@ private void awaitStoringIntoDatabase() { .await() .atMost(10, 
TimeUnit.SECONDS) .pollInterval(Duration.ofMillis(500L)) - .until(() -> countRecordsInTable() >= 1L); + .until(() -> countRecordsInTable() >= 2L); } } diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/controllers/TimeController.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/controllers/TimeController.java index 19df50de..b755a3ad 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/controllers/TimeController.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/controllers/TimeController.java @@ -47,6 +47,9 @@ public LocalDateTime getNow() { kafkaSendingService.sendNotification("Current time = " + now) .thenRun(() -> log.info("Awaiting acknowledgement from Kafka")) .get(); + kafkaSendingService.sendNotificationToOtherTopic("Current time = " + now) + .thenRun(() -> log.info("Awaiting acknowledgement from Kafka with batch")) + .get(); return now; } } diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java index 8dadc668..02a54811 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java @@ -7,8 +7,10 @@ package io.github.mfvanek.spring.boot3.test.service; +import io.micrometer.tracing.ScopedSpan; import io.micrometer.tracing.Span; import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.propagation.Propagator; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -21,6 +23,8 @@ import java.time.Clock; import java.time.LocalDateTime; +import java.util.Arrays; +import java.util.List; import java.util.Map; import 
java.util.UUID; @@ -34,17 +38,62 @@ public class KafkaReadingService { private final Tracer tracer; private final Clock clock; private final NamedParameterJdbcTemplate jdbcTemplate; + private final Propagator propagator; @KafkaListener(topics = "${spring.kafka.template.default-topic}") public void listen(ConsumerRecord message, Acknowledgment ack) { + processSingleRecordIfNeed(message, ack); + } + + @KafkaListener( + id = "${spring.kafka.consumer.additional-groupId}", + topics = "${spring.kafka.template.additional-topic}", + batch = "true" + ) + public void listenAdditional(List> records, Acknowledgment ack) { + final ScopedSpan batchSpan = tracer.startScopedSpan("batch-processing"); + log.info("current span: {}", tracer.currentSpan()); + try { + log.info( + "Received from Kafka {} records", records.size() + ); + records.forEach(record -> + restoreContextAndProcessSingleRecordIfNeed(record, ack)); + ack.acknowledge(); + } catch (Exception e) { + batchSpan.error(e); + throw e; + } finally { + batchSpan.end(); + } + } + + private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record, Acknowledgment ack) { + final Propagator.Getter> kafkaPropagatorGetter = (carrier, key) -> Arrays.toString(carrier.headers().lastHeader("traceparent").value()); + final Span.Builder builder = propagator.extract(record, kafkaPropagatorGetter); + final Span spanFromRecord = builder.name("processing-record-from-kafka").start(); + try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { + processSingleRecordIfNeed(record, ack); + } catch (Exception e) { + spanFromRecord.error(e); + throw e; + } finally { + spanFromRecord.end(); + } + } + + private void processSingleRecordIfNeed(ConsumerRecord message, Acknowledgment ack) { try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { final Span currentSpan = tracer.currentSpan(); final String traceId = currentSpan != null ? 
currentSpan.context().traceId() : ""; + final String spanId = currentSpan != null ? currentSpan.context().spanId() : ""; log.info("Received record: {} with traceId {}", message.value(), traceId); - jdbcTemplate.update("insert into otel_demo.storage(message, trace_id, created_at) values(:msg, :traceId, :createdAt);", + jdbcTemplate.update( + "insert into otel_demo.storage(message, trace_id, span_id, created_at) values(:msg, :traceId, :currentSpan, :createdAt);", Map.ofEntries( Map.entry("msg", message.value()), Map.entry("traceId", traceId), + Map.entry("currentSpan", spanId), Map.entry("createdAt", LocalDateTime.now(clock)) ) ); diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaSendingService.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaSendingService.java index 5076980b..a7ad5b62 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaSendingService.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaSendingService.java @@ -27,6 +27,7 @@ public class KafkaSendingService { @Value("${app.tenant.name}") private String tenantName; private final KafkaTemplate kafkaTemplate; + @Value("${spring.kafka.template.additional-topic}") private String additionalTopic; public CompletableFuture> sendNotification(@Nonnull final String message) { try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { @@ -34,4 +35,11 @@ public CompletableFuture> sendNotification(@Nonnull fin return kafkaTemplate.sendDefault(UUID.randomUUID(), message); } } + + public CompletableFuture> sendNotificationToOtherTopic(@Nonnull final String message) { + try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { + log.info("Sending message \"{}\" to \"{}\" of Kafka", message, additionalTopic); + return kafkaTemplate.send(additionalTopic, UUID.randomUUID(), message); + } + } } diff 
--git a/spring-boot-3-demo-app/src/main/resources/application.yml b/spring-boot-3-demo-app/src/main/resources/application.yml index d94fd72b..aa478faf 100644 --- a/spring-boot-3-demo-app/src/main/resources/application.yml +++ b/spring-boot-3-demo-app/src/main/resources/application.yml @@ -33,14 +33,19 @@ spring: template: default-topic: open.telemetry.sb3.queue observation-enabled: true # Important!!! + additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + properties: + linger.ms: 100 + batch.size: 10000 listener: observation-enabled: true # Important!!! ack-mode: manual_immediate consumer: auto-offset-reset: earliest group-id: ${spring.kafka.template.default-topic}-group + additional-groupId: ${spring.kafka.template.additional-topic}-group client-id: open.telemetry.client bootstrap-servers: localhost:9092 security: diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/HomeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/HomeControllerTest.java new file mode 100644 index 00000000..d591c3a4 --- /dev/null +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/HomeControllerTest.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.controllers; + +import io.github.mfvanek.spring.boot3.test.support.TestBase; +import org.junit.jupiter.api.Test; + +import static io.github.mfvanek.spring.boot3.test.filters.TraceIdInResponseServletFilter.TRACE_ID_HEADER_NAME; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +class HomeControllerTest extends TestBase { + + @Test + void homeControllerShouldWork() { + final String result = webTestClient.get() + .uri("/") + .exchange() + .expectStatus().isEqualTo(200) + .expectHeader().exists(TRACE_ID_HEADER_NAME) + .expectBody(String.class) + .returnResult() + .getResponseBody(); + assertThat(result).isEqualTo("Hello!"); + } +} diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java index 374794d4..c1b67b94 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java @@ -91,7 +91,8 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int .isBefore(LocalDateTime.now(clock)); assertThat(output.getAll()) .contains("Called method getNow. 
TraceId = " + traceId) - .contains("Awaiting acknowledgement from Kafka"); + .contains("Awaiting acknowledgement from Kafka") + .contains("Awaiting acknowledgement from Kafka with batch"); final ConsumerRecord received = consumerRecords.poll(10, TimeUnit.SECONDS); assertThat(received).isNotNull(); @@ -102,10 +103,10 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int assertThat(output.getAll()) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\""); - final String messageFromDb = namedParameterJdbcTemplate.queryForObject("select message from otel_demo.storage where trace_id = :traceId", - Map.of("traceId", traceId), String.class); - assertThat(messageFromDb) - .isEqualTo(received.value()); + final List tracesFromDb = namedParameterJdbcTemplate.query("select trace_id from otel_demo.storage where message like :message", + Map.of("message", received.value()), (rs, rowNum) -> rs.getString("trace_id")); + assertThat(tracesFromDb.size()).isEqualTo(2); + assertThat(tracesFromDb.stream().filter(it -> it.equals(traceId))).hasSize(1); } @Order(2) @@ -168,6 +169,6 @@ private void awaitStoringIntoDatabase() { .await() .atMost(10, TimeUnit.SECONDS) .pollInterval(Duration.ofMillis(500L)) - .until(() -> countRecordsInTable() >= 1L); + .until(() -> countRecordsInTable() >= 2L); } } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java new file mode 100644 index 00000000..fc8db197 --- /dev/null +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.service; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.github.mfvanek.spring.boot3.test.support.TestBase; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.Tracer; + +import java.util.Optional; + +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.header.internals.RecordHeader; +import org.apache.kafka.common.record.TimestampType; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.kafka.core.ConsumerFactory; +import org.springframework.kafka.support.Acknowledgment; +import org.springframework.test.context.bean.override.mockito.MockitoBean; + +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyMap; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class KafkaTracingTest extends TestBase { + + @MockitoBean + private ConsumerFactory consumerFactory; + @Autowired + private KafkaReadingService kafkaReadingService; + @Autowired + private Tracer tracer; + @Autowired + private ObservationRegistry observationRegistry; + + 
@Test + @SuppressFBWarnings(value = "PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS", justification = "Repeated mock setup and stubbing calls are intentional in this test.") + void errorSpanWhenListenerFails() { + try (Consumer mockConsumer = mock(Consumer.class)) { + when(consumerFactory.createConsumer()).thenReturn(mockConsumer); + when(mockConsumer.poll(any(Duration.class))).thenReturn(new ConsumerRecords<>(Map.of( + new TopicPartition("test-topic", 0), + List.of(createTestConsumerRecord("test-message")) + ))); + doThrow(new RuntimeException("Commit failed")) + .when(mockConsumer) + .commitSync(anyMap()); + final Acknowledgment mockAck = mock(Acknowledgment.class); + doAnswer(invocation -> { + mockConsumer.commitSync(); + return null; + }).when(mockAck).acknowledge(); + + final AtomicReference thrownException = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + Observation.createNotStarted("test", observationRegistry).observe(() -> { + try { + kafkaReadingService.listenAdditional(List.of(createTestConsumerRecord("test-message")), mockAck); + } catch (Exception e) { + thrownException.set(e); + } finally { + latch.countDown(); + } + assertThat(Objects.requireNonNull(tracer.currentSpan()).error(thrownException.get())).isNotNull(); + }); + assertThat(thrownException.get()).isNotNull(); + assertThat(thrownException.get().getMessage()).contains("Cannot invoke " + + "\"org.apache.kafka.common.header.Header.value()\" because the return value of " + + "\"org.apache.kafka.common.header.Headers.lastHeader(String)\" is null"); + } + } + + private ConsumerRecord createTestConsumerRecord(String value) { + final Headers headers = new org.apache.kafka.common.header.internals.RecordHeaders(); + headers.add(new RecordHeader("header", "1".getBytes(StandardCharsets.UTF_8))); + + return new ConsumerRecord<>( + "test-topic", + 0, + 0L, + System.currentTimeMillis(), + TimestampType.CREATE_TIME, + 0L, + 0, + 0, + UUID.randomUUID(), + value, + headers, + Optional.empty() + ); + } +} diff
--git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java index b6df16f9..247f9b27 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java @@ -84,4 +84,16 @@ void retriesOnceToGetZonedTime(@Nonnull final CapturedOutput output) { verify(2, getRequestedFor(urlPathMatching("/" + zoneName))); } + + @Test + void throwsJsonProcessingExceptionWithBadResponse(CapturedOutput output) { + final String zoneName = stubBadResponse(); + Observation.createNotStarted("test", observationRegistry).observe(() -> { + final LocalDateTime result = publicApiService.getZonedTime(); + assertThat(result).isNull(); + assertThat(Objects.requireNonNull(tracer.currentSpan()).context().traceId()).isNotNull(); + assertThat(output.getAll()).contains("Failed to convert response"); + }); + verify(1, getRequestedFor(urlPathMatching("/" + zoneName))); + } } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/KafkaConsumerUtils.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/KafkaConsumerUtils.java index a87fada4..f4c896fe 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/KafkaConsumerUtils.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/KafkaConsumerUtils.java @@ -31,7 +31,7 @@ public class KafkaConsumerUtils { public KafkaMessageListenerContainer setUpKafkaConsumer( @Nonnull final KafkaProperties kafkaProperties, @Nonnull final BlockingQueue> consumerRecords) { - final var containerProperties
= new ContainerProperties(kafkaProperties.getTemplate().getDefaultTopic(), "open.telemetry.sb3.queue.additional"); final Map consumerProperties = KafkaTestUtils.consumerProps(KafkaInitializer.getBootstrapSevers(), "test-group", "false"); consumerProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); consumerProperties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); @@ -41,7 +41,7 @@ public KafkaMessageListenerContainer setUpKafkaConsumer( final var container = new KafkaMessageListenerContainer<>(consumer, containerProperties); container.setupMessageListener((MessageListener) consumerRecords::add); container.start(); - ContainerTestUtils.waitForAssignment(container, 1); + ContainerTestUtils.waitForAssignment(container, 2); return container; } } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/TestBase.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/TestBase.java index e5b257da..4b0b649b 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/TestBase.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/TestBase.java @@ -92,6 +92,12 @@ protected String stubErrorResponse() { return zoneName; } + protected String stubBadResponse() { + final String zoneName = TimeZone.getDefault().getID(); + stubBadResponse(zoneName); + return zoneName; + } + @SneakyThrows private void stubErrorResponse(@Nonnull final String zoneName, @Nonnull final RuntimeException errorForResponse) { stubFor(get(urlPathMatching("/" + zoneName)) @@ -101,6 +107,18 @@ private void stubErrorResponse(@Nonnull final String zoneName, @Nonnull final Ru )); } + @SneakyThrows + private void stubBadResponse(String zoneName) { + stubFor( + get(urlPathMatching("/" + zoneName)) + .willReturn( + aResponse() + .withStatus(200) + .withBody(objectMapper.writeValueAsString("Bad response")) + ) + ); + } + @TestConfiguration static class 
CustomClockConfiguration { From ee1a2d30c1d3f6c4e8b40ce4c189e14984f8254b Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Sat, 13 Sep 2025 21:00:07 +0500 Subject: [PATCH 04/13] rearrange imports --- .../mfvanek/spring/boot3/test/service/KafkaTracingTest.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java index fc8db197..32250ab4 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java @@ -12,9 +12,6 @@ import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationRegistry; import io.micrometer.tracing.Tracer; - -import java.util.Optional; - import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -33,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.UUID; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; From 47da8a2c134853fd6f63ec39be196298ad64f272 Mon Sep 17 00:00:00 2001 From: Ivan Vakhrushev Date: Sun, 14 Sep 2025 19:33:37 +0400 Subject: [PATCH 05/13] Fix problem with propagator --- build.gradle.kts | 2 +- db-migrations/build.gradle.kts | 13 +++- .../db/migrations/common/saver/DbSaver.java | 47 ++++++++++++++ .../boot3/kotlin/test/config/DbConfig.kt | 30 +++++++++ .../test/service/KafkaReadingService.kt | 62 +++++-------------- .../boot3/reactive/config/DbConfig.java | 31 ++++++++++ .../reactive/service/KafkaReadingService.java | 47 ++++---------- .../spring/boot3/test/config/DbConfig.java | 31 ++++++++++ 
.../test/service/KafkaReadingService.java | 50 ++++----------- .../test/controllers/TimeControllerTest.java | 5 +- 10 files changed, 195 insertions(+), 123 deletions(-) create mode 100644 db-migrations/src/main/java/io/github/mfvanek/db/migrations/common/saver/DbSaver.java create mode 100644 spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/config/DbConfig.kt create mode 100644 spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/config/DbConfig.java create mode 100644 spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/config/DbConfig.java diff --git a/build.gradle.kts b/build.gradle.kts index d69065bc..2808e25e 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -12,8 +12,8 @@ allprojects { version = "0.5.0" repositories { - mavenLocal() mavenCentral() + mavenLocal() } } diff --git a/db-migrations/build.gradle.kts b/db-migrations/build.gradle.kts index 00651f70..7fc9f66d 100644 --- a/db-migrations/build.gradle.kts +++ b/db-migrations/build.gradle.kts @@ -1,4 +1,15 @@ plugins { - id("java") + id("java-library") id("sb-ot-demo.java-conventions") + id("io.freefair.lombok") +} + +dependencies { + implementation(platform(project(":common-internal-bom"))) + implementation(platform(libs.spring.boot.v3.dependencies)) + + implementation("io.micrometer:micrometer-tracing") + implementation("org.apache.kafka:kafka-clients") + implementation("org.slf4j:slf4j-api") + implementation("org.springframework:spring-jdbc") } diff --git a/db-migrations/src/main/java/io/github/mfvanek/db/migrations/common/saver/DbSaver.java b/db-migrations/src/main/java/io/github/mfvanek/db/migrations/common/saver/DbSaver.java new file mode 100644 index 00000000..959a2eba --- /dev/null +++ b/db-migrations/src/main/java/io/github/mfvanek/db/migrations/common/saver/DbSaver.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.db.migrations.common.saver; + +import io.micrometer.tracing.Span; +import io.micrometer.tracing.Tracer; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.slf4j.MDC; +import org.springframework.jdbc.core.simple.JdbcClient; + +import java.time.Clock; +import java.time.LocalDateTime; +import java.util.UUID; + +@Slf4j +@RequiredArgsConstructor +public class DbSaver { + + private final String tenantName; + private final Tracer tracer; + private final Clock clock; + private final JdbcClient jdbcClient; + + public void processSingleRecord(ConsumerRecord record) { + try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { + final Span currentSpan = tracer.currentSpan(); + final String traceId = currentSpan != null ? currentSpan.context().traceId() : ""; + final String spanId = currentSpan != null ? currentSpan.context().spanId() : ""; + log.info("Received record: {} with traceId {} spanId {}", record.value(), traceId, spanId); + jdbcClient.sql(""" + insert into otel_demo.storage(message, trace_id, span_id, created_at) + values(:msg, :traceId, :currentSpan, :createdAt);""") + .param("msg", record.value()) + .param("traceId", traceId) + .param("currentSpan", spanId) + .param("createdAt", LocalDateTime.now(clock)) + .update(); + } + } +} diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/config/DbConfig.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/config/DbConfig.kt new file mode 100644 index 00000000..f7e4d9d9 --- /dev/null +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/config/DbConfig.kt @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.kotlin.test.config + +import io.github.mfvanek.db.migrations.common.saver.DbSaver +import io.micrometer.tracing.Tracer +import org.springframework.beans.factory.annotation.Value +import org.springframework.context.annotation.Bean +import org.springframework.context.annotation.Configuration +import org.springframework.jdbc.core.simple.JdbcClient +import java.time.Clock + +@Configuration(proxyBeanMethods = false) +class DbConfig { + + @Bean + fun dbSaver( + @Value("\${app.tenant.name}") tenantName: String, + tracer: Tracer, + clock: Clock, + jdbcClient: JdbcClient + ): DbSaver { + return DbSaver(tenantName, tracer, clock, jdbcClient) + } +} diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index 7ede7eae..79f42a1e 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -7,52 +7,34 @@ package io.github.mfvanek.spring.boot3.kotlin.test.service +import io.github.mfvanek.db.migrations.common.saver.DbSaver import io.github.oshai.kotlinlogging.KotlinLogging -import io.github.oshai.kotlinlogging.withLoggingContext import io.micrometer.tracing.Tracer import io.micrometer.tracing.propagation.Propagator import org.apache.kafka.clients.consumer.ConsumerRecord -import org.springframework.beans.factory.annotation.Value -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate import org.springframework.kafka.annotation.KafkaListener import org.springframework.kafka.support.Acknowledgment import 
org.springframework.stereotype.Service -import java.time.Clock -import java.time.LocalDateTime +import java.nio.charset.StandardCharsets import java.util.* private val logger = KotlinLogging.logger {} +internal object KafkaHeadersGetter : Propagator.Getter> { + override fun get(carrier: ConsumerRecord, key: String): String? = + carrier.headers()?.lastHeader(key)?.value()?.toString(StandardCharsets.UTF_8) +} + @Service class KafkaReadingService( - @Value("\${app.tenant.name}") private val tenantName: String, private val tracer: Tracer, - private val clock: Clock, - private val jdbcTemplate: NamedParameterJdbcTemplate, - private val propagator: Propagator + private val propagator: Propagator, + private val dbSaver: DbSaver ) { @KafkaListener(topics = ["\${spring.kafka.template.default-topic}"]) - fun listen(message: ConsumerRecord, ack: Acknowledgment) { - withLoggingContext("tenant.name" to tenantName) { - processMessage(message) - ack.acknowledge() - } - } - - private fun processMessage(message: ConsumerRecord) { - val currentSpan = tracer.currentSpan() - val traceId = currentSpan?.context()?.traceId().orEmpty() - val spanId = currentSpan?.context()?.spanId() - logger.info { "Received record: ${message.value()} with traceId $traceId" } - jdbcTemplate.update( - "insert into otel_demo.storage(message, trace_id, span_id, created_at) values(:msg, :traceId, :currentSpan, :createdAt);", - mapOf( - "msg" to message.value(), - "traceId" to traceId, - "currentSpan" to spanId, - "createdAt" to LocalDateTime.now(clock) - ) - ) + fun listen(record: ConsumerRecord, ack: Acknowledgment) { + dbSaver.processSingleRecord(record) + ack.acknowledge() } @KafkaListener( @@ -67,9 +49,7 @@ class KafkaReadingService( logger.info { "Received from Kafka ${records.size} records" } - records.forEach { record -> - restoreContextAndProcessSingleRecordIfNeed(record, ack) - } + records.forEach { record -> restoreContextAndProcessSingleRecordIfNeed(record) } ack.acknowledge() } catch (e: 
Throwable) { batchSpan.error(e) @@ -79,15 +59,12 @@ class KafkaReadingService( } } - private fun restoreContextAndProcessSingleRecordIfNeed(record: ConsumerRecord, ack: Acknowledgment) { - val kafkaPropagatorGetter = Propagator.Getter> { carrier, _ -> - carrier.headers().find { it.key() == "traceparent" }?.value()?.decodeToString() - } - val builder = propagator.extract(record, kafkaPropagatorGetter) + private fun restoreContextAndProcessSingleRecordIfNeed(record: ConsumerRecord) { + val builder = propagator.extract(record, KafkaHeadersGetter) val spanFromRecord = builder.name("processing-record-from-kafka").start() try { tracer.withSpan(spanFromRecord).use { - processSingleRecordIfNeed(record, ack) + dbSaver.processSingleRecord(record) } } catch (e: Throwable) { spanFromRecord.error(e) @@ -96,11 +73,4 @@ class KafkaReadingService( spanFromRecord.end() } } - - private fun processSingleRecordIfNeed(record: ConsumerRecord, ack: Acknowledgment) { - withLoggingContext("tenant.name" to tenantName) { - processMessage(record) - ack.acknowledge() - } - } } diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/config/DbConfig.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/config/DbConfig.java new file mode 100644 index 00000000..310f2d6a --- /dev/null +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/config/DbConfig.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.reactive.config; + +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.micrometer.tracing.Tracer; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.simple.JdbcClient; + +import java.time.Clock; + +@Configuration(proxyBeanMethods = false) +public class DbConfig { + + @Bean + public DbSaver dbSaver( + @Value("${app.tenant.name}") String tenantName, + Tracer tracer, + Clock clock, + JdbcClient jdbcClient + ) { + return new DbSaver(tenantName, tracer, clock, jdbcClient); + } +} diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java index 4e7e19ab..2e033c09 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java @@ -7,6 +7,7 @@ package io.github.mfvanek.spring.boot3.reactive.service; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; import io.micrometer.tracing.ScopedSpan; import io.micrometer.tracing.Span; import io.micrometer.tracing.Tracer; @@ -14,18 +15,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.slf4j.MDC; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.kafka.annotation.KafkaListener; import 
org.springframework.kafka.support.Acknowledgment; import org.springframework.stereotype.Service; -import java.time.Clock; -import java.time.LocalDateTime; -import java.util.Arrays; +import java.nio.charset.StandardCharsets; import java.util.List; -import java.util.Map; import java.util.UUID; @Slf4j @@ -33,16 +28,16 @@ @RequiredArgsConstructor public class KafkaReadingService { + private static final Propagator.Getter> KAFKA_PROPAGATOR_GETTER = (carrier, key) -> new String(carrier.headers().lastHeader(key).value(), StandardCharsets.UTF_8); + private final Tracer tracer; - private final Clock clock; - private final NamedParameterJdbcTemplate jdbcTemplate; - @Value("${app.tenant.name}") - private String tenantName; private final Propagator propagator; + private final DbSaver dbSaver; @KafkaListener(topics = "${spring.kafka.template.default-topic}") - public void listen(ConsumerRecord message, Acknowledgment ack) { - processSingleRecordIfNeed(message, ack); + public void listen(ConsumerRecord record, Acknowledgment ack) { + dbSaver.processSingleRecord(record); + ack.acknowledge(); } @KafkaListener( @@ -57,8 +52,7 @@ public void listenAdditional(List> records, Acknowl log.info( "Received from Kafka {} records", records.size() ); - records.forEach(record -> - restoreContextAndProcessSingleRecordIfNeed(record, ack)); + records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); } catch (Exception e) { batchSpan.error(e); @@ -68,12 +62,11 @@ public void listenAdditional(List> records, Acknowl } } - private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record, Acknowledgment ack) { - final Propagator.Getter> kafkaPropagatorGetter = (carrier, key) -> Arrays.toString(carrier.headers().lastHeader("traceparent").value()); - final Span.Builder builder = propagator.extract(record, kafkaPropagatorGetter); + private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record) { + final Span.Builder builder = 
propagator.extract(record, KAFKA_PROPAGATOR_GETTER); final Span spanFromRecord = builder.name("processing-record-from-kafka").start(); try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { - processSingleRecordIfNeed(record, ack); + dbSaver.processSingleRecord(record); } catch (Exception e) { spanFromRecord.error(e); throw e; @@ -81,20 +74,4 @@ private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord message, Acknowledgment ack) { - try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { - final Span currentSpan = tracer.currentSpan(); - final String traceId = currentSpan != null ? currentSpan.context().traceId() : ""; - log.info("Received record: {} with traceId {}", message.value(), traceId); - jdbcTemplate.update("insert into otel_demo.storage(message, trace_id, created_at) values(:msg, :traceId, :createdAt);", - Map.ofEntries( - Map.entry("msg", message.value()), - Map.entry("traceId", traceId), - Map.entry("createdAt", LocalDateTime.now(clock)) - ) - ); - ack.acknowledge(); - } - } } diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/config/DbConfig.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/config/DbConfig.java new file mode 100644 index 00000000..d4a40d9f --- /dev/null +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/config/DbConfig.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.config; + +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.micrometer.tracing.Tracer; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.simple.JdbcClient; + +import java.time.Clock; + +@Configuration(proxyBeanMethods = false) +public class DbConfig { + + @Bean + public DbSaver dbSaver( + @Value("${app.tenant.name}") String tenantName, + Tracer tracer, + Clock clock, + JdbcClient jdbcClient + ) { + return new DbSaver(tenantName, tracer, clock, jdbcClient); + } +} diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java index 02a54811..87ce097d 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java @@ -7,6 +7,7 @@ package io.github.mfvanek.spring.boot3.test.service; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; import io.micrometer.tracing.ScopedSpan; import io.micrometer.tracing.Span; import io.micrometer.tracing.Tracer; @@ -14,18 +15,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.slf4j.MDC; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import 
org.springframework.stereotype.Service; -import java.time.Clock; -import java.time.LocalDateTime; -import java.util.Arrays; +import java.nio.charset.StandardCharsets; import java.util.List; -import java.util.Map; import java.util.UUID; @Slf4j @@ -33,16 +28,16 @@ @RequiredArgsConstructor public class KafkaReadingService { - @Value("${app.tenant.name}") - private String tenantName; + private static final Propagator.Getter> KAFKA_PROPAGATOR_GETTER = (carrier, key) -> new String(carrier.headers().lastHeader(key).value(), StandardCharsets.UTF_8); + private final Tracer tracer; - private final Clock clock; - private final NamedParameterJdbcTemplate jdbcTemplate; private final Propagator propagator; + private final DbSaver dbSaver; @KafkaListener(topics = "${spring.kafka.template.default-topic}") - public void listen(ConsumerRecord message, Acknowledgment ack) { - processSingleRecordIfNeed(message, ack); + public void listen(ConsumerRecord record, Acknowledgment ack) { + dbSaver.processSingleRecord(record); + ack.acknowledge(); } @KafkaListener( @@ -57,8 +52,7 @@ public void listenAdditional(List> records, Acknowl log.info( "Received from Kafka {} records", records.size() ); - records.forEach(record -> - restoreContextAndProcessSingleRecordIfNeed(record, ack)); + records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); } catch (Exception e) { batchSpan.error(e); @@ -68,12 +62,11 @@ public void listenAdditional(List> records, Acknowl } } - private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record, Acknowledgment ack) { - final Propagator.Getter> kafkaPropagatorGetter = (carrier, key) -> Arrays.toString(carrier.headers().lastHeader("traceparent").value()); - final Span.Builder builder = propagator.extract(record, kafkaPropagatorGetter); + private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record) { + final Span.Builder builder = propagator.extract(record, KAFKA_PROPAGATOR_GETTER); final Span spanFromRecord 
= builder.name("processing-record-from-kafka").start(); try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { - processSingleRecordIfNeed(record, ack); + dbSaver.processSingleRecord(record); } catch (Exception e) { spanFromRecord.error(e); throw e; @@ -81,23 +74,4 @@ private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord message, Acknowledgment ack) { - try (MDC.MDCCloseable ignored = MDC.putCloseable("tenant.name", tenantName)) { - final Span currentSpan = tracer.currentSpan(); - final String traceId = currentSpan != null ? currentSpan.context().traceId() : ""; - final String spanId = currentSpan != null ? currentSpan.context().spanId() : ""; - log.info("Received record: {} with traceId {}", message.value(), traceId); - jdbcTemplate.update( - "insert into otel_demo.storage(message, trace_id, span_id, created_at) values(:msg, :traceId, :currentSpan, :createdAt);", - Map.ofEntries( - Map.entry("msg", message.value()), - Map.entry("traceId", traceId), - Map.entry("currentSpan", spanId), - Map.entry("createdAt", LocalDateTime.now(clock)) - ) - ); - ack.acknowledge(); - } - } } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java index c1b67b94..087c57b8 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java @@ -105,8 +105,9 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int .contains("\"tenant.name\":\"ru-a1-private\""); final List tracesFromDb = namedParameterJdbcTemplate.query("select trace_id from otel_demo.storage where message like :message", Map.of("message", received.value()), (rs, rowNum) -> rs.getString("trace_id")); - 
assertThat(tracesFromDb.size()).isEqualTo(2); - assertThat(tracesFromDb.stream().filter(it -> it.equals(traceId))).hasSize(1); + assertThat(tracesFromDb) + .hasSize(2) + .containsOnly(traceId); } @Order(2) From fce4531dd01e77c899e7460553854666a0db84ac Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Fri, 26 Sep 2025 19:46:42 +0500 Subject: [PATCH 06/13] review fixes --- .../kotlin/sb-ot-demo.java-compile.gradle.kts | 4 +- .../sb-ot-demo.kotlin-conventions.gradle.kts | 2 +- config/checkstyle/checkstyle.xml | 2 +- .../db/changelog/sql/add_span_column.sql | 2 +- .../resources/db/changelog/sql/storage.sql | 2 +- .../test/service/KafkaReadingService.kt | 7 +- .../src/main/resources/application.yml | 3 - .../test/controllers/TimeControllerTest.kt | 18 +-- .../reactive/controllers/TimeController.java | 11 +- .../reactive/service/KafkaReadingService.java | 7 +- .../controllers/TimeControllerTest.java | 10 +- .../test/service/KafkaReadingService.java | 7 +- .../test/controllers/TimeControllerTest.java | 14 +-- .../boot3/test/service/KafkaTracingTest.java | 113 ------------------ 14 files changed, 45 insertions(+), 157 deletions(-) delete mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java diff --git a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts index 1244b437..bcdd3637 100644 --- a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts +++ b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts @@ -24,8 +24,8 @@ dependencies { } java { - sourceCompatibility = JavaVersion.VERSION_21 - targetCompatibility = JavaVersion.VERSION_21 + sourceCompatibility = JavaVersion.VERSION_17 + targetCompatibility = JavaVersion.VERSION_17 withSourcesJar() } diff --git a/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts b/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts index 93f1743e..99a8fd53 100644 --- 
a/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts @@ -33,7 +33,7 @@ dependencies { tasks.withType { compilerOptions { freeCompilerArgs.add("-Xjsr305=strict") - jvmTarget = JvmTarget.JVM_21 + jvmTarget = JvmTarget.JVM_17 } } diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml index d82924e7..8566f59e 100644 --- a/config/checkstyle/checkstyle.xml +++ b/config/checkstyle/checkstyle.xml @@ -56,7 +56,7 @@ - + diff --git a/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql b/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql index b83dfa2d..832900dc 100644 --- a/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql +++ b/db-migrations/src/main/resources/db/changelog/sql/add_span_column.sql @@ -1,7 +1,7 @@ --liquibase formatted sql --changeset marina.zharinova:2025.08.31:add span column -alter table otel_demo.storage add column span_id varchar(64); +alter table otel_demo.storage add column span_id text; --changeset marina.zharinova:2025.08.31:comment on span_id comment on column otel_demo.storage.span_id is 'SpanId of operation'; diff --git a/db-migrations/src/main/resources/db/changelog/sql/storage.sql b/db-migrations/src/main/resources/db/changelog/sql/storage.sql index 1da0d4c9..feeefc8b 100644 --- a/db-migrations/src/main/resources/db/changelog/sql/storage.sql +++ b/db-migrations/src/main/resources/db/changelog/sql/storage.sql @@ -5,7 +5,7 @@ create table if not exists otel_demo.storage ( id bigint generated always as identity, message text not null, - trace_id varchar(64) not null unique, + trace_id text not null unique, created_at timestamptz not null ); diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt 
b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index 79f42a1e..978b8e44 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -12,6 +12,8 @@ import io.github.oshai.kotlinlogging.KotlinLogging import io.micrometer.tracing.Tracer import io.micrometer.tracing.propagation.Propagator import org.apache.kafka.clients.consumer.ConsumerRecord +import org.springframework.dao.DataAccessException +import org.springframework.kafka.KafkaException import org.springframework.kafka.annotation.KafkaListener import org.springframework.kafka.support.Acknowledgment import org.springframework.stereotype.Service @@ -44,14 +46,13 @@ class KafkaReadingService( ) fun listenAdditional(records: List>, ack: Acknowledgment) { val batchSpan = tracer.startScopedSpan("batch-processing") - logger.info { "current span: ${tracer.currentSpan()}" } try { logger.info { "Received from Kafka ${records.size} records" } records.forEach { record -> restoreContextAndProcessSingleRecordIfNeed(record) } ack.acknowledge() - } catch (e: Throwable) { + } catch (e: KafkaException) { batchSpan.error(e) throw e } finally { @@ -66,7 +67,7 @@ class KafkaReadingService( tracer.withSpan(spanFromRecord).use { dbSaver.processSingleRecord(record) } - } catch (e: Throwable) { + } catch (e: DataAccessException) { spanFromRecord.error(e) throw e } finally { diff --git a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml index 91d1b97e..c6146245 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml @@ -54,9 +54,6 @@ spring: sasl: mechanism: PLAIN jaas.config: 
org.apache.kafka.common.security.plain.PlainLoginModule required username="${demo.kafka.opentelemetry.username}" password="${demo.kafka.opentelemetry.password}"; - opentelemetry: - additional-topic: open.telemetry.sb3.queue.additional - additional-consumer-groupId: open.telemetry.sb3.queue.additional-group jdbc: template: query-timeout: 1s diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt index 6f818c76..abdb0ca5 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt @@ -91,15 +91,15 @@ class TimeControllerTest : TestBase() { assertThat(output.all) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\"") - val tracesFromDb = namedParameterJdbcTemplate - .query( - "select trace_id from otel_demo.storage where message like :message", - mapOf("message" to received.value()) - ) { rs, _ -> - rs.getString("trace_id") - } - assertThat(tracesFromDb.size).isEqualTo(2) - assertThat(tracesFromDb.stream().filter { it == traceId }).hasSize(2) + val messageFromDb = namedParameterJdbcTemplate.queryForList( + "select message from otel_demo.storage where trace_id = :traceId", + mapOf("traceId" to traceId), + String::class.java + ) + messageFromDb.forEach { + assertThat(it).isNotNull() + assertThat(it).isEqualTo(received.value()) + } } @Order(2) diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java index 
cb265cfc..518cb747 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeController.java @@ -35,14 +35,13 @@ public class TimeController { // http://localhost:8081/current-time @GetMapping(path = "/current-time") public Mono getNow() { - return Mono.just(tracer) - .map(tracer -> { - log.trace("tracer {}", tracer); - return Optional.ofNullable(tracer.currentSpan()) + log.trace("tracer {}", tracer); + return Mono.justOrEmpty( + Optional.ofNullable(tracer.currentSpan()) .map(Span::context) .map(TraceContext::traceId) - .orElse(null); - }) + .orElse(null) + ) .doOnNext(traceId -> log.info("Called method getNow. TraceId = {}", traceId)) .then(publicApiService.getZonedTime()) .defaultIfEmpty(LocalDateTime.now(clock)) diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java index 2e033c09..17985973 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java @@ -15,6 +15,8 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.springframework.dao.DataAccessException; +import org.springframework.kafka.KafkaException; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import org.springframework.stereotype.Service; @@ -47,14 +49,13 @@ public void listen(ConsumerRecord record, Acknowledgment ack) { ) public void listenAdditional(List> records, Acknowledgment 
ack) { final ScopedSpan batchSpan = tracer.startScopedSpan("batch-processing"); - log.info("current span: {}", tracer.currentSpan()); try { log.info( "Received from Kafka {} records", records.size() ); records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); - } catch (Exception e) { + } catch (KafkaException e) { batchSpan.error(e); throw e; } finally { @@ -67,7 +68,7 @@ private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord tracesFromDb = namedParameterJdbcTemplate.query("select trace_id from otel_demo.storage where message like :message", - Map.of("message", received.value()), (rs, rowNum) -> rs.getString("trace_id")); - assertThat(tracesFromDb.size()).isEqualTo(2); - assertThat(tracesFromDb.stream().filter(it -> it.equals(traceId))).hasSize(1); + final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", + Map.of("traceId", traceId), String.class); + messageFromDb.forEach(it -> { + assertThat(it).isNotNull(); + assertThat(it).isEqualTo(received.value()); + }); } @Order(2) diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java index 87ce097d..6ad6e8a6 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java @@ -15,6 +15,8 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.springframework.dao.DataAccessException; +import org.springframework.kafka.KafkaException; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import 
org.springframework.stereotype.Service; @@ -47,14 +49,13 @@ public void listen(ConsumerRecord record, Acknowledgment ack) { ) public void listenAdditional(List> records, Acknowledgment ack) { final ScopedSpan batchSpan = tracer.startScopedSpan("batch-processing"); - log.info("current span: {}", tracer.currentSpan()); try { log.info( "Received from Kafka {} records", records.size() ); records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); - } catch (Exception e) { + } catch (KafkaException e) { batchSpan.error(e); throw e; } finally { @@ -67,7 +68,7 @@ private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord received = consumerRecords.poll(10, TimeUnit.SECONDS); assertThat(received).isNotNull(); @@ -103,11 +102,12 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int assertThat(output.getAll()) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\""); - final List tracesFromDb = namedParameterJdbcTemplate.query("select trace_id from otel_demo.storage where message like :message", - Map.of("message", received.value()), (rs, rowNum) -> rs.getString("trace_id")); - assertThat(tracesFromDb) - .hasSize(2) - .containsOnly(traceId); + final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", + Map.of("traceId", traceId), String.class); + messageFromDb.forEach(it -> { + assertThat(it).isNotNull(); + assertThat(it).isEqualTo(received.value()); + }); } @Order(2) diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java deleted file mode 100644 index 32250ab4..00000000 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java +++ /dev/null @@ -1,113 
+0,0 @@ -/* - * Copyright (c) 2020-2025. Ivan Vakhrushev and others. - * https://github.com/mfvanek/spring-boot-open-telemetry-demo - * - * Licensed under the Apache License 2.0 - */ - -package io.github.mfvanek.spring.boot3.test.service; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.github.mfvanek.spring.boot3.test.support.TestBase; -import io.micrometer.observation.Observation; -import io.micrometer.observation.ObservationRegistry; -import io.micrometer.tracing.Tracer; -import org.apache.kafka.clients.consumer.Consumer; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.common.header.Headers; -import org.apache.kafka.common.header.internals.RecordHeader; -import org.apache.kafka.common.record.TimestampType; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.kafka.core.ConsumerFactory; -import org.springframework.kafka.support.Acknowledgment; -import org.springframework.test.context.bean.override.mockito.MockitoBean; - -import java.nio.charset.StandardCharsets; -import java.time.Duration; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyMap; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -class KafkaTracingTest extends TestBase { - - @MockitoBean - private ConsumerFactory consumerFactory; - @Autowired - private KafkaReadingService kafkaReadingService; - @Autowired - private Tracer tracer; - 
@Autowired - private ObservationRegistry observationRegistry; - - @Test - @SuppressFBWarnings(value = "PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS", justification = "Justification for suppressing this warning.") - void errorSpanWhenListenerFails() { - try (Consumer mockConsumer = mock(Consumer.class)) { - when(consumerFactory.createConsumer()).thenReturn(mockConsumer); - when(mockConsumer.poll(any(Duration.class))).thenReturn(new ConsumerRecords<>(Map.of( - new TopicPartition("test-topic", 0), - List.of(createTestConsumerRecord("test-message")) - ))); - doThrow(new RuntimeException("Commit failed")) - .when(mockConsumer) - .commitSync(anyMap()); - final Acknowledgment mockAck = mock(Acknowledgment.class); - doAnswer(invocation -> { - mockConsumer.commitSync(); - return null; - }).when(mockAck).acknowledge(); - - final AtomicReference thrownException = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - Observation.createNotStarted("test", observationRegistry).observe(() -> { - try { - kafkaReadingService.listenAdditional(List.of(createTestConsumerRecord("test-message")), mockAck); - } catch (Exception e) { - thrownException.set(e); - } finally { - latch.countDown(); - } - assertThat(Objects.requireNonNull(tracer.currentSpan()).error(thrownException.get())).isNotNull(); - }); - assertThat(thrownException.get()).isNotNull(); - assertThat(thrownException.get().getMessage()).contains("Cannot invoke " + - "\"org.apache.kafka.common.header.Header.value()\" because the return value of " + - "\"org.apache.kafka.common.header.Headers.lastHeader(String)\" is null"); - } - } - - private ConsumerRecord createTestConsumerRecord(String value) { - final Headers headers = new org.apache.kafka.common.header.internals.RecordHeaders(); - headers.add(new RecordHeader("header", "1".getBytes(StandardCharsets.UTF_8))); - - return new ConsumerRecord<>( - "test-topic", - 0, - 0L, - System.currentTimeMillis(), - TimestampType.CREATE_TIME, - 0L, - 0, - 0, - 
UUID.randomUUID(), - value, - headers, - Optional.empty() - ); - } -} From dd6800f71877706349b2533668899c9f0fb98c4b Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Fri, 26 Sep 2025 19:48:56 +0500 Subject: [PATCH 07/13] fix changed java version --- buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts | 4 ++-- .../src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts index bcdd3637..1244b437 100644 --- a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts +++ b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts @@ -24,8 +24,8 @@ dependencies { } java { - sourceCompatibility = JavaVersion.VERSION_17 - targetCompatibility = JavaVersion.VERSION_17 + sourceCompatibility = JavaVersion.VERSION_21 + targetCompatibility = JavaVersion.VERSION_21 withSourcesJar() } diff --git a/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts b/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts index 99a8fd53..93f1743e 100644 --- a/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts +++ b/buildSrc/src/main/kotlin/sb-ot-demo.kotlin-conventions.gradle.kts @@ -33,7 +33,7 @@ dependencies { tasks.withType { compilerOptions { freeCompilerArgs.add("-Xjsr305=strict") - jvmTarget = JvmTarget.JVM_17 + jvmTarget = JvmTarget.JVM_21 } } From 43680298f4b54eccba05c6c0b4ec6bdd53b12b46 Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Mon, 29 Sep 2025 11:54:38 +0500 Subject: [PATCH 08/13] fix after review --- .../kotlin/test/service/KafkaReadingService.kt | 16 ++++++++-------- .../test/controllers/TimeControllerTest.kt | 1 - .../kotlin/test/service/PublicApiServiceTest.kt | 2 +- .../reactive/service/KafkaReadingService.java | 16 ++++++++-------- .../reactive/controllers/TimeControllerTest.java | 5 +---- 
.../reactive/service/PublicApiServiceTest.java | 2 +- .../boot3/test/service/KafkaReadingService.java | 16 ++++++++-------- .../test/controllers/TimeControllerTest.java | 5 +---- .../boot3/test/service/PublicApiServiceTest.java | 2 +- 9 files changed, 29 insertions(+), 36 deletions(-) diff --git a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt index 978b8e44..b0faa81d 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt +++ b/spring-boot-3-demo-app-kotlin/src/main/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingService.kt @@ -12,8 +12,6 @@ import io.github.oshai.kotlinlogging.KotlinLogging import io.micrometer.tracing.Tracer import io.micrometer.tracing.propagation.Propagator import org.apache.kafka.clients.consumer.ConsumerRecord -import org.springframework.dao.DataAccessException -import org.springframework.kafka.KafkaException import org.springframework.kafka.annotation.KafkaListener import org.springframework.kafka.support.Acknowledgment import org.springframework.stereotype.Service @@ -39,6 +37,7 @@ class KafkaReadingService( ack.acknowledge() } + @SuppressWarnings("IllegalCatch", "PMD.AvoidCatchingThrowable") @KafkaListener( id = "\${spring.kafka.consumer.additional-groupId}", topics = ["\${spring.kafka.template.additional-topic}"], @@ -52,14 +51,15 @@ class KafkaReadingService( } records.forEach { record -> restoreContextAndProcessSingleRecordIfNeed(record) } ack.acknowledge() - } catch (e: KafkaException) { - batchSpan.error(e) - throw e + } catch (throwable: Throwable) { + batchSpan.error(throwable) + throw throwable } finally { batchSpan.end() } } + @SuppressWarnings("IllegalCatch", "PMD.AvoidCatchingThrowable") private fun 
restoreContextAndProcessSingleRecordIfNeed(record: ConsumerRecord) { val builder = propagator.extract(record, KafkaHeadersGetter) val spanFromRecord = builder.name("processing-record-from-kafka").start() @@ -67,9 +67,9 @@ class KafkaReadingService( tracer.withSpan(spanFromRecord).use { dbSaver.processSingleRecord(record) } - } catch (e: DataAccessException) { - spanFromRecord.error(e) - throw e + } catch (throwable: Throwable) { + spanFromRecord.error(throwable) + throw throwable } finally { spanFromRecord.end() } diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt index abdb0ca5..08f8c6ad 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt @@ -97,7 +97,6 @@ class TimeControllerTest : TestBase() { String::class.java ) messageFromDb.forEach { - assertThat(it).isNotNull() assertThat(it).isEqualTo(received.value()) } } diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/PublicApiServiceTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/PublicApiServiceTest.kt index 2dcf060b..2c8942dc 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/PublicApiServiceTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/PublicApiServiceTest.kt @@ -99,7 +99,7 @@ class PublicApiServiceTest : TestBase() { } @Test - fun throwsJsonProcessingExceptionWithBdResponse(output: CapturedOutput) { + fun throwsJsonProcessingExceptionWithBadResponse(output: 
CapturedOutput) { stubBadResponse() Observation.createNotStarted("test", observationRegistry).observe { val result = publicApiService.getZonedTime() diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java index 17985973..8b3f19ac 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingService.java @@ -15,8 +15,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.springframework.dao.DataAccessException; -import org.springframework.kafka.KafkaException; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import org.springframework.stereotype.Service; @@ -42,6 +40,7 @@ public void listen(ConsumerRecord record, Acknowledgment ack) { ack.acknowledge(); } + @SuppressWarnings({"IllegalCatch", "PMD.AvoidCatchingThrowable"}) @KafkaListener( id = "${spring.kafka.consumer.additional-groupId}", topics = "${spring.kafka.template.additional-topic}", @@ -55,22 +54,23 @@ public void listenAdditional(List> records, Acknowl ); records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); - } catch (KafkaException e) { - batchSpan.error(e); - throw e; + } catch (Throwable throwable) { + batchSpan.error(throwable); + throw throwable; } finally { batchSpan.end(); } } + @SuppressWarnings({"IllegalCatch", "PMD.AvoidCatchingThrowable"}) private void restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record) { final Span.Builder builder = propagator.extract(record, KAFKA_PROPAGATOR_GETTER); final Span spanFromRecord = 
builder.name("processing-record-from-kafka").start(); try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { dbSaver.processSingleRecord(record); - } catch (DataAccessException e) { - spanFromRecord.error(e); - throw e; + } catch (Throwable throwable) { + spanFromRecord.error(throwable); + throw throwable; } finally { spanFromRecord.end(); } diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java index 2e6d00ea..e79eb0df 100644 --- a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java @@ -103,10 +103,7 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int .contains("\"tenant.name\":\"ru-a1-private\""); final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", Map.of("traceId", traceId), String.class); - messageFromDb.forEach(it -> { - assertThat(it).isNotNull(); - assertThat(it).isEqualTo(received.value()); - }); + messageFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); } @Order(2) diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/PublicApiServiceTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/PublicApiServiceTest.java index 51cb25c7..1684c0ef 100644 --- a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/PublicApiServiceTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/PublicApiServiceTest.java @@ -98,7 +98,7 @@ void 
emptyResponseWhen500StatusWithStepVerifier() { } @Test - void emptyResponseWhen200StatusWithBadResposeWithStepVerifier(@Nonnull final CapturedOutput output) { + void emptyResponseWhen200StatusWithBadResponseWithStepVerifier(@Nonnull final CapturedOutput output) { stubOkButNotCorrectResponse(); StepVerifier.create(publicApiService.getZonedTime()) diff --git a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java index 6ad6e8a6..ef3a78c2 100644 --- a/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java +++ b/spring-boot-3-demo-app/src/main/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingService.java @@ -15,8 +15,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.springframework.dao.DataAccessException; -import org.springframework.kafka.KafkaException; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import org.springframework.stereotype.Service; @@ -42,6 +40,7 @@ public void listen(ConsumerRecord record, Acknowledgment ack) { ack.acknowledge(); } + @SuppressWarnings({"IllegalCatch", "PMD.AvoidCatchingThrowable"}) @KafkaListener( id = "${spring.kafka.consumer.additional-groupId}", topics = "${spring.kafka.template.additional-topic}", @@ -55,22 +54,23 @@ public void listenAdditional(List> records, Acknowl ); records.forEach(this::restoreContextAndProcessSingleRecordIfNeed); ack.acknowledge(); - } catch (KafkaException e) { - batchSpan.error(e); - throw e; + } catch (Throwable throwable) { + batchSpan.error(throwable); + throw throwable; } finally { batchSpan.end(); } } + @SuppressWarnings({"IllegalCatch", "PMD.AvoidCatchingThrowable"}) private void 
restoreContextAndProcessSingleRecordIfNeed(ConsumerRecord record) { final Span.Builder builder = propagator.extract(record, KAFKA_PROPAGATOR_GETTER); final Span spanFromRecord = builder.name("processing-record-from-kafka").start(); try (Tracer.SpanInScope ignored = tracer.withSpan(spanFromRecord)) { dbSaver.processSingleRecord(record); - } catch (DataAccessException e) { - spanFromRecord.error(e); - throw e; + } catch (Throwable throwable) { + spanFromRecord.error(throwable); + throw throwable; } finally { spanFromRecord.end(); } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java index d6be973e..c613f0d3 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java @@ -104,10 +104,7 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int .contains("\"tenant.name\":\"ru-a1-private\""); final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", Map.of("traceId", traceId), String.class); - messageFromDb.forEach(it -> { - assertThat(it).isNotNull(); - assertThat(it).isEqualTo(received.value()); - }); + messageFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); } @Order(2) diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java index 247f9b27..a4f61f24 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java +++ 
b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java @@ -91,7 +91,7 @@ void throwsJsonProcessingExceptionWithBdResponse(CapturedOutput output) { Observation.createNotStarted("test", observationRegistry).observe(() -> { final LocalDateTime result = publicApiService.getZonedTime(); assertThat(result).isNull(); - assertThat(Objects.requireNonNull(tracer.currentSpan()).context().traceId()).isNotNull(); + assertThat(tracer.currentSpan().context().traceId()).isNotNull(); assertThat(output.getAll()).contains("Failed to convert response"); }); verify(1, getRequestedFor(urlPathMatching("/" + zoneName))); From 767234f858023e4d6d94188d4f46a1d82db18d6b Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Tue, 30 Sep 2025 18:21:47 +0500 Subject: [PATCH 09/13] rewrite asserts without warnings --- .../spring/boot3/test/service/PublicApiServiceTest.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java index a4f61f24..d5a5b64f 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/PublicApiServiceTest.java @@ -11,6 +11,7 @@ import io.github.mfvanek.spring.boot3.test.support.TestBase; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.Span; import io.micrometer.tracing.Tracer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -86,12 +87,15 @@ void retriesOnceToGetZonedTime(@Nonnull final CapturedOutput output) { } @Test - void throwsJsonProcessingExceptionWithBdResponse(CapturedOutput output) { + void 
throwsJsonProcessingExceptionWithBadResponse(CapturedOutput output) { final String zoneName = stubBadResponse(); Observation.createNotStarted("test", observationRegistry).observe(() -> { final LocalDateTime result = publicApiService.getZonedTime(); + final Span currentSpan = tracer.currentSpan(); + + assert currentSpan != null; assertThat(result).isNull(); - assertThat(tracer.currentSpan().context().traceId()).isNotNull(); + assertThat(currentSpan.context().traceId()).isNotNull(); assertThat(output.getAll()).contains("Failed to convert response"); }); verify(1, getRequestedFor(urlPathMatching("/" + zoneName))); From 3a3e3e62c8df4b8243c35cffdc8f467e93323db3 Mon Sep 17 00:00:00 2001 From: "m.zharinova" Date: Fri, 3 Oct 2025 11:36:57 +0500 Subject: [PATCH 10/13] add test for KafkaReadingService --- .../test/service/KafkaReadingServiceTest.kt | 80 +++++++++++++++++ .../service/KafkaReadingServiceTest.java | 90 +++++++++++++++++++ .../test/service/KafkaReadingServiceTest.java | 90 +++++++++++++++++++ 3 files changed, 260 insertions(+) create mode 100644 spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt create mode 100644 spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java create mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt new file mode 100644 index 00000000..f2c234d0 --- /dev/null +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.kotlin.test.service + +import io.github.mfvanek.db.migrations.common.saver.DbSaver +import io.micrometer.tracing.ScopedSpan +import io.micrometer.tracing.Span +import io.micrometer.tracing.Tracer +import io.micrometer.tracing.Tracer.SpanInScope +import io.micrometer.tracing.propagation.Propagator +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.header.Headers +import org.assertj.core.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.Answers +import org.mockito.ArgumentMatchers +import org.mockito.Mock +import org.mockito.Mockito +import org.mockito.junit.jupiter.MockitoExtension +import org.springframework.kafka.support.Acknowledgment +import java.util.UUID + +@ExtendWith(MockitoExtension::class) +internal class KafkaReadingServiceTest { + @Mock + private lateinit var tracer: Tracer + + @Mock + private lateinit var propagator: Propagator + + @Mock + private lateinit var dbSaver: DbSaver + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private lateinit var record: ConsumerRecord + + @Mock + private lateinit var acknowledgment: Acknowledgment + + private lateinit var kafkaReadingService: KafkaReadingService + + @Test + fun listenAdditionalShouldEndBatchSpanEvenOnException() { + val headers = Mockito.mock(Headers::class.java) + + Mockito.`when`(record.headers()).thenReturn(headers) + + val spanBuilder = Mockito.mock(Span.Builder::class.java) + Mockito.`when`(propagator.extract(ArgumentMatchers.any(), ArgumentMatchers.any>())) + .thenReturn(spanBuilder) + + val spanFromRecord = Mockito.mock(Span::class.java) + Mockito.`when`(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder) + Mockito.`when`(spanBuilder.start()).thenReturn(spanFromRecord) + + val spanInScope = 
Mockito.mock(SpanInScope::class.java) + Mockito.`when`(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope) + val batchSpan = Mockito.mock(ScopedSpan::class.java) + Mockito.`when`(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan) + + kafkaReadingService = KafkaReadingService(tracer, propagator, dbSaver) + val testException = RuntimeException("DB error") + Mockito.doThrow(testException).`when`(dbSaver)?.processSingleRecord(record) + + val records = listOf(record) + + Assertions.assertThatThrownBy { kafkaReadingService.listenAdditional(records, acknowledgment) } + .isSameAs(testException) + Mockito.verify(tracer).startScopedSpan("batch-processing") + Mockito.verify(dbSaver).processSingleRecord(record) + Mockito.verify(acknowledgment, Mockito.never())?.acknowledge() + Mockito.verify(spanInScope).close() + } +} diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java new file mode 100644 index 00000000..6bda9f4d --- /dev/null +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.reactive.service; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.micrometer.tracing.ScopedSpan; +import io.micrometer.tracing.Span; +import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.Tracer.SpanInScope; +import io.micrometer.tracing.propagation.Propagator; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.common.header.Headers; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.kafka.support.Acknowledgment; + +import java.util.List; +import java.util.UUID; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class KafkaReadingServiceTest { + + @Mock + private Tracer tracer; + + @Mock + private Propagator propagator; + + @Mock + private DbSaver dbSaver; + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private ConsumerRecord record; + + @Mock + private Acknowledgment acknowledgment; + + private KafkaReadingService kafkaReadingService; + + @Test + @SuppressWarnings("PMD.CloseResource") + @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")// to suppress warning when calling propagator.extract(any(), any()) + void listenAdditionalShouldEndBatchSpanEvenOnException() { + final Headers headers = mock(Headers.class); + + when(record.headers()).thenReturn(headers); + + final Span.Builder spanBuilder 
= mock(Span.Builder.class); + when(propagator.extract(any(), any())).thenReturn(spanBuilder); + + final Span spanFromRecord = mock(Span.class); + when(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder); + when(spanBuilder.start()).thenReturn(spanFromRecord); + + final SpanInScope spanInScope = mock(SpanInScope.class); + when(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope); + final ScopedSpan batchSpan = mock(ScopedSpan.class); + when(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan); + + kafkaReadingService = new KafkaReadingService(tracer, propagator, dbSaver); + final RuntimeException testException = new RuntimeException("DB error"); + doThrow(testException).when(dbSaver).processSingleRecord(record); + + final List> records = List.of(record); + + assertThatThrownBy(() -> kafkaReadingService.listenAdditional(records, acknowledgment)) + .isSameAs(testException); + verify(tracer).startScopedSpan("batch-processing"); + verify(dbSaver).processSingleRecord(record); + verify(acknowledgment, never()).acknowledge(); + verify(spanInScope).close(); + } +} diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java new file mode 100644 index 00000000..0737fda9 --- /dev/null +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.service; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.micrometer.tracing.ScopedSpan; +import io.micrometer.tracing.Span; +import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.Tracer.SpanInScope; +import io.micrometer.tracing.propagation.Propagator; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.common.header.Headers; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.kafka.support.Acknowledgment; + +import java.util.List; +import java.util.UUID; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class KafkaReadingServiceTest { + + @Mock + private Tracer tracer; + + @Mock + private Propagator propagator; + + @Mock + private DbSaver dbSaver; + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private ConsumerRecord record; + + @Mock + private Acknowledgment acknowledgment; + + private KafkaReadingService kafkaReadingService; + + @Test + @SuppressWarnings("PMD.CloseResource") + @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")// to suppress warning when calling propagator.extract(any(), any()) + void listenAdditionalShouldEndBatchSpanEvenOnException() { + final Headers headers = mock(Headers.class); + + when(record.headers()).thenReturn(headers); + + final Span.Builder spanBuilder = 
mock(Span.Builder.class); + when(propagator.extract(any(), any())).thenReturn(spanBuilder); + + final Span spanFromRecord = mock(Span.class); + when(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder); + when(spanBuilder.start()).thenReturn(spanFromRecord); + + final SpanInScope spanInScope = mock(SpanInScope.class); + when(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope); + final ScopedSpan batchSpan = mock(ScopedSpan.class); + when(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan); + + kafkaReadingService = new KafkaReadingService(tracer, propagator, dbSaver); + final RuntimeException testException = new RuntimeException("DB error"); + doThrow(testException).when(dbSaver).processSingleRecord(record); + + final List> records = List.of(record); + + assertThatThrownBy(() -> kafkaReadingService.listenAdditional(records, acknowledgment)) + .isSameAs(testException); + verify(tracer).startScopedSpan("batch-processing"); + verify(dbSaver).processSingleRecord(record); + verify(acknowledgment, never()).acknowledge(); + verify(spanInScope).close(); + } +} From dad70c025276c504b17ec980398a5e05c95892b9 Mon Sep 17 00:00:00 2001 From: Marina Zharinova Date: Mon, 13 Oct 2025 21:04:51 +0500 Subject: [PATCH 11/13] add new tracing test for kafka consumer --- .../kotlin/sb-ot-demo.java-compile.gradle.kts | 1 + .../test/controllers/TimeControllerTest.kt | 9 +- .../test/service/KafkaReadingServiceTest.kt | 80 ----------- .../kotlin/test/service/KafkaTracingTest.kt | 134 +++++++++++++++++ .../test/support/SpanExporterConfiguration.kt | 35 +++++ .../TraceIdInResponseReactiveFilter.java | 2 +- .../controllers/TimeControllerTest.java | 7 +- .../service/KafkaReadingServiceTest.java | 90 ------------ .../reactive/service/KafkaTracingTest.java | 136 ++++++++++++++++++ .../support/SpanExporterConfiguration.java | 43 ++++++ .../test/controllers/TimeControllerTest.java | 10 +- .../test/service/KafkaReadingServiceTest.java | 90 ------------ 
.../boot3/test/service/KafkaTracingTest.java | 136 ++++++++++++++++++ .../support/SpanExporterConfiguration.java | 43 ++++++ 14 files changed, 546 insertions(+), 270 deletions(-) delete mode 100644 spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt create mode 100644 spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt create mode 100644 spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/support/SpanExporterConfiguration.kt delete mode 100644 spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java create mode 100644 spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java create mode 100644 spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/support/SpanExporterConfiguration.java delete mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java create mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java create mode 100644 spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/SpanExporterConfiguration.java diff --git a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts index 1244b437..2c481f33 100644 --- a/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts +++ b/buildSrc/src/main/kotlin/sb-ot-demo.java-compile.gradle.kts @@ -21,6 +21,7 @@ dependencies { if (osdetector.arch == "aarch_64") { testImplementation("io.netty:netty-all:4.1.104.Final") } + testImplementation("io.opentelemetry:opentelemetry-sdk-testing") } java { diff --git 
a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt index 08f8c6ad..913b284e 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt @@ -4,6 +4,7 @@ import io.github.mfvanek.spring.boot3.kotlin.test.filters.TraceIdInResponseServl import io.github.mfvanek.spring.boot3.kotlin.test.service.dto.toParsedDateTime import io.github.mfvanek.spring.boot3.kotlin.test.support.KafkaInitializer import io.github.mfvanek.spring.boot3.kotlin.test.support.TestBase +import io.opentelemetry.api.GlobalOpenTelemetry import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord @@ -49,6 +50,7 @@ class TimeControllerTest : TestBase() { @BeforeAll fun setUpKafkaConsumer() { + GlobalOpenTelemetry.resetForTest() container = setUpKafkaConsumer(kafkaProperties, consumerRecords) } @@ -91,12 +93,13 @@ class TimeControllerTest : TestBase() { assertThat(output.all) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\"") - val messageFromDb = namedParameterJdbcTemplate.queryForList( + val messagesFromDb = namedParameterJdbcTemplate.queryForList( "select message from otel_demo.storage where trace_id = :traceId", mapOf("traceId" to traceId), String::class.java ) - messageFromDb.forEach { + assertThat(messagesFromDb.size).isEqualTo(2) + messagesFromDb.forEach { assertThat(it).isEqualTo(received.value()) } } @@ -185,7 +188,7 @@ class TimeControllerTest : TestBase() { } } -private fun setUpKafkaConsumer(kafkaProperties: 
KafkaProperties, consumerRecords: BlockingQueue>): KafkaMessageListenerContainer { +fun setUpKafkaConsumer(kafkaProperties: KafkaProperties, consumerRecords: BlockingQueue>): KafkaMessageListenerContainer { val containerProperties = ContainerProperties(kafkaProperties.template.defaultTopic) val consumerProperties = KafkaTestUtils.consumerProps(KafkaInitializer.getBootstrapSevers(), "test-group", "false") consumerProperties[CommonClientConfigs.SECURITY_PROTOCOL_CONFIG] = "SASL_PLAINTEXT" diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt deleted file mode 100644 index f2c234d0..00000000 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaReadingServiceTest.kt +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
- * https://github.com/mfvanek/spring-boot-open-telemetry-demo - * - * Licensed under the Apache License 2.0 - */ - -package io.github.mfvanek.spring.boot3.kotlin.test.service - -import io.github.mfvanek.db.migrations.common.saver.DbSaver -import io.micrometer.tracing.ScopedSpan -import io.micrometer.tracing.Span -import io.micrometer.tracing.Tracer -import io.micrometer.tracing.Tracer.SpanInScope -import io.micrometer.tracing.propagation.Propagator -import org.apache.kafka.clients.consumer.ConsumerRecord -import org.apache.kafka.common.header.Headers -import org.assertj.core.api.Assertions -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.extension.ExtendWith -import org.mockito.Answers -import org.mockito.ArgumentMatchers -import org.mockito.Mock -import org.mockito.Mockito -import org.mockito.junit.jupiter.MockitoExtension -import org.springframework.kafka.support.Acknowledgment -import java.util.UUID - -@ExtendWith(MockitoExtension::class) -internal class KafkaReadingServiceTest { - @Mock - private lateinit var tracer: Tracer - - @Mock - private lateinit var propagator: Propagator - - @Mock - private lateinit var dbSaver: DbSaver - - @Mock(answer = Answers.RETURNS_DEEP_STUBS) - private lateinit var record: ConsumerRecord - - @Mock - private lateinit var acknowledgment: Acknowledgment - - private lateinit var kafkaReadingService: KafkaReadingService - - @Test - fun listenAdditionalShouldEndBatchSpanEvenOnException() { - val headers = Mockito.mock(Headers::class.java) - - Mockito.`when`(record.headers()).thenReturn(headers) - - val spanBuilder = Mockito.mock(Span.Builder::class.java) - Mockito.`when`(propagator.extract(ArgumentMatchers.any(), ArgumentMatchers.any>())) - .thenReturn(spanBuilder) - - val spanFromRecord = Mockito.mock(Span::class.java) - Mockito.`when`(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder) - Mockito.`when`(spanBuilder.start()).thenReturn(spanFromRecord) - - val spanInScope = 
Mockito.mock(SpanInScope::class.java) - Mockito.`when`(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope) - val batchSpan = Mockito.mock(ScopedSpan::class.java) - Mockito.`when`(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan) - - kafkaReadingService = KafkaReadingService(tracer, propagator, dbSaver) - val testException = RuntimeException("DB error") - Mockito.doThrow(testException).`when`(dbSaver)?.processSingleRecord(record) - - val records = listOf(record) - - Assertions.assertThatThrownBy { kafkaReadingService.listenAdditional(records, acknowledgment) } - .isSameAs(testException) - Mockito.verify(tracer).startScopedSpan("batch-processing") - Mockito.verify(dbSaver).processSingleRecord(record) - Mockito.verify(acknowledgment, Mockito.never())?.acknowledge() - Mockito.verify(spanInScope).close() - } -} diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt new file mode 100644 index 00000000..d942b2b3 --- /dev/null +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt @@ -0,0 +1,134 @@ +/* +* Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+* https://github.com/mfvanek/spring-boot-open-telemetry-demo +* +* Licensed under the Apache License 2.0 +*/ + +package io.github.mfvanek.spring.boot3.kotlin.test.service + +import com.fasterxml.jackson.databind.ObjectMapper +import com.github.tomakehurst.wiremock.client.WireMock +import io.github.mfvanek.db.migrations.common.saver.DbSaver +import io.github.mfvanek.spring.boot3.kotlin.test.filters.TraceIdInResponseServletFilter.Companion.TRACE_ID_HEADER_NAME +import io.github.mfvanek.spring.boot3.kotlin.test.service.dto.CurrentTime +import io.github.mfvanek.spring.boot3.kotlin.test.service.dto.ParsedDateTime +import io.github.mfvanek.spring.boot3.kotlin.test.service.dto.toParsedDateTime +import io.github.mfvanek.spring.boot3.kotlin.test.support.JaegerInitializer +import io.github.mfvanek.spring.boot3.kotlin.test.support.KafkaInitializer +import io.github.mfvanek.spring.boot3.kotlin.test.support.PostgresInitializer +import io.github.mfvanek.spring.boot3.kotlin.test.support.SpanExporterConfiguration +import io.opentelemetry.api.GlobalOpenTelemetry +import io.opentelemetry.api.trace.StatusCode +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter +import io.opentelemetry.sdk.trace.data.StatusData +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.assertj.core.api.Assertions.assertThat +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.TestInstance +import org.mockito.ArgumentMatchers +import org.mockito.Mockito +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.autoconfigure.kafka.KafkaProperties +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock +import org.springframework.kafka.listener.KafkaMessageListenerContainer +import org.springframework.test.context.ActiveProfiles +import 
org.springframework.test.context.ContextConfiguration +import org.springframework.test.context.bean.override.mockito.MockitoBean +import org.springframework.test.web.reactive.server.WebTestClient +import org.springframework.web.util.UriBuilder +import java.time.Clock +import java.time.LocalDateTime +import java.util.* +import java.util.concurrent.ArrayBlockingQueue +import java.util.function.Function + +@ActiveProfiles("test") +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@ContextConfiguration( + classes = [SpanExporterConfiguration::class], + initializers = [KafkaInitializer::class, JaegerInitializer::class, PostgresInitializer::class] +) +@AutoConfigureWireMock(port = 0) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +internal open class KafkaTracingTest { + @Autowired + private lateinit var webTestClient: WebTestClient + + @Autowired + private lateinit var objectMapper: ObjectMapper + + @Autowired + private lateinit var clock: Clock + + @Autowired + private lateinit var spanExporter: InMemorySpanExporter + + @MockitoBean + private lateinit var dbSaver: DbSaver + + @Autowired + private lateinit var kafkaProperties: KafkaProperties + private lateinit var container: KafkaMessageListenerContainer + private val consumerRecords = ArrayBlockingQueue>(4) + + @BeforeAll + fun setUpKafkaConsumerAndResetTelemetry() { + GlobalOpenTelemetry.resetForTest() + container = io.github.mfvanek.spring.boot3.kotlin.test.controllers.setUpKafkaConsumer(kafkaProperties, consumerRecords) + } + + @AfterAll + fun tearDownKafkaConsumer() { + container.stop() + } + + @Test + fun closeAllSpansWhenException() { + val testException: Exception = RuntimeException("saving failed") + Mockito.doThrow( + testException + ).`when`(dbSaver).processSingleRecord(ArgumentMatchers.any>()) + stubOkResponse((LocalDateTime.now(clock).minusDays(1)).toParsedDateTime()) + + val result = webTestClient.get() + .uri( + Function { uriBuilder: UriBuilder? 
-> + uriBuilder!!.path("current-time") + .build() + } + ) + .exchange() + .expectStatus().isOk() + .expectHeader().exists(TRACE_ID_HEADER_NAME) + .expectBody(LocalDateTime::class.java) + .returnResult() + val traceId = result.responseHeaders.getFirst(TRACE_ID_HEADER_NAME) + val finishedSpans = spanExporter.finishedSpanItems + + assertThat(finishedSpans.map { it.traceId }).contains(traceId) + assertThat(finishedSpans.map { it.status }).contains(StatusData.create(StatusCode.ERROR, "saving failed")) + assertThat(finishedSpans.map { it.name }).contains("processing-record-from-kafka") + } + + private fun stubOkResponse(parsedDateTime: ParsedDateTime): String { + val zoneName = TimeZone.getDefault().id + stubOkResponse(zoneName, parsedDateTime) + return zoneName + } + + private fun stubOkResponse(zoneName: String, parsedDateTime: ParsedDateTime) { + val currentTime = CurrentTime(parsedDateTime) + WireMock.stubFor( + WireMock.get(WireMock.urlPathMatching("/$zoneName")) + .willReturn( + WireMock.aResponse() + .withStatus(200) + .withBody(objectMapper.writeValueAsString(currentTime)) + ) + ) + } +} diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/support/SpanExporterConfiguration.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/support/SpanExporterConfiguration.kt new file mode 100644 index 00000000..3e8ad45a --- /dev/null +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/support/SpanExporterConfiguration.kt @@ -0,0 +1,35 @@ +package io.github.mfvanek.spring.boot3.kotlin.test.support + +import io.opentelemetry.sdk.OpenTelemetrySdk +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter +import io.opentelemetry.sdk.trace.SdkTracerProvider +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor +import io.opentelemetry.sdk.trace.export.SpanExporter +import org.springframework.boot.test.context.TestConfiguration +import 
org.springframework.context.annotation.Bean +import org.springframework.context.annotation.Primary + +@TestConfiguration +class SpanExporterConfiguration { + @Bean + @Primary + fun spanExporter(): SpanExporter { + return InMemorySpanExporter.create() + } + + @Bean + @Primary + fun tracerProvider(spanExporter: SpanExporter): SdkTracerProvider { + return SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build() + } + + @Bean + @Primary + fun openTelemetrySdk(tracerProvider: SdkTracerProvider): OpenTelemetrySdk { + return OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .buildAndRegisterGlobal() + } +} diff --git a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/filters/TraceIdInResponseReactiveFilter.java b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/filters/TraceIdInResponseReactiveFilter.java index 07b50847..92e54eeb 100644 --- a/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/filters/TraceIdInResponseReactiveFilter.java +++ b/spring-boot-3-demo-app-reactive/src/main/java/io/github/mfvanek/spring/boot3/reactive/filters/TraceIdInResponseReactiveFilter.java @@ -22,8 +22,8 @@ @RequiredArgsConstructor public class TraceIdInResponseReactiveFilter implements WebFilter { + public static final String TRACE_ID_HEADER_NAME = "X-Trace-Id"; private static final Logger LOGGER = LoggerFactory.getLogger(TraceIdInResponseReactiveFilter.class); - private static final String TRACE_ID_HEADER_NAME = "X-Trace-Id"; @Override public Mono filter(ServerWebExchange exchange, WebFilterChain chain) { diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java index e79eb0df..d2f2a3f8 100644 --- 
a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java @@ -10,6 +10,7 @@ import io.github.mfvanek.spring.boot3.reactive.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.reactive.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.reactive.support.TestBase; +import io.opentelemetry.api.GlobalOpenTelemetry; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.Header; import org.awaitility.Awaitility; @@ -56,6 +57,7 @@ class TimeControllerTest extends TestBase { @BeforeAll void setUpKafkaConsumer() { + GlobalOpenTelemetry.resetForTest(); container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); } @@ -101,9 +103,10 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int assertThat(output.getAll()) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\""); - final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", + final List messagesFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", Map.of("traceId", traceId), String.class); - messageFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); + assertThat(messagesFromDb.size()).isEqualTo(2); + messagesFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); } @Order(2) diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java deleted file mode 100644 index 6bda9f4d..00000000 --- 
a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaReadingServiceTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2020-2025. Ivan Vakhrushev and others. - * https://github.com/mfvanek/spring-boot-open-telemetry-demo - * - * Licensed under the Apache License 2.0 - */ - -package io.github.mfvanek.spring.boot3.reactive.service; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.github.mfvanek.db.migrations.common.saver.DbSaver; -import io.micrometer.tracing.ScopedSpan; -import io.micrometer.tracing.Span; -import io.micrometer.tracing.Tracer; -import io.micrometer.tracing.Tracer.SpanInScope; -import io.micrometer.tracing.propagation.Propagator; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.common.header.Headers; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Answers; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.springframework.kafka.support.Acknowledgment; - -import java.util.List; -import java.util.UUID; - -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -class KafkaReadingServiceTest { - - @Mock - private Tracer tracer; - - @Mock - private Propagator propagator; - - @Mock - private DbSaver dbSaver; - - @Mock(answer = Answers.RETURNS_DEEP_STUBS) - private ConsumerRecord record; - - @Mock - private Acknowledgment acknowledgment; - - private KafkaReadingService kafkaReadingService; - - @Test - @SuppressWarnings("PMD.CloseResource") - @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")// to suppress warning when calling 
propagator.extract(any(), any()) - void listenAdditionalShouldEndBatchSpanEvenOnException() { - final Headers headers = mock(Headers.class); - - when(record.headers()).thenReturn(headers); - - final Span.Builder spanBuilder = mock(Span.Builder.class); - when(propagator.extract(any(), any())).thenReturn(spanBuilder); - - final Span spanFromRecord = mock(Span.class); - when(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder); - when(spanBuilder.start()).thenReturn(spanFromRecord); - - final SpanInScope spanInScope = mock(SpanInScope.class); - when(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope); - final ScopedSpan batchSpan = mock(ScopedSpan.class); - when(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan); - - kafkaReadingService = new KafkaReadingService(tracer, propagator, dbSaver); - final RuntimeException testException = new RuntimeException("DB error"); - doThrow(testException).when(dbSaver).processSingleRecord(record); - - final List> records = List.of(record); - - assertThatThrownBy(() -> kafkaReadingService.listenAdditional(records, acknowledgment)) - .isSameAs(testException); - verify(tracer).startScopedSpan("batch-processing"); - verify(dbSaver).processSingleRecord(record); - verify(acknowledgment, never()).acknowledge(); - verify(spanInScope).close(); - } -} diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java new file mode 100644 index 00000000..54fb9619 --- /dev/null +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.reactive.service; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.github.mfvanek.spring.boot3.reactive.service.dto.CurrentTime; +import io.github.mfvanek.spring.boot3.reactive.service.dto.ParsedDateTime; +import io.github.mfvanek.spring.boot3.reactive.support.JaegerInitializer; +import io.github.mfvanek.spring.boot3.reactive.support.KafkaConsumerUtils; +import io.github.mfvanek.spring.boot3.reactive.support.KafkaInitializer; +import io.github.mfvanek.spring.boot3.reactive.support.PostgresInitializer; +import io.github.mfvanek.spring.boot3.reactive.support.SpanExporterConfiguration; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import lombok.SneakyThrows; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock; +import org.springframework.kafka.listener.KafkaMessageListenerContainer; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.bean.override.mockito.MockitoBean; +import org.springframework.test.web.reactive.server.EntityExchangeResult; +import 
org.springframework.test.web.reactive.server.WebTestClient; + +import java.time.Clock; +import java.time.LocalDateTime; +import java.util.List; +import java.util.TimeZone; +import java.util.UUID; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import javax.annotation.Nonnull; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathMatching; +import static io.github.mfvanek.spring.boot3.reactive.filters.TraceIdInResponseReactiveFilter.TRACE_ID_HEADER_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; + +@SuppressWarnings("checkstyle:classfanoutcomplexity") +@ActiveProfiles("test") +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@ContextConfiguration( + classes = SpanExporterConfiguration.class, + initializers = {KafkaInitializer.class, JaegerInitializer.class, PostgresInitializer.class} +) +@AutoConfigureWireMock(port = 0) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class KafkaTracingTest { + + private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); + @Autowired + private WebTestClient webTestClient; + @Autowired + private ObjectMapper objectMapper; + @Autowired + private Clock clock; + @Autowired + private InMemorySpanExporter spanExporter; + @Autowired + private KafkaProperties kafkaProperties; + @MockitoBean + private DbSaver dbSaver; + private KafkaMessageListenerContainer container; + + @BeforeAll + void setUpKafkaConsumerAndResetTelemetry() { + GlobalOpenTelemetry.resetForTest(); + container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); + } + + @AfterAll + void tearDownKafkaConsumer() { + if 
(container != null) { + container.stop(); + container = null; + } + } + + @Test + void closeAllSpansWhenException() { + final Exception testException = new RuntimeException("saving failed"); + doThrow(testException).when(dbSaver).processSingleRecord(any()); + stubOkResponse(ParsedDateTime.from(LocalDateTime.now(clock).minusDays(1))); + + final EntityExchangeResult result = webTestClient.get() + .uri(uriBuilder -> uriBuilder.path("current-time") + .build()) + .exchange() + .expectStatus().isOk() + .expectHeader().exists(TRACE_ID_HEADER_NAME) + .expectBody(LocalDateTime.class) + .returnResult(); + final String traceId = result.getResponseHeaders().getFirst(TRACE_ID_HEADER_NAME); + final List finishedSpans = spanExporter.getFinishedSpanItems(); + + assertThat(finishedSpans.stream().map(SpanData::getTraceId)).contains(traceId); + assertThat(finishedSpans.stream().map(SpanData::getStatus)).contains(StatusData.create(StatusCode.ERROR, "saving failed")); + assertThat(finishedSpans.stream().map(SpanData::getName)).contains("processing-record-from-kafka"); + } + + protected void stubOkResponse(@Nonnull final ParsedDateTime parsedDateTime) { + final String zoneName = TimeZone.getDefault().getID(); + stubOkResponse(zoneName, parsedDateTime); + } + + @SneakyThrows + private void stubOkResponse(@Nonnull final String zoneName, @Nonnull final ParsedDateTime parsedDateTime) { + final CurrentTime currentTime = new CurrentTime(parsedDateTime); + stubFor(get(urlPathMatching("/" + zoneName)) + .willReturn(aResponse() + .withStatus(200) + .withBody(objectMapper.writeValueAsString(currentTime)) + )); + } +} + diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/support/SpanExporterConfiguration.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/support/SpanExporterConfiguration.java new file mode 100644 index 00000000..34202f53 --- /dev/null +++ 
b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/support/SpanExporterConfiguration.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. + * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.reactive.support; + +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Primary; + +@TestConfiguration +public class SpanExporterConfiguration { + + @Bean + @Primary + public SpanExporter spanExporter() { + return InMemorySpanExporter.create(); + } + + @Bean + @Primary + public SdkTracerProvider tracerProvider(SpanExporter spanExporter) { + return SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build(); + } + + @Bean + @Primary + public OpenTelemetrySdk openTelemetrySdk(SdkTracerProvider tracerProvider) { + return OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .buildAndRegisterGlobal(); + } +} diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java index c613f0d3..2b2a7676 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java @@ -10,6 +10,7 @@ import 
io.github.mfvanek.spring.boot3.test.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.test.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.test.support.TestBase; +import io.opentelemetry.api.GlobalOpenTelemetry; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.Header; import org.awaitility.Awaitility; @@ -48,14 +49,14 @@ @TestInstance(TestInstance.Lifecycle.PER_CLASS) class TimeControllerTest extends TestBase { - private KafkaMessageListenerContainer container; private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); - + private KafkaMessageListenerContainer container; @Autowired private KafkaProperties kafkaProperties; @BeforeAll void setUpKafkaConsumer() { + GlobalOpenTelemetry.resetForTest(); container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); } @@ -102,9 +103,10 @@ void spanShouldBeReportedInLogs(@Nonnull final CapturedOutput output) throws Int assertThat(output.getAll()) .contains("Received record: " + received.value() + " with traceId " + traceId) .contains("\"tenant.name\":\"ru-a1-private\""); - final List messageFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", + final List messagesFromDb = namedParameterJdbcTemplate.queryForList("select message from otel_demo.storage where trace_id = :traceId", Map.of("traceId", traceId), String.class); - messageFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); + assertThat(messagesFromDb.size()).isEqualTo(2); + messagesFromDb.forEach(it -> assertThat(it).isEqualTo(received.value())); } @Order(2) diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java deleted file mode 100644 index 0737fda9..00000000 --- 
a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaReadingServiceTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2020-2025. Ivan Vakhrushev and others. - * https://github.com/mfvanek/spring-boot-open-telemetry-demo - * - * Licensed under the Apache License 2.0 - */ - -package io.github.mfvanek.spring.boot3.test.service; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.github.mfvanek.db.migrations.common.saver.DbSaver; -import io.micrometer.tracing.ScopedSpan; -import io.micrometer.tracing.Span; -import io.micrometer.tracing.Tracer; -import io.micrometer.tracing.Tracer.SpanInScope; -import io.micrometer.tracing.propagation.Propagator; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.common.header.Headers; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Answers; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.springframework.kafka.support.Acknowledgment; - -import java.util.List; -import java.util.UUID; - -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -class KafkaReadingServiceTest { - - @Mock - private Tracer tracer; - - @Mock - private Propagator propagator; - - @Mock - private DbSaver dbSaver; - - @Mock(answer = Answers.RETURNS_DEEP_STUBS) - private ConsumerRecord record; - - @Mock - private Acknowledgment acknowledgment; - - private KafkaReadingService kafkaReadingService; - - @Test - @SuppressWarnings("PMD.CloseResource") - @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")// to suppress warning when calling 
propagator.extract(any(), any()) - void listenAdditionalShouldEndBatchSpanEvenOnException() { - final Headers headers = mock(Headers.class); - - when(record.headers()).thenReturn(headers); - - final Span.Builder spanBuilder = mock(Span.Builder.class); - when(propagator.extract(any(), any())).thenReturn(spanBuilder); - - final Span spanFromRecord = mock(Span.class); - when(spanBuilder.name("processing-record-from-kafka")).thenReturn(spanBuilder); - when(spanBuilder.start()).thenReturn(spanFromRecord); - - final SpanInScope spanInScope = mock(SpanInScope.class); - when(tracer.withSpan(spanFromRecord)).thenReturn(spanInScope); - final ScopedSpan batchSpan = mock(ScopedSpan.class); - when(tracer.startScopedSpan("batch-processing")).thenReturn(batchSpan); - - kafkaReadingService = new KafkaReadingService(tracer, propagator, dbSaver); - final RuntimeException testException = new RuntimeException("DB error"); - doThrow(testException).when(dbSaver).processSingleRecord(record); - - final List> records = List.of(record); - - assertThatThrownBy(() -> kafkaReadingService.listenAdditional(records, acknowledgment)) - .isSameAs(testException); - verify(tracer).startScopedSpan("batch-processing"); - verify(dbSaver).processSingleRecord(record); - verify(acknowledgment, never()).acknowledge(); - verify(spanInScope).close(); - } -} diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java new file mode 100644 index 00000000..1c9a9c43 --- /dev/null +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2020-2025. Ivan Vakhrushev and others. 
+ * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.service; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.github.mfvanek.db.migrations.common.saver.DbSaver; +import io.github.mfvanek.spring.boot3.test.service.dto.CurrentTime; +import io.github.mfvanek.spring.boot3.test.service.dto.ParsedDateTime; +import io.github.mfvanek.spring.boot3.test.support.JaegerInitializer; +import io.github.mfvanek.spring.boot3.test.support.KafkaConsumerUtils; +import io.github.mfvanek.spring.boot3.test.support.KafkaInitializer; +import io.github.mfvanek.spring.boot3.test.support.PostgresInitializer; +import io.github.mfvanek.spring.boot3.test.support.SpanExporterConfiguration; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.data.StatusData; +import lombok.SneakyThrows; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock; +import org.springframework.kafka.listener.KafkaMessageListenerContainer; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.bean.override.mockito.MockitoBean; +import org.springframework.test.web.reactive.server.EntityExchangeResult; +import org.springframework.test.web.reactive.server.WebTestClient; + 
+import java.time.Clock; +import java.time.LocalDateTime; +import java.util.List; +import java.util.TimeZone; +import java.util.UUID; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import javax.annotation.Nonnull; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathMatching; +import static io.github.mfvanek.spring.boot3.test.filters.TraceIdInResponseServletFilter.TRACE_ID_HEADER_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; + +@SuppressWarnings("checkstyle:classfanoutcomplexity") +@ActiveProfiles("test") +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) +@ContextConfiguration( + classes = SpanExporterConfiguration.class, + initializers = {KafkaInitializer.class, JaegerInitializer.class, PostgresInitializer.class} +) +@AutoConfigureWireMock(port = 0) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class KafkaTracingTest { + + @Autowired + protected WebTestClient webTestClient; + @Autowired + protected ObjectMapper objectMapper; + @Autowired + protected Clock clock; + @Autowired + private InMemorySpanExporter spanExporter; + @Autowired + private KafkaProperties kafkaProperties; + @MockitoBean + private DbSaver dbSaver; + private KafkaMessageListenerContainer container; + private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); + + @BeforeAll + void setUpKafkaConsumerAndResetTelemetry() { + GlobalOpenTelemetry.resetForTest(); + container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); + } + + @AfterAll + void tearDownKafkaConsumer() { + if (container != null) { + container.stop(); + container = null; + } + } 
+ + @Test + void closeAllSpansWhenException() { + final Exception testException = new RuntimeException("saving failed"); + doThrow(testException).when(dbSaver).processSingleRecord(any()); + stubOkResponse(ParsedDateTime.from(LocalDateTime.now(clock).minusDays(1))); + + final EntityExchangeResult result = webTestClient.get() + .uri(uriBuilder -> uriBuilder.path("current-time") + .build()) + .exchange() + .expectStatus().isOk() + .expectHeader().exists(TRACE_ID_HEADER_NAME) + .expectBody(LocalDateTime.class) + .returnResult(); + final String traceId = result.getResponseHeaders().getFirst(TRACE_ID_HEADER_NAME); + final List finishedSpans = spanExporter.getFinishedSpanItems(); + + assertThat(finishedSpans.stream().map(SpanData::getTraceId)).contains(traceId); + assertThat(finishedSpans.stream().map(SpanData::getStatus)).contains(StatusData.create(StatusCode.ERROR, "saving failed")); + assertThat(finishedSpans.stream().map(SpanData::getName)).contains("processing-record-from-kafka"); + } + + protected void stubOkResponse(@Nonnull final ParsedDateTime parsedDateTime) { + final String zoneName = TimeZone.getDefault().getID(); + stubOkResponse(zoneName, parsedDateTime); + } + + @SneakyThrows + private void stubOkResponse(@Nonnull final String zoneName, @Nonnull final ParsedDateTime parsedDateTime) { + final CurrentTime currentTime = new CurrentTime(parsedDateTime); + stubFor(get(urlPathMatching("/" + zoneName)) + .willReturn(aResponse() + .withStatus(200) + .withBody(objectMapper.writeValueAsString(currentTime)) + )); + } +} + diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/SpanExporterConfiguration.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/SpanExporterConfiguration.java new file mode 100644 index 00000000..9e173356 --- /dev/null +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/support/SpanExporterConfiguration.java @@ -0,0 +1,43 @@ +/* + * Copyright 
(c) 2020-2025. Ivan Vakhrushev and others. + * https://github.com/mfvanek/spring-boot-open-telemetry-demo + * + * Licensed under the Apache License 2.0 + */ + +package io.github.mfvanek.spring.boot3.test.support; + +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Primary; + +@TestConfiguration +public class SpanExporterConfiguration { + + @Bean + @Primary + public SpanExporter spanExporter() { + return InMemorySpanExporter.create(); + } + + @Bean + @Primary + public SdkTracerProvider tracerProvider(SpanExporter spanExporter) { + return SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(spanExporter)) + .build(); + } + + @Bean + @Primary + public OpenTelemetrySdk openTelemetrySdk(SdkTracerProvider tracerProvider) { + return OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .buildAndRegisterGlobal(); + } +} From 05a28e2f42e97bab7f11c9857303dff26eb9c1d6 Mon Sep 17 00:00:00 2001 From: Ivan Vakhrushev Date: Sun, 19 Oct 2025 13:40:26 +0400 Subject: [PATCH 12/13] Fix properties --- .../src/main/resources/application.yml | 3 ++- .../src/main/resources/application.yml | 4 ++-- spring-boot-3-demo-app/src/main/resources/application.yml | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml index c6146245..fa629b40 100644 --- a/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-kotlin/src/main/resources/application.yml @@ -36,8 +36,9 @@ spring: 
additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + batch-size: 32KB properties: - linger.ms: 100 + linger.ms: 20 batch.size: 10000 listener: observation-enabled: true # Important!!! diff --git a/spring-boot-3-demo-app-reactive/src/main/resources/application.yml b/spring-boot-3-demo-app-reactive/src/main/resources/application.yml index a3904d66..9bf356c6 100644 --- a/spring-boot-3-demo-app-reactive/src/main/resources/application.yml +++ b/spring-boot-3-demo-app-reactive/src/main/resources/application.yml @@ -29,9 +29,9 @@ spring: additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + batch-size: 32KB properties: - linger.ms: 100 - batch.size: 10000 + linger.ms: 20 listener: observation-enabled: true # Important!!! ack-mode: manual_immediate diff --git a/spring-boot-3-demo-app/src/main/resources/application.yml b/spring-boot-3-demo-app/src/main/resources/application.yml index aa478faf..49877a5c 100644 --- a/spring-boot-3-demo-app/src/main/resources/application.yml +++ b/spring-boot-3-demo-app/src/main/resources/application.yml @@ -36,9 +36,9 @@ spring: additional-topic: open.telemetry.sb3.queue.additional producer: key-serializer: org.apache.kafka.common.serialization.UUIDSerializer + batch-size: 32KB properties: - linger.ms: 100 - batch.size: 10000 + linger.ms: 20 listener: observation-enabled: true # Important!!! 
ack-mode: manual_immediate From 6e3d9dc411751f5161a58a5a4e44685462e5189a Mon Sep 17 00:00:00 2001 From: Ivan Vakhrushev Date: Sun, 19 Oct 2025 14:22:50 +0400 Subject: [PATCH 13/13] Refactor tests --- .../test/controllers/TimeControllerTest.kt | 2 -- .../kotlin/test/service/KafkaTracingTest.kt | 28 +++++-------------- .../controllers/TimeControllerTest.java | 2 -- .../reactive/service/KafkaTracingTest.java | 26 +---------------- .../test/controllers/TimeControllerTest.java | 3 +- .../boot3/test/service/KafkaTracingTest.java | 26 +---------------- 6 files changed, 10 insertions(+), 77 deletions(-) diff --git a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt index 913b284e..760be89b 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/controllers/TimeControllerTest.kt @@ -4,7 +4,6 @@ import io.github.mfvanek.spring.boot3.kotlin.test.filters.TraceIdInResponseServl import io.github.mfvanek.spring.boot3.kotlin.test.service.dto.toParsedDateTime import io.github.mfvanek.spring.boot3.kotlin.test.support.KafkaInitializer import io.github.mfvanek.spring.boot3.kotlin.test.support.TestBase -import io.opentelemetry.api.GlobalOpenTelemetry import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord @@ -50,7 +49,6 @@ class TimeControllerTest : TestBase() { @BeforeAll fun setUpKafkaConsumer() { - GlobalOpenTelemetry.resetForTest() container = setUpKafkaConsumer(kafkaProperties, consumerRecords) } diff --git 
a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt index d942b2b3..57bbdb0d 100644 --- a/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt +++ b/spring-boot-3-demo-app-kotlin/src/test/kotlin/io/github/mfvanek/spring/boot3/kotlin/test/service/KafkaTracingTest.kt @@ -24,17 +24,13 @@ import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter import io.opentelemetry.sdk.trace.data.StatusData import org.apache.kafka.clients.consumer.ConsumerRecord import org.assertj.core.api.Assertions.assertThat -import org.junit.jupiter.api.AfterAll import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.Test -import org.junit.jupiter.api.TestInstance import org.mockito.ArgumentMatchers import org.mockito.Mockito import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.autoconfigure.kafka.KafkaProperties import org.springframework.boot.test.context.SpringBootTest import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock -import org.springframework.kafka.listener.KafkaMessageListenerContainer import org.springframework.test.context.ActiveProfiles import org.springframework.test.context.ContextConfiguration import org.springframework.test.context.bean.override.mockito.MockitoBean @@ -43,7 +39,6 @@ import org.springframework.web.util.UriBuilder import java.time.Clock import java.time.LocalDateTime import java.util.* -import java.util.concurrent.ArrayBlockingQueue import java.util.function.Function @ActiveProfiles("test") @@ -53,8 +48,7 @@ import java.util.function.Function initializers = [KafkaInitializer::class, JaegerInitializer::class, PostgresInitializer::class] ) @AutoConfigureWireMock(port = 0) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -internal open 
class KafkaTracingTest { +internal class KafkaTracingTest { @Autowired private lateinit var webTestClient: WebTestClient @@ -70,20 +64,12 @@ internal open class KafkaTracingTest { @MockitoBean private lateinit var dbSaver: DbSaver - @Autowired - private lateinit var kafkaProperties: KafkaProperties - private lateinit var container: KafkaMessageListenerContainer - private val consumerRecords = ArrayBlockingQueue>(4) - - @BeforeAll - fun setUpKafkaConsumerAndResetTelemetry() { - GlobalOpenTelemetry.resetForTest() - container = io.github.mfvanek.spring.boot3.kotlin.test.controllers.setUpKafkaConsumer(kafkaProperties, consumerRecords) - } - - @AfterAll - fun tearDownKafkaConsumer() { - container.stop() + companion object { + @JvmStatic + @BeforeAll + fun resetTelemetry() { + GlobalOpenTelemetry.resetForTest() + } } @Test diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java index d2f2a3f8..5e819d6b 100644 --- a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/controllers/TimeControllerTest.java @@ -10,7 +10,6 @@ import io.github.mfvanek.spring.boot3.reactive.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.reactive.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.reactive.support.TestBase; -import io.opentelemetry.api.GlobalOpenTelemetry; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.Header; import org.awaitility.Awaitility; @@ -57,7 +56,6 @@ class TimeControllerTest extends TestBase { @BeforeAll void setUpKafkaConsumer() { - GlobalOpenTelemetry.resetForTest(); container = 
KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); } diff --git a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java index 54fb9619..2d94947f 100644 --- a/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java +++ b/spring-boot-3-demo-app-reactive/src/test/java/io/github/mfvanek/spring/boot3/reactive/service/KafkaTracingTest.java @@ -12,7 +12,6 @@ import io.github.mfvanek.spring.boot3.reactive.service.dto.CurrentTime; import io.github.mfvanek.spring.boot3.reactive.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.reactive.support.JaegerInitializer; -import io.github.mfvanek.spring.boot3.reactive.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.reactive.support.KafkaInitializer; import io.github.mfvanek.spring.boot3.reactive.support.PostgresInitializer; import io.github.mfvanek.spring.boot3.reactive.support.SpanExporterConfiguration; @@ -22,16 +21,11 @@ import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; import lombok.SneakyThrows; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock; -import org.springframework.kafka.listener.KafkaMessageListenerContainer; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ContextConfiguration; import 
org.springframework.test.context.bean.override.mockito.MockitoBean; @@ -42,9 +36,6 @@ import java.time.LocalDateTime; import java.util.List; import java.util.TimeZone; -import java.util.UUID; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; import javax.annotation.Nonnull; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; @@ -64,10 +55,8 @@ initializers = {KafkaInitializer.class, JaegerInitializer.class, PostgresInitializer.class} ) @AutoConfigureWireMock(port = 0) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) class KafkaTracingTest { - private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); @Autowired private WebTestClient webTestClient; @Autowired @@ -76,24 +65,12 @@ class KafkaTracingTest { private Clock clock; @Autowired private InMemorySpanExporter spanExporter; - @Autowired - private KafkaProperties kafkaProperties; @MockitoBean private DbSaver dbSaver; - private KafkaMessageListenerContainer container; @BeforeAll - void setUpKafkaConsumerAndResetTelemetry() { + static void resetTelemetry() { GlobalOpenTelemetry.resetForTest(); - container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); - } - - @AfterAll - void tearDownKafkaConsumer() { - if (container != null) { - container.stop(); - container = null; - } } @Test @@ -133,4 +110,3 @@ private void stubOkResponse(@Nonnull final String zoneName, @Nonnull final Parse )); } } - diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java index 2b2a7676..b26b5ce9 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/controllers/TimeControllerTest.java @@ -10,7 +10,6 @@ import 
io.github.mfvanek.spring.boot3.test.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.test.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.test.support.TestBase; -import io.opentelemetry.api.GlobalOpenTelemetry; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.header.Header; import org.awaitility.Awaitility; @@ -51,12 +50,12 @@ class TimeControllerTest extends TestBase { private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); private KafkaMessageListenerContainer container; + @Autowired private KafkaProperties kafkaProperties; @BeforeAll void setUpKafkaConsumer() { - GlobalOpenTelemetry.resetForTest(); container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); } diff --git a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java index 1c9a9c43..c5355f73 100644 --- a/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java +++ b/spring-boot-3-demo-app/src/test/java/io/github/mfvanek/spring/boot3/test/service/KafkaTracingTest.java @@ -12,7 +12,6 @@ import io.github.mfvanek.spring.boot3.test.service.dto.CurrentTime; import io.github.mfvanek.spring.boot3.test.service.dto.ParsedDateTime; import io.github.mfvanek.spring.boot3.test.support.JaegerInitializer; -import io.github.mfvanek.spring.boot3.test.support.KafkaConsumerUtils; import io.github.mfvanek.spring.boot3.test.support.KafkaInitializer; import io.github.mfvanek.spring.boot3.test.support.PostgresInitializer; import io.github.mfvanek.spring.boot3.test.support.SpanExporterConfiguration; @@ -22,16 +21,11 @@ import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.StatusData; import lombok.SneakyThrows; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import 
org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.contract.wiremock.AutoConfigureWireMock; -import org.springframework.kafka.listener.KafkaMessageListenerContainer; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.bean.override.mockito.MockitoBean; @@ -42,9 +36,6 @@ import java.time.LocalDateTime; import java.util.List; import java.util.TimeZone; -import java.util.UUID; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; import javax.annotation.Nonnull; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; @@ -64,7 +55,6 @@ initializers = {KafkaInitializer.class, JaegerInitializer.class, PostgresInitializer.class} ) @AutoConfigureWireMock(port = 0) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) class KafkaTracingTest { @Autowired @@ -75,25 +65,12 @@ class KafkaTracingTest { protected Clock clock; @Autowired private InMemorySpanExporter spanExporter; - @Autowired - private KafkaProperties kafkaProperties; @MockitoBean private DbSaver dbSaver; - private KafkaMessageListenerContainer container; - private final BlockingQueue> consumerRecords = new ArrayBlockingQueue<>(4); @BeforeAll - void setUpKafkaConsumerAndResetTelemetry() { + static void resetTelemetry() { GlobalOpenTelemetry.resetForTest(); - container = KafkaConsumerUtils.setUpKafkaConsumer(kafkaProperties, consumerRecords); - } - - @AfterAll - void tearDownKafkaConsumer() { - if (container != null) { - container.stop(); - container = null; - } } @Test @@ -133,4 +110,3 @@ private void stubOkResponse(@Nonnull final String 
zoneName, @Nonnull final Parse )); } } -