Merged
1,415 changes: 1,371 additions & 44 deletions docs/instrumentation-list.yaml

Large diffs are not rendered by default.

18 changes: 17 additions & 1 deletion instrumentation-docs/ci-collect.sh
@@ -8,6 +8,7 @@ set -euo pipefail
# shellcheck source=instrumentation-docs/instrumentations.sh
source "$(dirname "$0")/instrumentations.sh"

# Collect standard and colima tasks (without testLatestDeps)
ALL_TASKS=()
for task in "${INSTRUMENTATIONS[@]}"; do
ALL_TASKS+=(":instrumentation:${task}")
@@ -16,8 +17,23 @@ for task in "${COLIMA_INSTRUMENTATIONS[@]}"; do
ALL_TASKS+=(":instrumentation:${task}")
done

echo "Processing instrumentations..."
echo "Processing standard instrumentations..."
./gradlew "${ALL_TASKS[@]}" \
-PcollectMetadata=true \
--rerun-tasks --continue

# Collect and run tasks that need testLatestDeps
LATEST_DEPS_TASKS=()
for task in "${TEST_LATEST_DEPS_INSTRUMENTATIONS[@]}"; do
LATEST_DEPS_TASKS+=(":instrumentation:${task}")
done

if [[ ${#LATEST_DEPS_TASKS[@]} -gt 0 ]]; then
echo "Processing instrumentations with -PtestLatestDeps=true..."
./gradlew "${LATEST_DEPS_TASKS[@]}" \
-PcollectMetadata=true \
-PtestLatestDeps=true \
--rerun-tasks --continue
fi

echo "Telemetry file regeneration complete."
27 changes: 27 additions & 0 deletions instrumentation-docs/collect.sh
@@ -134,6 +134,25 @@ run_gradle_tasks() {
--rerun-tasks --continue --no-parallel
}

run_gradle_tasks_with_latest_deps() {
local -a tasks=("$@")

if [[ ${#tasks[@]} -eq 0 ]]; then
echo "No tasks to run"
return 0
fi

echo
echo "Running Gradle tasks with -PtestLatestDeps=true:"
printf ' %s\n' "${tasks[@]}"
echo

./gradlew "${tasks[@]}" \
-PcollectMetadata=true \
-PtestLatestDeps=true \
--rerun-tasks --continue --no-parallel
}

# Cleans any stray .telemetry directories left in the repo.
find_and_remove_all_telemetry() {
echo "Removing stray .telemetry directories..."
@@ -153,6 +172,14 @@ main() {
done < <(process_descriptors "${INSTRUMENTATIONS[@]}")
run_gradle_tasks "${gradle_tasks[@]}"

# Process instrumentations requiring testLatestDeps
echo "Processing instrumentations with -PtestLatestDeps=true..."
gradle_tasks=()
while IFS= read -r line; do
gradle_tasks+=("$line")
done < <(process_descriptors "${TEST_LATEST_DEPS_INSTRUMENTATIONS[@]}")
run_gradle_tasks_with_latest_deps "${gradle_tasks[@]}"

# Setup colima if needed
setup_colima

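
After a local collection run, the generated output can be inspected before regenerating the docs. This is a sketch that assumes the .telemetry directories mentioned by find_and_remove_all_telemetry are written somewhere under the repository root:

# List any .telemetry directories produced (or left behind) by a collection run.
find . -type d -name ".telemetry" -not -path "./.git/*" -print
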
11 changes: 11 additions & 0 deletions instrumentation-docs/instrumentations.sh
@@ -146,6 +146,7 @@ readonly INSTRUMENTATIONS=(
"jsf:jsf-mojarra-3.0:javaagent:test"
"jsf:jsf-myfaces-1.2:javaagent:myfaces2Test"
"jsf:jsf-myfaces-3.0:javaagent:test"
"kafka:kafka-clients:kafka-clients-2.6:library:test"
"kafka:kafka-connect-2.6:testing:test"
"nats:nats-2.17:javaagent:test"
"nats:nats-2.17:javaagent:testExperimental"
@@ -222,3 +223,13 @@ readonly COLIMA_INSTRUMENTATIONS=(
"oracle-ucp-11.2:javaagent:testStableSemconv"
"spring:spring-jms:spring-jms-6.0:javaagent:test"
)

# Some instrumentation test suites need to run with -PtestLatestDeps=true to collect
# metrics telemetry or test against latest library versions.
# shellcheck disable=SC2034
readonly TEST_LATEST_DEPS_INSTRUMENTATIONS=(
"kafka:kafka-clients:kafka-clients-0.11:javaagent:test"
"kafka:kafka-clients:kafka-clients-0.11:javaagent:testExperimental"
"kafka:kafka-streams-0.11:javaagent:test"
"kafka:kafka-streams-0.11:javaagent:testExperimental"
)
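
Before CI picks these up, each descriptor can be checked against the Gradle task graph. A hedged sketch, run from the repository root, assuming the ":instrumentation:" prefix convention used by ci-collect.sh and Gradle's built-in help --task lookup, which fails for unknown task paths:

# Sanity-check that each latest-deps descriptor resolves to a real Gradle task.
source instrumentation-docs/instrumentations.sh
for task in "${TEST_LATEST_DEPS_INSTRUMENTATIONS[@]}"; do
  if ./gradlew -q help --task ":instrumentation:${task}" > /dev/null 2>&1; then
    echo "ok: ${task}"
  else
    echo "unresolved: ${task}"
  fi
done
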
Original file line number Diff line number Diff line change
@@ -30,9 +30,7 @@ tasks {
usesService(gradle.sharedServices.registrations["testcontainersBuildService"].service)

systemProperty("testLatestDeps", findProperty("testLatestDeps") as Boolean)

// TODO run tests both with and without experimental span attributes
jvmArgs("-Dotel.instrumentation.kafka.experimental-span-attributes=true")
systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false")
}

val testPropagationDisabled by registering(Test::class) {
@@ -54,6 +52,20 @@ tasks {
include("**/KafkaClientSuppressReceiveSpansTest.*")
}

val testExperimental by registering(Test::class) {
testClassesDirs = sourceSets.test.get().output.classesDirs
classpath = sourceSets.test.get().runtimeClasspath

filter {
excludeTestsMatching("KafkaClientPropagationDisabledTest")
excludeTestsMatching("KafkaClientSuppressReceiveSpansTest")
}
jvmArgs("-Dotel.instrumentation.messaging.experimental.receive-telemetry.enabled=true")

jvmArgs("-Dotel.instrumentation.kafka.experimental-span-attributes=true")
systemProperty("metadataConfig", "otel.instrumentation.kafka.experimental-span-attributes=true")
}

test {
filter {
excludeTestsMatching("KafkaClientPropagationDisabledTest")
@@ -63,7 +75,7 @@ }
}

check {
dependsOn(testPropagationDisabled, testReceiveSpansDisabled)
dependsOn(testPropagationDisabled, testReceiveSpansDisabled, testExperimental)
}
}

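
To exercise the new suite locally the way the docs pipeline does, the task registered above can be run directly. A sketch, assuming this build file belongs to the kafka-clients-0.11 javaagent module listed in TEST_LATEST_DEPS_INSTRUMENTATIONS; -PtestLatestDeps=true mirrors how CI collects the metrics telemetry:

# Run only the experimental suite, with metadata collection enabled.
./gradlew :instrumentation:kafka:kafka-clients:kafka-clients-0.11:javaagent:testExperimental \
  -PcollectMetadata=true \
  -PtestLatestDeps=true \
  --rerun-tasks
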
Original file line number Diff line number Diff line change
@@ -26,6 +26,7 @@
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.assertj.core.api.AbstractIterableAssert;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -34,6 +35,9 @@

class KafkaClientDefaultTest extends KafkaClientPropagationBaseTest {

private static final boolean testLatestDeps =
Boolean.parseBoolean(System.getProperty("testLatestDeps", "true"));

@RegisterExtension
static final InstrumentationExtension testing = AgentInstrumentationExtension.create();

@@ -110,6 +114,13 @@ void testKafkaProducerAndConsumerSpan(boolean testHeaders) throws Exception {
.hasAttributesSatisfyingExactly(
processAttributes("10", greeting, testHeaders, false)),
span -> span.hasName("processing").hasParent(trace.getSpan(1))));

if (testLatestDeps) {
testing.waitAndAssertMetrics(
"io.opentelemetry.kafka-clients-0.11",
"kafka.producer.record_send_total",
AbstractIterableAssert::isNotEmpty);
}
}

@DisplayName("test pass through tombstone")
@@ -155,6 +166,7 @@ void testPassThroughTombstone()
processAttributes(null, null, false, false))));
}

@ParameterizedTest
Member Author comment: random fix: this test wasn't being run

@DisplayName("test records(TopicPartition) kafka consume")
@ValueSource(booleans = {true, false})
void testRecordsWithTopicPartitionKafkaConsume(boolean testListIterator)
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
description: >
This instrumentation enables messaging spans and metrics for Apache Kafka 0.11 clients.
It automatically traces message production and consumption, propagates context, and emits metrics for production and consumption.
description: This instrumentation enables messaging spans for Kafka producers and consumers, and collects internal Kafka client metrics.
display_name: Apache Kafka Client
library_link: https://kafka.apache.org/
semantic_conventions:
- MESSAGING_SPANS
configurations:
- name: otel.instrumentation.kafka.producer-propagation.enabled
description: Enable context propagation for kafka message producers.
description: Enable context propagation for Kafka message producers.
type: boolean
default: true
- name: otel.instrumentation.kafka.experimental-span-attributes
description: Enables the capture of the experimental consumer attribute "kafka.record.queue_time_ms"
description: Enables the capture of the experimental consumer attribute `kafka.record.queue_time_ms`.
type: boolean
default: false
- name: otel.instrumentation.messaging.experimental.capture-headers
Original file line number Diff line number Diff line change
@@ -5,6 +5,7 @@

package io.opentelemetry.instrumentation.kafkaclients.common.v0_11.internal;

import static io.opentelemetry.api.common.AttributeKey.longKey;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies;
import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_BATCH_MESSAGE_COUNT;
@@ -17,6 +18,7 @@
import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_MESSAGE_BODY_SIZE;
import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION;
import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM;
import static org.assertj.core.api.Assertions.assertThat;

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.sdk.testing.assertj.AttributeAssertion;
@@ -71,6 +73,9 @@ public abstract class KafkaClientBaseTest {
protected Consumer<Integer, String> consumer;
private final CountDownLatch consumerReady = new CountDownLatch(1);

static final boolean isExperimentalEnabled =
Boolean.getBoolean("otel.instrumentation.kafka.experimental-span-attributes");

public static final int partition = 0;
public static final TopicPartition topicPartition = new TopicPartition(SHARED_TOPIC, partition);

@@ -230,8 +235,11 @@ protected static List<AttributeAssertion> processAttributes(
satisfies(MESSAGING_DESTINATION_PARTITION_ID, AbstractStringAssert::isNotEmpty),
satisfies(MESSAGING_KAFKA_MESSAGE_OFFSET, AbstractLongAssert::isNotNegative),
satisfies(
AttributeKey.longKey("kafka.record.queue_time_ms"),
AbstractLongAssert::isNotNegative)));
longKey("kafka.record.queue_time_ms"),
val ->
val.satisfiesAnyOf(
v -> assertThat(v).isNotNegative(),
v -> assertThat(isExperimentalEnabled).isFalse()))));
// consumer group is not available in version 0.11
if (Boolean.getBoolean("testLatestDeps")) {
assertions.add(equalTo(MESSAGING_KAFKA_CONSUMER_GROUP, "test"));
Original file line number Diff line number Diff line change
@@ -20,6 +20,7 @@ tasks {
withType<Test>().configureEach {
usesService(gradle.sharedServices.registrations["testcontainersBuildService"].service)
systemProperty("testLatestDeps", findProperty("testLatestDeps") as Boolean)
systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false")
}

test {
Original file line number Diff line number Diff line change
@@ -32,13 +32,15 @@ abstract class AbstractInterceptorsTest extends KafkaClientBaseTest {
public Map<String, Object> producerProps() {
Map<String, Object> props = super.producerProps();
props.putAll(kafkaTelemetry().producerInterceptorConfigProperties());
props.putAll(kafkaTelemetry().metricConfigProperties());
return props;
}

@Override
public Map<String, Object> consumerProps() {
Map<String, Object> props = super.consumerProps();
props.putAll(kafkaTelemetry().consumerInterceptorConfigProperties());
props.putAll(kafkaTelemetry().metricConfigProperties());
return props;
}

Original file line number Diff line number Diff line change
@@ -25,6 +25,7 @@
import io.opentelemetry.sdk.trace.data.LinkData;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicReference;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.AbstractLongAssert;
import org.assertj.core.api.AbstractStringAssert;

@@ -125,5 +126,10 @@ void assertTraces() {
trace.hasSpansSatisfyingExactly(
span ->
span.hasName("producer callback").hasKind(SpanKind.INTERNAL).hasNoParent()));

testing.waitAndAssertMetrics(
"io.opentelemetry.kafka-clients-2.6",
"kafka.producer.record_send_total",
AbstractIterableAssert::isNotEmpty);
}
}
Original file line number Diff line number Diff line change
@@ -1,14 +1,5 @@
description: >
This instrumentation provides a library integration that enables messaging spans and metrics for Apache Kafka 2.6+ clients.
description: This standalone instrumentation enables messaging spans for Kafka producers and consumers, and collects internal Kafka client metrics.
display_name: Apache Kafka Client
library_link: https://kafka.apache.org/
configurations:
Member Author comment: removing these configs as they were added in error, this is standalone library instrumentation and doesn't use them

- name: otel.instrumentation.messaging.experimental.capture-headers
description: A comma-separated list of header names to capture as span attributes.
type: list
default: ''
- name: otel.instrumentation.messaging.experimental.receive-telemetry.enabled
description: >
Enables experimental receive telemetry, which will cause consumers to start a new trace, with
only a span link connecting it to the producer trace.
type: boolean
default: false
semantic_conventions:
- MESSAGING_SPANS
5 changes: 3 additions & 2 deletions instrumentation/kafka/kafka-connect-2.6/metadata.yaml
@@ -1,4 +1,5 @@
description: "This instrumentation enables messaging spans for Kafka Connect sink tasks."
description: This instrumentation enables messaging spans for Kafka Connect sink tasks.
display_name: Apache Kafka Connect
library_link: https://kafka.apache.org/documentation/#connect
semantic_conventions:
- MESSAGING_SPANS
library_link: https://kafka.apache.org/documentation/#connect
Original file line number Diff line number Diff line change
@@ -28,9 +28,7 @@ tasks {
usesService(gradle.sharedServices.registrations["testcontainersBuildService"].service)

systemProperty("testLatestDeps", findProperty("testLatestDeps") as Boolean)

// TODO run tests both with and without experimental span attributes
jvmArgs("-Dotel.instrumentation.kafka.experimental-span-attributes=true")
systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false")
}

val testReceiveSpansDisabled by registering(Test::class) {
@@ -42,6 +40,19 @@ tasks {
include("**/KafkaStreamsSuppressReceiveSpansTest.*")
}

val testExperimental by registering(Test::class) {
testClassesDirs = sourceSets.test.get().output.classesDirs
classpath = sourceSets.test.get().runtimeClasspath

filter {
excludeTestsMatching("KafkaStreamsSuppressReceiveSpansTest")
}
jvmArgs("-Dotel.instrumentation.messaging.experimental.receive-telemetry.enabled=true")

jvmArgs("-Dotel.instrumentation.kafka.experimental-span-attributes=true")
systemProperty("metadataConfig", "otel.instrumentation.kafka.experimental-span-attributes=true")
}

test {
filter {
excludeTestsMatching("KafkaStreamsSuppressReceiveSpansTest")
Original file line number Diff line number Diff line change
@@ -60,6 +60,9 @@ abstract class KafkaStreamsBaseTest {
static Consumer<Integer, String> consumer;
static CountDownLatch consumerReady = new CountDownLatch(1);

protected static final boolean isExperimental =
Boolean.getBoolean("otel.instrumentation.kafka.experimental-span-attributes");

@BeforeAll
static void setup() throws ExecutionException, InterruptedException, TimeoutException {
kafka =
Original file line number Diff line number Diff line change
@@ -156,10 +156,14 @@ void testKafkaProduceAndConsumeWithStreamsInBetween() throws Exception {
k -> k.isInstanceOf(String.class)),
equalTo(MESSAGING_KAFKA_MESSAGE_OFFSET, 0),
equalTo(MESSAGING_KAFKA_MESSAGE_KEY, "10"),
satisfies(
longKey("kafka.record.queue_time_ms"),
k -> k.isGreaterThanOrEqualTo(0)),
equalTo(stringKey("asdf"), "testing")));

if (isExperimental) {
assertions.add(
satisfies(
longKey("kafka.record.queue_time_ms"), k -> k.isGreaterThanOrEqualTo(0)));
}

if (Boolean.getBoolean("testLatestDeps")) {
assertions.add(equalTo(MESSAGING_KAFKA_CONSUMER_GROUP, "test-application"));
}
@@ -224,10 +228,14 @@ void testKafkaProduceAndConsumeWithStreamsInBetween() throws Exception {
k -> k.isInstanceOf(String.class)),
equalTo(MESSAGING_KAFKA_MESSAGE_OFFSET, 0),
equalTo(MESSAGING_KAFKA_MESSAGE_KEY, "10"),
satisfies(
longKey("kafka.record.queue_time_ms"),
k -> k.isGreaterThanOrEqualTo(0)),
equalTo(longKey("testing"), 123)));
if (isExperimental) {
assertions.add(
satisfies(
longKey("kafka.record.queue_time_ms"),
k -> k.isGreaterThanOrEqualTo(0)));
}

if (Boolean.getBoolean("testLatestDeps")) {
assertions.add(equalTo(MESSAGING_KAFKA_CONSUMER_GROUP, "test"));
}