From 96e4ea6afcd31a7bcf9fa281342f925926869f3e Mon Sep 17 00:00:00 2001 From: cirilla-zmh Date: Tue, 21 Oct 2025 00:29:54 +0800 Subject: [PATCH 1/2] draft: init spring-ai instrumentation Change-Id: Iba625f3b4a0c5528c86d83e42a1a884b2e139fd7 Change-Id: I94b2d7d49d68e720b7637d73f5de973640cd13a6 --- .../genai/GenAiAgentAttributesExtractor.java | 54 ++ .../genai/GenAiAgentAttributesGetter.java | 15 + .../semconv/genai/GenAiAttributesGetter.java | 4 +- .../semconv/genai/GenAiMessagesExtractor.java | 217 ++++++++ .../semconv/genai/GenAiMessagesProvider.java | 22 + .../genai/GenAiOperationAttributesGetter.java | 9 + .../semconv/genai/GenAiSpanNameExtractor.java | 10 +- .../incubator/AgentIncubatingAttributes.java | 13 + .../incubator/GenAiIncubatingAttributes.java | 80 +++ .../GenAiToolIncubatingAttributes.java | 15 + .../tool/GenAiToolAttributesExtractor.java | 66 +++ .../genai/tool/GenAiToolAttributesGetter.java | 23 + .../reactor-3.1/bootstrap/build.gradle.kts | 3 + .../spring-ai-1.0/javaagent/build.gradle.kts | 71 +++ .../v1_0/SpringAiInstrumentationModule.java | 34 ++ .../spring/ai/v1_0/SpringAiSingletons.java | 26 + .../spring/ai/v1_0/SpringAiTelemetry.java | 47 ++ .../ai/v1_0/SpringAiTelemetryBuilder.java | 94 ++++ .../client/ChatClientAttributesGetter.java | 215 ++++++++ .../chat/client/ChatClientMessageBuffer.java | 137 +++++ .../client/ChatClientMessagesProvider.java | 200 ++++++++ .../chat/client/ChatClientStreamListener.java | 137 +++++ .../chat/client/ChatClientStreamWrapper.java | 23 + ...efaultCallResponseSpecInstrumentation.java | 79 +++ ...aultStreamResponseSpecInstrumentation.java | 86 ++++ ...aultToolCallingManagerInstrumentation.java | 85 +++ .../v1_0/tool/ToolCallAttributesGetter.java | 51 ++ .../spring/ai/v1_0/tool/ToolCallContext.java | 41 ++ .../spring/ai/v1_0/tool/ToolCallRequest.java | 56 ++ .../tool/ToolCallbackInstrumentation.java | 85 +++ .../spring/ai/v1_0/ChatClientTest.java | 18 + .../spring-ai/spring-ai-1.0/metadata.yaml | 0 .../spring-ai-1.0/testing/build.gradle.kts | 15 + .../spring/ai/v1_0/AbstractChatClient.java | 354 +++++++++++++ .../spring/ai/v1_0/AbstractSpringAiTest.java | 123 +++++ ....ai.v1_0.abstractchatclienttest.basic.yaml | 41 ++ ...ai.v1_0.abstractchatclienttest.stream.yaml | 47 ++ ...bstractchatclienttest.streamtoolcalls.yaml | 198 +++++++ ...v1_0.abstractchatclienttest.toolcalls.yaml | 163 ++++++ ...0.abstractchatclienttest.with400error.yaml | 38 ++ .../javaagent/build.gradle.kts | 69 +++ .../v1_0/ChatModelAttributesGetter.java | 165 ++++++ .../openai/v1_0/ChatModelMessageBuffer.java | 151 ++++++ .../v1_0/ChatModelMessagesProvider.java | 219 ++++++++ .../openai/v1_0/ChatModelStreamListener.java | 140 +++++ .../openai/v1_0/ChatModelStreamWrapper.java | 30 ++ .../openai/v1_0/OpenAiApiInstrumentation.java | 125 +++++ .../v1_0/OpenAiChatModelInstrumentation.java | 63 +++ .../SpringAiOpenaiInstrumentationModule.java | 26 + .../openai/v1_0/SpringAiOpenaiSingletons.java | 26 + .../openai/v1_0/SpringAiOpenaiTelemetry.java | 44 ++ .../v1_0/SpringAiOpenaiTelemetryBuilder.java | 86 ++++ .../ai/openai/v1_0/ChatCompletionTest.java | 18 + .../spring-ai-openai-1.0/metadata.yaml | 0 .../testing/build.gradle.kts | 14 + .../v1_0/AbstractChatCompletionTest.java | 484 ++++++++++++++++++ .../v1_0/AbstractSpringAiOpenAiTest.java | 61 +++ ...hatcompletiontest.alltheclientoptions.yaml | 47 ++ ...v1_0.abstractchatcompletiontest.basic.yaml | 41 ++ ...actchatcompletiontest.multiplechoices.yaml | 44 ++ ...1_0.abstractchatcompletiontest.stream.yaml | 47 ++ 
...tcompletiontest.streammultiplechoices.yaml | 62 +++ ...actchatcompletiontest.streamtoolcalls.yaml | 82 +++ ....abstractchatcompletiontest.toolcalls.yaml | 159 ++++++ ...stractchatcompletiontest.with400error.yaml | 38 ++ settings.gradle.kts | 5 + 66 files changed, 5234 insertions(+), 7 deletions(-) create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java create mode 100644 instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java create mode 100644 instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java create 
mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/metadata.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java create mode 100644 
instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/metadata.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml create mode 100644 
instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml
 create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml
 create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml
 create mode 100644 instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml

diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java
new file mode 100644
index 000000000000..1b0098e37b93
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java
@@ -0,0 +1,54 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_DESCRIPTION;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_ID;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_NAME;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_DATA_SOURCE_ID;
+import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet;
+
+import io.opentelemetry.api.common.AttributesBuilder;
+import io.opentelemetry.context.Context;
+import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor;
+import javax.annotation.Nullable;
+
+/**
+ * Extractor of GenAI Agent
+ * attributes.
+ *
+ * <p>This class delegates to a type-specific {@link GenAiAgentAttributesGetter} for individual attribute
+ * extraction from request/response objects.
+ */
+public final class GenAiAgentAttributesExtractor<REQUEST, RESPONSE>
+    implements AttributesExtractor<REQUEST, RESPONSE> {
+
+  /** Creates the GenAI Agent attributes extractor. */
+  public static <REQUEST, RESPONSE> AttributesExtractor<REQUEST, RESPONSE> create(
+      GenAiAgentAttributesGetter<REQUEST> attributesGetter) {
+    return new GenAiAgentAttributesExtractor<>(attributesGetter);
+  }
+
+  private final GenAiAgentAttributesGetter<REQUEST> getter;
+
+  private GenAiAgentAttributesExtractor(
+      GenAiAgentAttributesGetter<REQUEST> getter) {
+    this.getter = getter;
+  }
+
+  @Override
+  public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) {
+    internalSet(attributes, GEN_AI_AGENT_ID, getter.getId(request));
+    internalSet(attributes, GEN_AI_AGENT_NAME, getter.getName(request));
+    internalSet(attributes, GEN_AI_AGENT_DESCRIPTION, getter.getDescription(request));
+    internalSet(attributes, GEN_AI_DATA_SOURCE_ID, getter.getDataSourceId(request));
+  }
+
+  @Override
+  public void onEnd(
+      AttributesBuilder attributes,
+      Context context,
+      REQUEST request,
+      @Nullable RESPONSE response,
+      @Nullable Throwable error) {
+    // do nothing
+  }
+}
\ No newline at end of file
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java
new file mode 100644
index 000000000000..e0f97405ce9b
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java
@@ -0,0 +1,17 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import javax.annotation.Nullable;
+
+public interface GenAiAgentAttributesGetter<REQUEST> {
+
+  String getName(REQUEST request);
+
+  @Nullable
+  String getDescription(REQUEST request);
+
+  @Nullable
+  String getId(REQUEST request);
+
+  @Nullable
+  String getDataSourceId(REQUEST request);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
index ed2e48cd8024..d333f8c0eef5 100644
--- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
@@ -15,8 +15,8 @@
  * library/framework. It will be used by the {@link GenAiAttributesExtractor} to obtain the various
  * GenAI attributes in a type-generic way.
*/ -public interface GenAiAttributesGetter { - String getOperationName(REQUEST request); +public interface GenAiAttributesGetter + extends GenAiOperationAttributesGetter { String getSystem(REQUEST request); diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java new file mode 100644 index 000000000000..4a13f9615797 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java @@ -0,0 +1,217 @@ +package io.opentelemetry.instrumentation.api.incubator.semconv.genai; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_TYPE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_CHOICE_COUNT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_FREQUENCY_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MAX_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_PRESENCE_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_SEED; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_STOP_SEQUENCES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_K; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_P; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_SYSTEM_INSTRUCTIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import static 
io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiEventName.GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS; +import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; +import static io.opentelemetry.semconv.trace.attributes.SemanticAttributes.EVENT_NAME; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.logs.LogRecordBuilder; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.aliyun.common.JsonMarshaler; +import io.opentelemetry.instrumentation.api.aliyun.common.provider.GlobalInstanceHolder; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions.CaptureMessageStrategy; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; +import io.opentelemetry.instrumentation.api.log.genai.GenAiEventLoggerProvider; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.annotation.Nullable; + +public class GenAiMessagesExtractor + implements AttributesExtractor { + + private static final Logger LOGGER = Logger.getLogger(GenAiMessagesExtractor.class.getName()); + + /** Creates the GenAI attributes extractor. 
*/ + public static AttributesExtractor create( + GenAiAttributesGetter attributesGetter, + GenAiMessagesProvider messagesProvider, + MessageCaptureOptions messageCaptureOptions, + String instrumentationName) { + return new GenAiMessagesExtractor<>(attributesGetter, messagesProvider, messageCaptureOptions, instrumentationName); + } + + private final MessageCaptureOptions messageCaptureOptions; + + private final GenAiAttributesGetter getter; + + private final GenAiMessagesProvider messagesProvider; + + private final String instrumentationName; + + private final AtomicBoolean lazyInit = new AtomicBoolean(false); + + private JsonMarshaler jsonMarshaler; + + private io.opentelemetry.api.logs.Logger eventLogger; + + private GenAiMessagesExtractor( + GenAiAttributesGetter getter, + GenAiMessagesProvider messagesProvider, + MessageCaptureOptions messageCaptureOptions, + String instrumentationName) { + this.getter = getter; + this.messagesProvider = messagesProvider; + this.messageCaptureOptions = messageCaptureOptions; + this.instrumentationName = instrumentationName; + } + + @Override + public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { + tryInit(); + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals(messageCaptureOptions.captureMessageStrategy())) { + SystemInstructions systemInstructions = messagesProvider.systemInstructions(request, null); + if (systemInstructions != null) { + internalSet(attributes, GEN_AI_SYSTEM_INSTRUCTIONS, toJsonString(systemInstructions.getSerializableObject())); + } + + InputMessages inputMessages = messagesProvider.inputMessages(request, null); + if (inputMessages != null) { + internalSet(attributes, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + } + + ToolDefinitions toolDefinitions = messagesProvider.toolDefinitions(request, null); + if (toolDefinitions != null) { + internalSet(attributes, GEN_AI_TOOL_DEFINITIONS, toJsonString(toolDefinitions.getSerializableObject())); + } + } + } + + @Override + public void onEnd( + AttributesBuilder attributes, + Context context, + REQUEST request, + @Nullable RESPONSE response, + @Nullable Throwable error) { + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals(messageCaptureOptions.captureMessageStrategy())) { + OutputMessages outputMessages = messagesProvider.outputMessages(request, response); + if (outputMessages != null) { + internalSet(attributes, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); + } + } else if (CaptureMessageStrategy.EVENT.equals(messageCaptureOptions.captureMessageStrategy())) { + emitInferenceEvent(context, request, response); + } + } + + private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESPONSE response) { + if (eventLogger != null) { + LogRecordBuilder builder = eventLogger.logRecordBuilder() + .setAttribute(EVENT_NAME, GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS) + .setContext(context); + + SystemInstructions systemInstructions = messagesProvider.systemInstructions(request, + response); + if (systemInstructions != null) { + internalSetLogAttribute(builder, GEN_AI_SYSTEM_INSTRUCTIONS, toJsonString(systemInstructions.getSerializableObject())); + } + InputMessages inputMessages = messagesProvider.inputMessages(request, response); + if (inputMessages != null) { + internalSetLogAttribute(builder, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + } + ToolDefinitions toolDefinitions = messagesProvider.toolDefinitions(request, null); + if (toolDefinitions != 
null) { + internalSetLogAttribute(builder, GEN_AI_TOOL_DEFINITIONS, toJsonString(toolDefinitions.getSerializableObject())); + } + OutputMessages outputMessages = messagesProvider.outputMessages(request, response); + if (outputMessages != null) { + internalSetLogAttribute(builder, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); + } + + internalSetLogAttribute(builder, GEN_AI_OPERATION_NAME, getter.getOperationName(request)); + internalSetLogAttribute(builder, GEN_AI_OUTPUT_TYPE, getter.getOutputType(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_CHOICE_COUNT, getter.getChoiceCount(request)); + internalSetLogAttribute(builder, GEN_AI_PROVIDER_NAME, getter.getSystem(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_MODEL, getter.getRequestModel(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_SEED, getter.getRequestSeed(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_FREQUENCY_PENALTY, getter.getRequestFrequencyPenalty(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_MAX_TOKENS, getter.getRequestMaxTokens(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_PRESENCE_PENALTY, getter.getRequestPresencePenalty(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_STOP_SEQUENCES, getter.getRequestStopSequences(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_TEMPERATURE, getter.getRequestTemperature(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_K, getter.getRequestTopK(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_P, getter.getRequestTopP(request)); + + List finishReasons = getter.getResponseFinishReasons(request, response); + if (finishReasons != null && !finishReasons.isEmpty()) { + builder.setAttribute(GEN_AI_RESPONSE_FINISH_REASONS, finishReasons); + } + internalSetLogAttribute(builder, GEN_AI_RESPONSE_ID, getter.getResponseId(request, response)); + internalSetLogAttribute(builder, GEN_AI_RESPONSE_MODEL, getter.getResponseModel(request, response)); + internalSetLogAttribute( + builder, GEN_AI_USAGE_INPUT_TOKENS, getter.getUsageInputTokens(request, response)); + internalSetLogAttribute( + builder, GEN_AI_USAGE_OUTPUT_TOKENS, getter.getUsageOutputTokens(request, response)); + builder.emit(); + } + } + + private void internalSetLogAttribute(LogRecordBuilder logRecordBuilder, AttributeKey key, @Nullable T value) { + if (value == null) { + return; + } + logRecordBuilder.setAttribute(key, value); + } + + private void tryInit() { + if (lazyInit.get()) { + return; + } + + if (lazyInit.compareAndSet(false, true)) { + jsonMarshaler = GlobalInstanceHolder.getInstance(JsonMarshaler.class); + if (jsonMarshaler == null) { + LOGGER.log(Level.WARNING, "failed to init json marshaler, global instance is null"); + } + + GenAiEventLoggerProvider loggerProvider = GlobalInstanceHolder.getInstance( + GenAiEventLoggerProvider.class); + + if (loggerProvider == null) { + LOGGER.log(Level.WARNING, "failed to init event logger, logger provider is null"); + return; + } + + eventLogger = loggerProvider.get(instrumentationName); + } + } + + private String toJsonString(Object object) { + if (jsonMarshaler == null) { + LOGGER.log(Level.INFO, "failed to serialize object, json marshaler is null"); + return null; + } + return jsonMarshaler.toJSONStringNonEmpty(object); + } +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java 
b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java
new file mode 100644
index 000000000000..0ad36a6aa610
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java
@@ -0,0 +1,22 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import io.opentelemetry.instrumentation.api.genai.messages.InputMessages;
+import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages;
+import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions;
+import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions;
+import javax.annotation.Nullable;
+
+public interface GenAiMessagesProvider<REQUEST, RESPONSE> {
+
+  @Nullable
+  InputMessages inputMessages(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  OutputMessages outputMessages(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  SystemInstructions systemInstructions(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  ToolDefinitions toolDefinitions(REQUEST request, @Nullable RESPONSE response);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java
new file mode 100644
index 000000000000..c3afd4dab5b8
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java
@@ -0,0 +1,11 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import javax.annotation.Nullable;
+
+public interface GenAiOperationAttributesGetter<REQUEST> {
+
+  String getOperationName(REQUEST request);
+
+  @Nullable
+  String getOperationTarget(REQUEST request);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
index d8a7f517da3c..66ebb9949c6c 100644
--- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
@@ -19,19 +19,19 @@ public static <REQUEST> SpanNameExtractor<REQUEST> create(
     return new GenAiSpanNameExtractor<>(attributesGetter);
   }
 
-  private final GenAiAttributesGetter<REQUEST, ?> getter;
+  private final GenAiOperationAttributesGetter<REQUEST> getter;
 
-  private GenAiSpanNameExtractor(GenAiAttributesGetter<REQUEST, ?> getter) {
+  private GenAiSpanNameExtractor(GenAiOperationAttributesGetter<REQUEST> getter) {
     this.getter = getter;
   }
 
   @Override
   public String extract(REQUEST request) {
     String operation = getter.getOperationName(request);
-    String model = getter.getRequestModel(request);
-    if (model == null) {
+    String operationTarget = getter.getOperationTarget(request);
+    if (operationTarget == null) {
       return operation;
     }
-    return operation + ' ' + model;
+    return operation + ' ' + operationTarget;
   }
 }
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java
new file mode 100644
index 000000000000..f982827d04d4
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java
@@ -0,0 +1,13 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator;
+
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
+
+import io.opentelemetry.api.common.AttributeKey;
+
+public final class AgentIncubatingAttributes {
+
+  public static final AttributeKey<String> GEN_AI_AGENT_DESCRIPTION = stringKey("gen_ai.agent.description");
+  public static final AttributeKey<String> GEN_AI_AGENT_ID = stringKey("gen_ai.agent.id");
+  public static final AttributeKey<String> GEN_AI_AGENT_NAME = stringKey("gen_ai.agent.name");
+  public static final AttributeKey<String> GEN_AI_DATA_SOURCE_ID = stringKey("gen_ai.data_source.id");
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java
new file mode 100644
index 000000000000..2e88cbdd29b3
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java
@@ -0,0 +1,80 @@
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator;
+
+import static io.opentelemetry.api.common.AttributeKey.doubleKey;
+import static io.opentelemetry.api.common.AttributeKey.longKey;
+import static io.opentelemetry.api.common.AttributeKey.stringArrayKey;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
+
+import io.opentelemetry.api.common.AttributeKey;
+import java.util.List;
+
+public final class GenAiIncubatingAttributes {
+
+  public static final AttributeKey<String> GEN_AI_OPERATION_NAME = stringKey("gen_ai.operation.name");
+  public static final AttributeKey<List<String>> GEN_AI_REQUEST_ENCODING_FORMATS =
+      stringArrayKey("gen_ai.request.encoding_formats");
+  public static final AttributeKey<Double> GEN_AI_REQUEST_FREQUENCY_PENALTY =
+      doubleKey("gen_ai.request.frequency_penalty");
+  public static final AttributeKey<Long> GEN_AI_REQUEST_MAX_TOKENS =
+      longKey("gen_ai.request.max_tokens");
+  public static final AttributeKey<String> GEN_AI_REQUEST_MODEL = stringKey("gen_ai.request.model");
+  public static final AttributeKey<Double> GEN_AI_REQUEST_PRESENCE_PENALTY =
+      doubleKey("gen_ai.request.presence_penalty");
+  public static final AttributeKey<Long> GEN_AI_REQUEST_SEED = longKey("gen_ai.request.seed");
+  public static final AttributeKey<List<String>> GEN_AI_REQUEST_STOP_SEQUENCES =
+      stringArrayKey("gen_ai.request.stop_sequences");
+  public static final AttributeKey<Double> GEN_AI_REQUEST_TEMPERATURE =
+      doubleKey("gen_ai.request.temperature");
+  public static final AttributeKey<Double> GEN_AI_REQUEST_TOP_K =
+      doubleKey("gen_ai.request.top_k");
+  public static final AttributeKey<Double> GEN_AI_REQUEST_TOP_P =
+      doubleKey("gen_ai.request.top_p");
+  public static final AttributeKey<List<String>> GEN_AI_RESPONSE_FINISH_REASONS =
+      stringArrayKey("gen_ai.response.finish_reasons");
+  public static final AttributeKey<String> GEN_AI_RESPONSE_ID = stringKey("gen_ai.response.id");
+  public static final AttributeKey<String> GEN_AI_RESPONSE_MODEL = stringKey("gen_ai.response.model");
+  public static final AttributeKey<String> GEN_AI_PROVIDER_NAME =
stringKey("gen_ai.provider.name"); + public static final AttributeKey GEN_AI_CONVERSATION_ID = stringKey("gen_ai.conversation.id"); + public static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = longKey("gen_ai.usage.input_tokens"); + public static final AttributeKey GEN_AI_USAGE_OUTPUT_TOKENS = + longKey("gen_ai.usage.output_tokens"); + public static final AttributeKey GEN_AI_REQUEST_CHOICE_COUNT = longKey("gen_ai.request.choice.count"); + public static final AttributeKey GEN_AI_OUTPUT_TYPE = stringKey("gen_ai.output.type"); + public static final AttributeKey GEN_AI_SYSTEM_INSTRUCTIONS = stringKey("gen_ai.system_instructions"); + public static final AttributeKey GEN_AI_INPUT_MESSAGES = stringKey("gen_ai.input.messages"); + public static final AttributeKey GEN_AI_OUTPUT_MESSAGES = stringKey("gen_ai.output.messages"); + public static final AttributeKey GEN_AI_TOOL_DEFINITIONS = stringKey("gen_ai.tool.definitions"); + + public static class GenAiOperationNameIncubatingValues { + public static final String CHAT = "chat"; + public static final String CREATE_AGENT = "create_agent"; + public static final String EMBEDDINGS = "embeddings"; + public static final String EXECUTE_TOOL = "execute_tool"; + public static final String GENERATE_CONTENT = "generate_content"; + public static final String INVOKE_AGENT = "invoke_agent"; + public static final String TEXT_COMPLETION = "text_completion"; + } + + public static class GenAiProviderNameIncubatingValues { + public static final String ANTHROPIC = "anthropic"; + public static final String AWS_BEDROCK = "aws.bedrock"; + public static final String AZURE_AI_INFERENCE = "azure.ai.inference"; + public static final String AZURE_AI_OPENAI = "azure.ai.openai"; + public static final String COHERE = "cohere"; + public static final String DEEPSEEK = "deepseek"; + public static final String GCP_GEMINI = "gcp.gemini"; + public static final String GCP_GEN_AI = "gcp.gen_ai"; + public static final String GCP_VERTEX_AI = "gcp.vertex_ai"; + public static final String GROQ = "groq"; + public static final String IBM_WATSONX_AI = "ibm.watsonx.ai"; + public static final String MISTRAL_AI = "mistral_ai"; + public static final String OPENAI = "openai"; + public static final String PERPLEXITY = "perplexity"; + public static final String X_AI = "x_ai"; + public static final String DASHSCOPE = "dashscope"; + } + + public static class GenAiEventName { + public static final String GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS = "gen_ai.client.inference.operation.details"; + } +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java new file mode 100644 index 000000000000..d73e4d292342 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java @@ -0,0 +1,15 @@ +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import io.opentelemetry.api.common.AttributeKey; + +public class GenAiToolIncubatingAttributes { + + public static final AttributeKey GEN_AI_TOOL_CALL_ID = stringKey("gen_ai.tool.call.id"); + public static final AttributeKey GEN_AI_TOOL_DESCRIPTION = stringKey("gen_ai.tool.description"); + public static final 
AttributeKey GEN_AI_TOOL_NAME = stringKey("gen_ai.tool.name"); + public static final AttributeKey GEN_AI_TOOL_TYPE = stringKey("gen_ai.tool.type"); + public static final AttributeKey GEN_AI_TOOL_CALL_ARGUMENTS = stringKey("gen_ai.tool.call.arguments"); + public static final AttributeKey GEN_AI_TOOL_CALL_RESULT = stringKey("gen_ai.tool.call.result"); +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java new file mode 100644 index 000000000000..a970c061eff3 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java @@ -0,0 +1,66 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.tool; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_ARGUMENTS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_RESULT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_DESCRIPTION; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_TYPE; +import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; + +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; +import javax.annotation.Nullable; + +public final class GenAiToolAttributesExtractor + implements AttributesExtractor { + + /** Creates the GenAI attributes extractor. 
*/ + public static AttributesExtractor create( + GenAiToolAttributesGetter attributesGetter, MessageCaptureOptions messageCaptureOptions) { + return new GenAiToolAttributesExtractor<>(attributesGetter, messageCaptureOptions); + } + + private final GenAiToolAttributesGetter getter; + + private final MessageCaptureOptions messageCaptureOptions; + + private GenAiToolAttributesExtractor( + GenAiToolAttributesGetter getter, + MessageCaptureOptions messageCaptureOptions) { + this.getter = getter; + this.messageCaptureOptions = messageCaptureOptions; + } + + @Override + public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { + internalSet(attributes, GEN_AI_OPERATION_NAME, getter.getOperationName(request)); + internalSet(attributes, GEN_AI_TOOL_DESCRIPTION, getter.getToolDescription(request)); + internalSet(attributes, GEN_AI_TOOL_NAME, getter.getToolName(request)); + internalSet(attributes, GEN_AI_TOOL_TYPE, getter.getToolType(request)); + if (messageCaptureOptions.captureMessageContent()) { + internalSet(attributes, GEN_AI_TOOL_CALL_ARGUMENTS, getter.getToolCallArguments(request)); + } + } + + @Override + public void onEnd( + AttributesBuilder attributes, + Context context, + REQUEST request, + @Nullable RESPONSE response, + @Nullable Throwable error) { + internalSet(attributes, GEN_AI_TOOL_CALL_ID, getter.getToolCallId(request, response)); + if (messageCaptureOptions.captureMessageContent()) { + internalSet(attributes, GEN_AI_TOOL_CALL_RESULT, getter.getToolCallResult(request, response)); + } + } +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java new file mode 100644 index 000000000000..ea891cecf2ee --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java @@ -0,0 +1,23 @@ +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.tool; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiOperationAttributesGetter; +import javax.annotation.Nullable; + +public interface GenAiToolAttributesGetter extends + GenAiOperationAttributesGetter { + + String getToolDescription(REQUEST request); + + String getToolName(REQUEST request); + + String getToolType(REQUEST request); + + @Nullable + String getToolCallArguments(REQUEST request); + + @Nullable + String getToolCallId(REQUEST request, RESPONSE response); + + @Nullable + String getToolCallResult(REQUEST request, RESPONSE response); +} diff --git a/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts new file mode 100644 index 000000000000..ef2537931a62 --- /dev/null +++ b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts @@ -0,0 +1,3 @@ +plugins { + id("otel.javaagent-bootstrap") +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts new file mode 100644 index 000000000000..dc9feb4e1eb0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts @@ -0,0 +1,71 @@ +plugins { + id("otel.javaagent-instrumentation") +} + +otelJava { + // Spring AI 3 requires java 17 + minJavaVersionSupported.set(JavaVersion.VERSION_17) 
+} + +muzzle { + pass { + group.set("org.springframework.ai") + module.set("spring-ai-client-chat") + versions.set("(,)") + } +} + +repositories { + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + mavenContent { + snapshotsOnly() + } + } + mavenCentral() +} + +dependencies { + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-client-chat:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") + + implementation(project(":instrumentation:reactor:reactor-3.1:library")) + + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + + testInstrumentation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent")) + testInstrumentation(project(":instrumentation:reactor:reactor-3.1:javaagent")) + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-1.0:testing")) +} + +tasks { + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } + } + + // TODO run tests both with and without genai message capture + systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java new file mode 100644 index 000000000000..84e99a1b7f68 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import static java.util.Arrays.asList; + +import java.util.List; +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultCallResponseSpecInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultStreamResponseSpecInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallbackInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.DefaultToolCallingManagerInstrumentation; + +@AutoService(InstrumentationModule.class) +public class SpringAiInstrumentationModule extends InstrumentationModule { + + public SpringAiInstrumentationModule() { + super("spring-ai", "spring-ai-1.0"); + } + + @Override + public List typeInstrumentations() { + return asList( + new 
DefaultCallResponseSpecInstrumentation(), + new DefaultStreamResponseSpecInstrumentation(), + new ToolCallbackInstrumentation(), + new DefaultToolCallingManagerInstrumentation()); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java new file mode 100644 index 000000000000..438a1f772ac4 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig; + +public final class SpringAiSingletons { + public static final SpringAiTelemetry TELEMETRY = + SpringAiTelemetry.builder(GlobalOpenTelemetry.get()) + .setCaptureMessageContent( + InstrumentationConfig.get() + .getBoolean("otel.instrumentation.genai.capture-message-content", true)) + .setContentMaxLength( + InstrumentationConfig.get() + .getInt("otel.instrumentation.genai.message-content.max-length", 8192)) + .setCaptureMessageStrategy( + InstrumentationConfig.get() + .getString("otel.instrumentation.genai.message-content.capture-strategy", "span-attributes")) + .build(); + + private SpringAiSingletons() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java new file mode 100644 index 000000000000..0e81b9adaf01 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.client.ChatClientRequest; +import reactor.core.publisher.Flux; + +public final class SpringAiTelemetry { + + public static SpringAiTelemetryBuilder builder(OpenTelemetry openTelemetry) { + return new SpringAiTelemetryBuilder(openTelemetry); + } + private final Instrumenter chatClientInstrumenter; + private final Instrumenter toolCallInstrumenter; + private final MessageCaptureOptions messageCaptureOptions; + + SpringAiTelemetry( + Instrumenter chatClientInstrumenter, + Instrumenter toolCallInstrumenter, + MessageCaptureOptions messageCaptureOptions) { + this.chatClientInstrumenter = chatClientInstrumenter; + this.toolCallInstrumenter = toolCallInstrumenter; + 
this.messageCaptureOptions = messageCaptureOptions; + } + + public Instrumenter chatClientInstrumenter() { + return chatClientInstrumenter; + } + + public Instrumenter toolCallInstrumenter() { + return toolCallInstrumenter; + } + + public MessageCaptureOptions messageCaptureOptions() { + return messageCaptureOptions; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java new file mode 100644 index 000000000000..d8c10951395c --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java @@ -0,0 +1,94 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAgentAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiSpanNameExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.tool.GenAiToolAttributesExtractor; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.ChatClientAttributesGetter; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.ChatClientMessagesProvider; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallAttributesGetter; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import com.google.errorprone.annotations.CanIgnoreReturnValue; + +public final class SpringAiTelemetryBuilder { + + private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-1.0"; + + private final OpenTelemetry openTelemetry; + private boolean captureMessageContent; + + private int contentMaxLength; + + private String captureMessageStrategy; + + SpringAiTelemetryBuilder(OpenTelemetry openTelemetry) { + this.openTelemetry = openTelemetry; + } + + /** + * Sets whether to capture message content in spans. Defaults to false. + */ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { + this.captureMessageContent = captureMessageContent; + return this; + } + + /** + * Sets the maximum length of message content to capture. Defaults to 8192. + */ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setContentMaxLength(int contentMaxLength) { + this.contentMaxLength = contentMaxLength; + return this; + } + + /** + * Sets the strategy to capture message content. Defaults to "span-attributes". 
+ */ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { + this.captureMessageStrategy = captureMessageStrategy; + return this; + } + + public SpringAiTelemetry build() { + MessageCaptureOptions messageCaptureOptions = MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); + + Instrumenter chatClientInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiAgentAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiMessagesExtractor.create( + ChatClientAttributesGetter.INSTANCE, + ChatClientMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, INSTRUMENTATION_NAME)) + .buildInstrumenter(); + + Instrumenter toolCallInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ToolCallAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiToolAttributesExtractor.create( + ToolCallAttributesGetter.INSTANCE, messageCaptureOptions)) + .buildInstrumenter(); + + return new SpringAiTelemetry(chatClientInstrumenter, toolCallInstrumenter, messageCaptureOptions); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java new file mode 100644 index 000000000000..9617dc660284 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java @@ -0,0 +1,215 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAgentAttributesGetter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; +import java.util.List; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; + +public enum ChatClientAttributesGetter implements + GenAiAttributesGetter, + GenAiAgentAttributesGetter { + INSTANCE; + + @Override + public String getOperationName(ChatClientRequest request) { + return "invoke_agent"; + } + + @Override + public String getSystem(ChatClientRequest request) { + return "spring-ai"; + } + + @Nullable + @Override + public String getRequestModel(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getModel(); + } + + @Override + public String getOperationTarget(ChatClientRequest request) { + return getName(request); + } + + @Nullable + @Override + public Long getRequestSeed(ChatClientRequest request) { + // Spring AI currently does not support seed parameter + return null; + } + + @Nullable + @Override + public List 
getRequestEncodingFormats(ChatClientRequest request) { + // Spring AI currently does not support encoding_formats parameter + return null; + } + + @Nullable + @Override + public Double getRequestFrequencyPenalty(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getFrequencyPenalty(); + } + + @Nullable + @Override + public Long getRequestMaxTokens(ChatClientRequest request) { + if (request.prompt().getOptions() == null || request.prompt().getOptions().getMaxTokens() == null) { + return null; + } + return request.prompt().getOptions().getMaxTokens().longValue(); + } + + @Nullable + @Override + public Double getRequestPresencePenalty(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getPresencePenalty(); + } + + @Nullable + @Override + public List getRequestStopSequences(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getStopSequences(); + } + + @Nullable + @Override + public Double getRequestTemperature(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getTemperature(); + } + + @Nullable + @Override + public Double getRequestTopK(ChatClientRequest request) { + if (request.prompt().getOptions() == null || request.prompt().getOptions().getTopK() == null) { + return null; + } + return request.prompt().getOptions().getTopK().doubleValue(); + } + + @Nullable + @Override + public Double getRequestTopP(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getTopP(); + } + + @Override + public List getResponseFinishReasons(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getResult() == null + || response.chatResponse().getResult().getMetadata() == null + || response.chatResponse().getResult().getMetadata().getFinishReason() == null) { + return emptyList(); + } + + return singletonList(response.chatResponse().getResult().getMetadata().getFinishReason().toLowerCase()); + } + + @Nullable + @Override + public String getResponseId(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null) { + return null; + } + + return response.chatResponse().getMetadata().getId(); + } + + @Nullable + @Override + public String getResponseModel(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getModel() == null + || response.chatResponse().getMetadata().getModel().isEmpty()) { + return null; + } + + return response.chatResponse().getMetadata().getModel(); + } + + @Nullable + @Override + public Long getUsageInputTokens(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getUsage() == null + || response.chatResponse().getMetadata().getUsage().getPromptTokens() == null + || response.chatResponse().getMetadata().getUsage().getPromptTokens() == 0) { + 
return null; + } + + return response.chatResponse().getMetadata().getUsage().getPromptTokens().longValue(); + } + + @Nullable + @Override + public Long getUsageOutputTokens(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getUsage() == null + || response.chatResponse().getMetadata().getUsage().getCompletionTokens() == null + || response.chatResponse().getMetadata().getUsage().getCompletionTokens() == 0) { + return null; + } + + return response.chatResponse().getMetadata().getUsage().getCompletionTokens().longValue(); + } + + @Override + public String getName(ChatClientRequest request) { + return "spring_ai chat_client"; + } + + @Nullable + @Override + public String getDescription(ChatClientRequest request) { + return null; + } + + @Nullable + @Override + public String getId(ChatClientRequest request) { + return null; + } + + @Nullable + @Override + public String getDataSourceId(ChatClientRequest request) { + return null; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java new file mode 100644 index 000000000000..9b4d9cb1b12a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java @@ -0,0 +1,137 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; +import org.springframework.ai.chat.messages.AssistantMessage; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.metadata.ChatGenerationMetadata; +import org.springframework.ai.chat.model.Generation; + +final class ChatClientMessageBuffer { + private static final String TRUNCATE_FLAG = "...[truncated]"; + private final int index; + private final MessageCaptureOptions messageCaptureOptions; + + @Nullable private String finishReason; + + @Nullable private StringBuilder rawContentBuffer; + + @Nullable private Map toolCalls; + + ChatClientMessageBuffer(int index, + MessageCaptureOptions messageCaptureOptions) { + this.index = index; + this.messageCaptureOptions = messageCaptureOptions; + } + + Generation toGeneration() { + List toolCalls; + if (this.toolCalls != null) { + toolCalls = new ArrayList<>(this.toolCalls.size()); + for (Map.Entry entry : this.toolCalls.entrySet()) { + if (entry.getValue() != null) { + String arguments; + if (entry.getValue().function.arguments != null) { + arguments = entry.getValue().function.arguments.toString(); + } else { + arguments = ""; + } + if (entry.getValue().type == null) { + entry.getValue().type = "function"; + } + if (entry.getValue().function.name == null) { + entry.getValue().function.name = ""; + } + toolCalls.add(new ToolCall(entry.getValue().id, entry.getValue().type, + 
entry.getValue().function.name, arguments)); + } + } + } else { + toolCalls = Collections.emptyList(); + } + + String content = ""; + + if (this.rawContentBuffer != null) { + content = this.rawContentBuffer.toString(); + } + + return new Generation(new AssistantMessage(content, Collections.emptyMap(), toolCalls), + ChatGenerationMetadata.builder().finishReason(this.finishReason).build()); + } + + void append(Generation generation) { + AssistantMessage message = generation.getOutput(); + if (message != null) { + if (this.messageCaptureOptions.captureMessageContent()) { + if (message.getText() != null) { + if (this.rawContentBuffer == null) { + this.rawContentBuffer = new StringBuilder(); + } + + String deltaContent = message.getText(); + if (this.rawContentBuffer.length() < this.messageCaptureOptions.maxMessageContentLength()) { + if (this.rawContentBuffer.length() + deltaContent.length() >= this.messageCaptureOptions.maxMessageContentLength()) { + deltaContent = deltaContent.substring(0, this.messageCaptureOptions.maxMessageContentLength() - this.rawContentBuffer.length()); + this.rawContentBuffer.append(deltaContent).append(TRUNCATE_FLAG); + } else { + this.rawContentBuffer.append(deltaContent); + } + } + } + } + + if (message.hasToolCalls()) { + if (this.toolCalls == null) { + this.toolCalls = new HashMap<>(); + } + + for (int i = 0; i < message.getToolCalls().size(); i++) { + ToolCall toolCall = message.getToolCalls().get(i); + ToolCallBuffer buffer = + this.toolCalls.computeIfAbsent( + i, unused -> new ToolCallBuffer(toolCall.id())); + + buffer.type = toolCall.type(); + buffer.function.name = toolCall.name(); + if (this.messageCaptureOptions.captureMessageContent()) { + if (buffer.function.arguments == null) { + buffer.function.arguments = new StringBuilder(); + } + buffer.function.arguments.append(toolCall.arguments()); + } + } + } + } + + ChatGenerationMetadata metadata = generation.getMetadata(); + if (metadata != null && metadata.getFinishReason() != null && !metadata.getFinishReason().isEmpty()) { + this.finishReason = metadata.getFinishReason(); + } + } + + private static class FunctionBuffer { + @Nullable String name; + @Nullable StringBuilder arguments; + } + + private static class ToolCallBuffer { + final String id; + final FunctionBuffer function = new FunctionBuffer(); + @Nullable String type; + + ToolCallBuffer(String id) { + this.id = id; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java new file mode 100644 index 000000000000..569c0c4db647 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java @@ -0,0 +1,200 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.MessagePart; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessage; +import 
io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.Role; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.TextPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallRequestPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallResponsePart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinition; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesProvider; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.messages.AssistantMessage; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.messages.Message; +import org.springframework.ai.chat.messages.MessageType; +import org.springframework.ai.chat.messages.ToolResponseMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; +import org.springframework.ai.chat.model.Generation; +import org.springframework.ai.model.tool.ToolCallingChatOptions; + +public class ChatClientMessagesProvider implements + GenAiMessagesProvider { + + private static final String TRUNCATE_FLAG = "...[truncated]"; + + private final MessageCaptureOptions messageCaptureOptions; + + ChatClientMessagesProvider(MessageCaptureOptions messageCaptureOptions) { + this.messageCaptureOptions = messageCaptureOptions; + } + + public static ChatClientMessagesProvider create(MessageCaptureOptions messageCaptureOptions) { + return new ChatClientMessagesProvider(messageCaptureOptions); + } + + @Nullable + @Override + public InputMessages inputMessages(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (!messageCaptureOptions.captureMessageContent() + || request.prompt().getInstructions() == null) { + return null; + } + + InputMessages inputMessages = InputMessages.create(); + for (Message msg : request.prompt().getInstructions()) { + if (msg.getMessageType() == MessageType.SYSTEM) { + inputMessages.append( + InputMessage.create(Role.SYSTEM, contentToMessageParts(msg.getText()))); + } else if (msg.getMessageType() == MessageType.USER) { + inputMessages.append(InputMessage.create(Role.USER, contentToMessageParts(msg.getText()))); + } else if (msg.getMessageType() == MessageType.ASSISTANT) { + AssistantMessage assistantMessage = (AssistantMessage) msg; + List messageParts = new ArrayList<>(); + + if (assistantMessage.getText() != null && !assistantMessage.getText().isEmpty()) { + messageParts.addAll(contentToMessageParts(assistantMessage.getText())); + } + + if (assistantMessage.hasToolCalls()) { + messageParts.addAll(assistantMessage + .getToolCalls() + .stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); + } else if (msg.getMessageType() == MessageType.TOOL) { + ToolResponseMessage toolResponseMessage = (ToolResponseMessage) msg; + inputMessages.append(InputMessage.create(Role.TOOL, contentToMessageParts(toolResponseMessage.getResponses()))); + } + } + return 
inputMessages; + } + + @Nullable + @Override + public OutputMessages outputMessages(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (!messageCaptureOptions.captureMessageContent() + || response == null + || response.chatResponse() == null + || response.chatResponse().getResults() == null) { + return null; + } + + OutputMessages outputMessages = OutputMessages.create(); + for (Generation generation : response.chatResponse().getResults()) { + AssistantMessage message = generation.getOutput(); + List messageParts = new ArrayList<>(); + if (message != null) { + if (message.getText() != null && !message.getText().isEmpty()) { + messageParts.addAll(contentToMessageParts(message.getText())); + } + + if (message.hasToolCalls()) { + messageParts.addAll(message + .getToolCalls() + .stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + } + + outputMessages.append( + OutputMessage.create( + Role.ASSISTANT, + messageParts, + generation.getMetadata().getFinishReason().toLowerCase())); + } + return outputMessages; + } + + @Nullable + @Override + public SystemInstructions systemInstructions(ChatClientRequest request, @Nullable ChatClientResponse response) { + return null; + } + + @Nullable + @Override + public ToolDefinitions toolDefinitions(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (request.prompt().getOptions() == null || !(request.prompt() + .getOptions() instanceof ToolCallingChatOptions options)) { + return null; + } + + ToolDefinitions toolDefinitions = ToolDefinitions.create(); + + // See: org.springframework.ai.model.tool.DefaultToolCallingManager.resolveToolDefinitions + options.getToolCallbacks() + .stream() + .map(toolCallback -> { + String name = toolCallback.getToolDefinition().name(); + String type = "function"; + if (messageCaptureOptions.captureMessageContent()) { + return ToolDefinition.create(type, name, toolCallback.getToolDefinition().description(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + }) + .filter(Objects::nonNull) + .forEach(toolDefinitions::append); + + for (String toolName : options.getToolNames()) { + // Skip the tool if it is already present in the request toolCallbacks. + // That might happen if a tool is defined in the options + // both as a ToolCallback and as a tool name. 
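For orientation, the inputMessages(...) method above reduces a plain single-user-message prompt, when content capture is enabled, to roughly the following structure (the text is illustrative):

    InputMessages messages = InputMessages.create();
    messages.append(
        InputMessage.create(
            Role.USER,
            Collections.singletonList(TextPart.create("Which ocean contains Bouvet Island?"))));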
+ if (options.getToolCallbacks() + .stream() + .anyMatch(tool -> tool.getToolDefinition().name().equals(toolName))) { + continue; + } + toolDefinitions.append(ToolDefinition.create("function", toolName, null, null)); + } + + return toolDefinitions; + } + + private List contentToMessageParts(String content) { + return Collections.singletonList(TextPart.create(truncateTextContent(content))); + } + + private MessagePart toolCallToMessagePart(ToolCall call) { + if (call != null) { + return ToolCallRequestPart.create(call.id(), call.name(), call.arguments()); + } + return ToolCallRequestPart.create("unknown_function"); + } + + private List contentToMessageParts(List toolResponses) { + if (toolResponses == null) { + return Collections.singletonList(ToolCallResponsePart.create("")); + } + + return toolResponses.stream() + .map(response -> + ToolCallResponsePart.create( + response.id(), truncateTextContent(response.responseData()))) + .collect(Collectors.toList()); + } + + private String truncateTextContent(String content) { + if (!content.endsWith(TRUNCATE_FLAG) && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + } + return content; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java new file mode 100644 index 000000000000..21213374afd6 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java @@ -0,0 +1,137 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.metadata.ChatResponseMetadata; +import org.springframework.ai.chat.metadata.DefaultUsage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.model.Generation; + +public final class ChatClientStreamListener { + + private final Context context; + private final ChatClientRequest request; + private final Instrumenter instrumenter; + private final MessageCaptureOptions messageCaptureOptions; + private final boolean newSpan; + private final AtomicBoolean hasEnded; + private final List chatClientMessageBuffers; + + // Aggregated metadata + private final AtomicLong inputTokens = new AtomicLong(0); + private final AtomicLong outputTokens = new AtomicLong(0); + private final AtomicReference requestId = new AtomicReference<>(); + private final AtomicReference model = new AtomicReference<>(); + + public 
ChatClientStreamListener( + Context context, + ChatClientRequest request, + Instrumenter instrumenter, + MessageCaptureOptions messageCaptureOptions, + boolean newSpan) { + this.context = context; + this.request = request; + this.instrumenter = instrumenter; + this.messageCaptureOptions = messageCaptureOptions; + this.newSpan = newSpan; + this.hasEnded = new AtomicBoolean(); + this.chatClientMessageBuffers = new ArrayList<>(); + } + + public void onChunk(ChatClientResponse chatClientChunk) { + if (chatClientChunk == null || chatClientChunk.chatResponse() == null) { + return; + } + + ChatResponse chunk = chatClientChunk.chatResponse(); + if (chunk.getMetadata() != null) { + if (chunk.getMetadata().getId() != null) { + requestId.set(chunk.getMetadata().getId()); + } + if (chunk.getMetadata().getUsage() != null) { + if (chunk.getMetadata().getUsage().getPromptTokens() != null) { + inputTokens.set(chunk.getMetadata().getUsage().getPromptTokens().longValue()); + } + if (chunk.getMetadata().getUsage().getCompletionTokens() != null) { + outputTokens.set(chunk.getMetadata().getUsage().getCompletionTokens().longValue()); + } + } + } + + if (chunk.getResults() != null) { + List generations = chunk.getResults(); + for (int i = 0; i < generations.size(); i++) { + while (chatClientMessageBuffers.size() <= i) { + chatClientMessageBuffers.add(null); + } + ChatClientMessageBuffer buffer = chatClientMessageBuffers.get(i); + if (buffer == null) { + buffer = new ChatClientMessageBuffer(i, messageCaptureOptions); + chatClientMessageBuffers.set(i, buffer); + } + + buffer.append(generations.get(i)); + } + } + } + + public void endSpan(@Nullable Throwable error) { + // Use an atomic operation since close() type of methods are exposed to the user + // and can come from any thread. + if (!this.hasEnded.compareAndSet(false, true)) { + return; + } + + if (this.chatClientMessageBuffers.isEmpty()) { + // Only happens if we got no chunks, so we have no response. 
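One detail of onChunk above: the aggregated model reference is read later in endSpan but is not populated from the stream; a minimal sketch of capturing it inside the existing metadata null-check, assuming the streamed ChatResponseMetadata exposes the model name the same way the non-streaming metadata does, would be:

    if (chunk.getMetadata().getModel() != null && !chunk.getMetadata().getModel().isEmpty()) {
      model.set(chunk.getMetadata().getModel());
    }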
+ if (this.newSpan) { + this.instrumenter.end(this.context, this.request, null, error); + } + return; + } + + Integer inputTokens = null; + if (this.inputTokens.get() > 0) { + inputTokens = (int) this.inputTokens.get(); + } + + Integer outputTokens = null; + if (this.outputTokens.get() > 0) { + outputTokens = (int) this.outputTokens.get(); + } + + List generations = this.chatClientMessageBuffers.stream() + .map(ChatClientMessageBuffer::toGeneration) + .collect(Collectors.toList()); + + ChatClientResponse response = ChatClientResponse.builder() + .chatResponse(ChatResponse.builder() + .generations(generations) + .metadata(ChatResponseMetadata.builder() + .usage(new DefaultUsage(inputTokens, outputTokens)) + .id(requestId.get()) + .model(model.get()) + .build()) + .build()) + .build(); + + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, response, error); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java new file mode 100644 index 000000000000..3f921d646476 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java @@ -0,0 +1,23 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; +import org.springframework.ai.chat.client.ChatClientResponse; +import reactor.core.publisher.Flux; + +public final class ChatClientStreamWrapper { + + public static Flux wrap( + Flux originFlux, + ChatClientStreamListener streamListener, + Context context) { + + Flux chatClientResponseFlux = originFlux.doOnNext( + chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); + return ContextPropagationOperator.runWithContext(chatClientResponseFlux, context); + } + + private ChatClientStreamWrapper() {} +} \ No newline at end of file diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java new file mode 100644 index 000000000000..abbd4b1791be --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java @@ -0,0 +1,79 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static 
net.bytebuddy.matcher.ElementMatchers.takesArguments; +import static net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.client.ChatClientRequest; + +@AutoService(TypeInstrumentation.class) +public class DefaultCallResponseSpecInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod().and(named("doGetObservableChatClientResponse")).and(takesArguments(2)) + .and(isPrivate()).and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + this.getClass().getName() + "$DoGetObservableChatClientResponseAdvice"); + } + + @SuppressWarnings("unused") + public static class DoGetObservableChatClientResponseAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void doGetObservableChatClientResponseEnter( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + context = Context.current(); + + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatClientInstrumenter().start(context, request); + } + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void doGetObservableChatClientResponseExit( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Return ChatClientResponse response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + if (scope == null) { + return; + } + scope.close(); + + TELEMETRY.chatClientInstrumenter().end(context, request, response, throwable); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java new file mode 100644 index 000000000000..2eec32010ba3 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java @@ -0,0 +1,86 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static 
io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; +import static net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.client.ChatClientRequest; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class DefaultStreamResponseSpecInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod().and(named("doGetObservableFluxChatResponse")).and(takesArguments(1)) + .and(isPrivate()).and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + this.getClass().getName() + "$DoGetObservableFluxChatResponseAdvice"); + } + + @SuppressWarnings("unused") + public static class DoGetObservableFluxChatResponseAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void doGetObservableFluxChatResponseEnter( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatClientStreamListener streamListener) { + context = Context.current(); + + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatClientInstrumenter().start(context, request); + streamListener = new ChatClientStreamListener( + context, request, TELEMETRY.chatClientInstrumenter(), + TELEMETRY.messageCaptureOptions(), true); + } + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void doGetObservableFluxChatResponseExit( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatClientStreamListener streamListener) { + + if (throwable != null) { + // In case of exception, directly call end + TELEMETRY.chatClientInstrumenter().end(context, request, null, throwable); + return; + } + + if (streamListener != null) { + // Wrap the response to integrate the stream listener + response = ChatClientStreamWrapper.wrap(response, streamListener, context); + } + } + } +} diff --git 
a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java new file mode 100644 index 000000000000..85cdbfa0d511 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java @@ -0,0 +1,85 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import java.util.HashMap; +import java.util.Map; +import org.springframework.ai.chat.messages.AssistantMessage; + +@AutoService(TypeInstrumentation.class) +public class DefaultToolCallingManagerInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.model.tool.DefaultToolCallingManager"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.model.tool.DefaultToolCallingManager"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod().and(named("executeToolCall")) + .and(takesArguments(3)) + .and(takesArgument(1, named("org.springframework.ai.chat.messages.AssistantMessage"))), + this.getClass().getName() + "$ExecuteToolCallAdvice"); + } + + @SuppressWarnings("unused") + public static class ExecuteToolCallAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void executeToolCallEnter( + @Advice.Argument(1) AssistantMessage assistantMessage, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + + context = Context.current(); + + if (assistantMessage != null && assistantMessage.getToolCalls() != null) { + Map toolNameToIdMap = new HashMap<>(); + + for (AssistantMessage.ToolCall toolCall : assistantMessage.getToolCalls()) { + if (toolCall.id() != null && toolCall.name() != null) { + toolNameToIdMap.put(toolCall.name(), toolCall.id()); + } + } + + // store tool call ids map to context + if (!toolNameToIdMap.isEmpty()) { + context = ToolCallContext.storeToolCalls(context, toolNameToIdMap); + } + scope = context.makeCurrent(); + } + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void executeToolCallExit( + @Advice.Local("otelScope") Scope scope) { + if (scope == null) { + return; + } + scope.close(); + } + } +} 
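To make the id propagation above concrete, a minimal sketch of what the advice stores and what the tool callback instrumentation later reads back (names and ids are illustrative):

    Map<String, String> toolNameToId = new HashMap<>();
    toolNameToId.put("get_weather", "call_abc123");   // assistant tool call: name -> id
    Context context = ToolCallContext.storeToolCalls(Context.current(), toolNameToId);
    try (Scope ignored = context.makeCurrent()) {
      // later, inside the ToolCallback#call advice:
      String id = ToolCallContext.getToolCallId(Context.current(), "get_weather"); // "call_abc123"
    }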
diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java new file mode 100644 index 000000000000..d5f52689feb0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java @@ -0,0 +1,51 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.tool.GenAiToolAttributesGetter; +import javax.annotation.Nullable; + +public enum ToolCallAttributesGetter implements GenAiToolAttributesGetter<ToolCallRequest, String> { + INSTANCE; + + @Override + public String getOperationName(ToolCallRequest request) { + return request.getOperationName(); + } + + @Override + public String getOperationTarget(ToolCallRequest request) { + return getToolName(request); + } + + @Override + public String getToolDescription(ToolCallRequest request) { + return request.getDescription(); + } + + @Override + public String getToolName(ToolCallRequest request) { + return request.getName(); + } + + @Override + public String getToolType(ToolCallRequest request) { + return "function"; + } + + @Nullable + @Override + public String getToolCallArguments(ToolCallRequest request) { + return request.getToolInput(); + } + + @Nullable + @Override + public String getToolCallId(ToolCallRequest request, String response) { + return request.getToolCallId(); + } + + @Nullable + @Override + public String getToolCallResult(ToolCallRequest request, String response) { + return response; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java new file mode 100644 index 000000000000..d5a20e33c1ec --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; +import java.util.Map; + +/** + * Context holder for the tool call id map (tool name to tool call id). + */ +public final class ToolCallContext { + + private static final ContextKey<Map<String, String>> TOOL_CALL_IDS_KEY = + ContextKey.named("spring-ai-tool-call-ids"); + + private ToolCallContext() {} + + public static Context storeToolCalls(Context context, Map<String, String> toolNameToIdMap) { + if (toolNameToIdMap == null || toolNameToIdMap.isEmpty()) { + return context; + } + return context.with(TOOL_CALL_IDS_KEY, toolNameToIdMap); + } + + public static String getToolCallId(Context context, String toolName) { + if (context == null || toolName == null) { + return null; + } + + Map<String, String> toolCallIds = context.get(TOOL_CALL_IDS_KEY); + if (toolCallIds == null) { + return null; + } + + return toolCallIds.get(toolName); + } +} diff --git 
a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java new file mode 100644 index 000000000000..5b96220da34e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java @@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import org.springframework.ai.tool.definition.ToolDefinition; + +public final class ToolCallRequest { + + private final String toolInput; + private final String toolCallId; + private final ToolDefinition toolDefinition; + + private ToolCallRequest(String toolInput, String toolCallId, ToolDefinition toolDefinition) { + this.toolInput = toolInput; + this.toolCallId = toolCallId; + this.toolDefinition = toolDefinition; + } + + public static ToolCallRequest create(String toolInput, String toolCallId, ToolDefinition toolDefinition) { + return new ToolCallRequest(toolInput, toolCallId, toolDefinition); + } + + public String getOperationName() { + return "execute_tool"; + } + + public String getType() { + // spring ai support function only + return "function"; + } + + public String getName() { + if (toolDefinition == null) { + return null; + } + return toolDefinition.name(); + } + + public String getDescription() { + if (toolDefinition == null) { + return null; + } + return toolDefinition.description(); + } + + public String getToolInput() { + return toolInput; + } + + public String getToolCallId() { + return toolCallId; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java new file mode 100644 index 000000000000..0139c4476cff --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java @@ -0,0 +1,85 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.returns; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; 
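Tying these pieces together: the tool-call instrumenter is handed a ToolCallRequest built from the raw tool input, the call id recovered from the context, and the callback's ToolDefinition; a sketch with illustrative values:

    ToolCallRequest request = ToolCallRequest.create(
        "{\"location\": \"London\"}",   // raw input passed to ToolCallback#call
        "call_abc123",                  // id from ToolCallContext, may be null
        toolCallback.getToolDefinition());
    // ToolCallAttributesGetter then maps this to gen_ai.operation.name=execute_tool,
    // gen_ai.tool.name / gen_ai.tool.description from the definition, and gen_ai.tool.type=function.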
+import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.tool.ToolCallback; + +@AutoService(TypeInstrumentation.class) +public class ToolCallbackInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.tool.ToolCallback"); + } + + @Override + public ElementMatcher typeMatcher() { + return implementsInterface(named("org.springframework.ai.tool.ToolCallback")); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod().and(named("call")).and(takesArguments(2)) + .and(takesArgument(0, named("java.lang.String"))) + .and(returns(named("java.lang.String"))), + this.getClass().getName() + "$CallAdvice"); + } + + @SuppressWarnings("unused") + public static class CallAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void callEnter( + @Advice.This ToolCallback toolCallback, + @Advice.Argument(0) String toolInput, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope, + @Advice.Local("toolCallRequest") ToolCallRequest request) { + context = Context.current(); + + // get tool call id from context + String toolCallId = ToolCallContext.getToolCallId(context, toolCallback.getToolDefinition().name()); + request = ToolCallRequest.create(toolInput, toolCallId, toolCallback.getToolDefinition()); + + if (TELEMETRY.toolCallInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.toolCallInstrumenter().start(context, request); + } + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void callExit( + @Advice.Return String result, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope, + @Advice.Local("toolCallRequest") ToolCallRequest request) { + if (scope == null) { + return; + } + scope.close(); + TELEMETRY.toolCallInstrumenter().end(context, request, result, throwable); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java new file mode 100644 index 000000000000..6cf4aabfcbbc --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java @@ -0,0 +1,18 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.instrumentation.spring.ai.v1_0.AbstractChatClientTest; +import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import org.junit.jupiter.api.extension.RegisterExtension; + +public class ChatClientTest extends AbstractChatClientTest { + + @RegisterExtension + private static final AgentInstrumentationExtension testing = + AgentInstrumentationExtension.create(); + + @Override + protected InstrumentationExtension getTesting() { + return testing; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/metadata.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/metadata.yaml 
new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts new file mode 100644 index 000000000000..018ea7ab229f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts @@ -0,0 +1,15 @@ +plugins { + id("otel.java-conventions") +} + +otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +dependencies { + implementation(project(":testing-common")) + + api("org.springframework.ai:spring-ai-openai:1.0.0") + api("org.springframework.ai:spring-ai-client-chat:1.0.0") + api(project(":instrumentation-api-incubator")) +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java new file mode 100644 index 000000000000..ee302f4200a9 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java @@ -0,0 +1,354 @@ +package io.opentelemetry.instrumentation.spring.ai.v1_0; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.EXECUTE_TOOL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.INVOKE_AGENT; +import static 
io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_DESCRIPTION; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_TYPE; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies; +import static java.util.Arrays.asList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; +import org.springframework.ai.chat.client.ChatClient; +import org.springframework.ai.chat.client.ChatClient.CallResponseSpec; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.messages.SystemMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; +import org.springframework.ai.chat.messages.UserMessage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.prompt.ChatOptions; +import org.springframework.ai.chat.prompt.Prompt; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.OpenAiChatOptions; + +public abstract class AbstractChatClientTest extends AbstractSpringAiTest { + + protected static final String TEST_CHAT_MODEL = "qwen3-coder-flash"; + protected static final String TEST_CHAT_INPUT = + "Answer in up to 3 words: Which ocean contains Bouvet Island?"; + protected static final String TEST_AGENT_NAME = "spring_ai chat_client"; + protected static final String TEST_TOOL_NAME = "get_weather"; + + @Test + void basic() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + ChatClient chatClient = getChatClient(); + + ChatResponse response = chatClient.prompt(prompt).call().chatResponse(); + String content = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: 
Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } + + @Test + void stream() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + ChatClient chatClient = getChatClient(); + + List chunks = chatClient.prompt(prompt).stream().chatResponse().toStream().collect( + Collectors.toList()); + + String fullMessage = + chunks.stream() + .map( + cc -> { + if (cc.getResults().isEmpty()) { + return Optional.empty(); + } + return Optional.of(cc.getResults().get(0).getOutput().getText()); + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.joining()); + + String content = "South Atlantic"; + assertEquals(fullMessage, content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } + + @Test + void with400Error() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); + ChatClient chatClient = getChatClient(); + + Throwable thrown = catchThrowable(() -> chatClient.prompt(prompt).call().chatResponse()); + assertThat(thrown).isInstanceOf(Exception.class); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span + .hasStatus(StatusData.error()) + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, "gpt-4o"), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + } + + @Test + void toolCalls() { + Prompt prompt = Prompt.builder() + 
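+ // The prompt pairs a system instruction with a user question about two cities and registers the
+ // get_weather tool callback, so the recorded exchange is expected to resolve two parallel
+ // get_weather tool calls (New York City and London).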
.messages(asList( + SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), + UserMessage.builder().text("What is the weather in New York City and London?").build())) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); + + ChatClient chatClient = getChatClient(); + + ChatResponse response = chatClient.prompt(prompt).call().chatResponse(); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 739L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 76L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 815L), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("The location to get the current temperature for"))), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 331L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 45L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 376L)), + // 2 spans are compressed into 1 span + span -> + span + .hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), + equalTo(GEN_AI_SPAN_KIND, "TOOL"), + equalTo(GEN_AI_TOOL_DESCRIPTION, "The location to get the current temperature for"), + equalTo(GEN_AI_TOOL_TYPE, "function"), + equalTo(GEN_AI_TOOL_NAME, TEST_TOOL_NAME)), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 408L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 31L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 439L)))); + } + + @Test + void streamToolCalls() { 
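+ // Same two-city weather scenario as toolCalls(), but the response is consumed through the
+ // streaming API; the chunks are drained into a list before the spans are asserted.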
+ Prompt prompt = Prompt.builder() + .messages(asList( + SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), + UserMessage.builder().text("What is the weather in New York City and London?").build())) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); + + ChatClient chatClient = getChatClient(); + + List chunks = chatClient.prompt(prompt).stream().chatResponse().toStream() + .collect(Collectors.toList()); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("The location to get the current temperature for"))), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM")), + // 2 spans are compressed into 1 span + span -> + span + .hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), + equalTo(GEN_AI_SPAN_KIND, "TOOL"), + equalTo(GEN_AI_TOOL_DESCRIPTION, "The location to get the current temperature for"), + equalTo(GEN_AI_TOOL_TYPE, "function"), + equalTo(GEN_AI_TOOL_NAME, TEST_TOOL_NAME)), + span -> + span + .hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } + +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java new file mode 100644 index 000000000000..ffd16778015c --- /dev/null +++ 
b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java @@ -0,0 +1,123 @@ +package io.opentelemetry.instrumentation.spring.ai.v1_0; + +import static java.util.Collections.singletonList; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import io.opentelemetry.instrumentation.testing.recording.RecordingExtension; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Version; +import java.util.List; +import java.util.function.Function; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.springframework.ai.chat.client.ChatClient; +import org.springframework.ai.model.tool.ToolCallingManager; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.api.OpenAiApi; +import org.springframework.ai.tool.ToolCallback; +import org.springframework.ai.tool.function.FunctionToolCallback; +import org.springframework.ai.tool.resolution.StaticToolCallbackResolver; +import org.springframework.http.client.JdkClientHttpRequestFactory; +import org.springframework.http.client.reactive.JdkClientHttpConnector; +import org.springframework.web.client.RestClient; +import org.springframework.web.reactive.function.client.WebClient; + +public abstract class AbstractSpringAiTest { + + protected static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-1.0"; + + private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; + + @RegisterExtension + static final RecordingExtension recording = new RecordingExtension(API_URL); + + protected abstract InstrumentationExtension getTesting(); + + private OpenAiApi openAiApi; + + private OpenAiChatModel chatModel; + + protected final OpenAiApi getOpenAiApi() { + if (openAiApi == null) { + HttpClient httpClient = HttpClient.newBuilder() + .version(Version.HTTP_1_1) + .build(); + + OpenAiApi.Builder builder = OpenAiApi.builder() + .restClientBuilder(RestClient.builder() + .requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder(WebClient.builder() + .clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); + if (recording.isRecording()) { + builder.apiKey(System.getenv("OPENAI_API_KEY")); + } else { + builder.apiKey("unused"); + } + openAiApi = builder.build(); + } + return openAiApi; + } + + protected final ToolCallingManager getToolCallingManager() { + return ToolCallingManager.builder() + .toolCallbackResolver( + new StaticToolCallbackResolver(getToolCallbacks())) + .build(); + } + + protected final OpenAiChatModel getChatModel() { + if (chatModel == null) { + chatModel = OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolCallingManager(getToolCallingManager()) + .build(); + } + return chatModel; + } + + protected final ChatClient getChatClient() { + return ChatClient.builder(getChatModel()).build(); + } + + protected final List getToolCallbacks() { + return singletonList( + FunctionToolCallback.builder("get_weather", new GetWeatherFunction()) + .description("The location to get the current temperature for") + .inputType(ToolInput.class) + .build()); + } + + @JsonClassDescription("The location to get the current 
temperature for") + public static class ToolInput { + @JsonPropertyDescription("location") + private String location; + + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } + + @JsonCreator + public ToolInput(@JsonProperty("location") String location) { + this.location = location; + } + } + + private static class GetWeatherFunction implements Function { + @Override + public String apply(ToolInput location) { + if (location.getLocation().contains("London")) { + return "15 degrees and raining"; + } + return "25 degrees and sunny"; + } + } + +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml new file mode 100644 index 000000000000..7326c6de9b3e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml @@ -0,0 +1,41 @@ +--- +id: 8fafdbfc-2cc0-4198-85a0-3cdbe8eebaf4 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758182305,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-443cc847-7f2c-486f-ba86-6383748b8842\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 443cc847-7f2c-486f-ba86-6383748b8842 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "261" + req-arrive-time: "1758182305225" + resp-start-time: "1758182305486" + x-envoy-upstream-service-time: "260" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:25 GMT" + server: istio-envoy +uuid: 8fafdbfc-2cc0-4198-85a0-3cdbe8eebaf4 +persistent: true +insertionIndex: 32 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml new file mode 100644 index 000000000000..ef07d4b31ec7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml @@ -0,0 +1,47 @@ +--- +id: 73a6455e-643f-4321-b633-1a1b98e70f42 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", 
+ "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"delta":{"content":" Atlantic"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: a9fd1b06-0202-4e61-8105-ba5e150d6718 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "243" + req-arrive-time: "1758182301783" + resp-start-time: "1758182302027" + x-envoy-upstream-service-time: "242" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:21 GMT" + server: istio-envoy +uuid: 73a6455e-643f-4321-b633-1a1b98e70f42 +persistent: true +insertionIndex: 8 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml new file mode 100644 index 000000000000..56c1af245723 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml @@ -0,0 +1,198 @@ +--- +id: 404f68b7-6b67-4297-851b-e79c6e4962da +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: 
{"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"call_5ae3e6e00f414bc08b14c713","type":"function","function":{"name":"get_weather","arguments":""}}],"role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"New York City"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"\""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"call_769dac36ee3a449984c540e7","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"London"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"\"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: 
{"choices":[{"delta":{"tool_calls":[{"function":{"arguments":""},"index":1,"id":"","type":"function"}]},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"finish_reason":"tool_calls","delta":{},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 155ae8e9-0fbb-4a0e-997d-96e4c8e79c46 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "266" + req-arrive-time: "1758182299922" + resp-start-time: "1758182300189" + x-envoy-upstream-service-time: "265" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:20 GMT" + server: istio-envoy +uuid: 404f68b7-6b67-4297-851b-e79c6e4962da +persistent: true +insertionIndex: 2 +--- +id: 5fc59074-53b1-46dc-a21e-76e5545e985d +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_5ae3e6e00f414bc08b14c713", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_769dac36ee3a449984c540e7", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" + } + } ] + }, { + "content" : "\"25 degrees and sunny\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_5ae3e6e00f414bc08b14c713" + }, { + "content" : "\"15 degrees and raining\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_769dac36ee3a449984c540e7" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"The","role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" current"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" 
weather"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" is"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" as follows:\n-"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" **New York City"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":"**: 25"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" degrees and sunny.\n"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":"- **London**:"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" 15 degrees"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" and raining."},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: cf97005f-1370-480f-8702-edd4f33f3dcf + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "215" + req-arrive-time: "1758182300832" + resp-start-time: "1758182301048" + x-envoy-upstream-service-time: "215" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:21 GMT" + server: istio-envoy +uuid: 5fc59074-53b1-46dc-a21e-76e5545e985d +persistent: true +insertionIndex: 3 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml 
b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml new file mode 100644 index 000000000000..f51a7b4d3b8e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml @@ -0,0 +1,163 @@ +--- +id: a4b86c3d-75a1-40bd-ac90-93a2d1d062a8 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"\",\"role\":\"assistant\",\"tool_calls\"\ + :[{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"New York City\\\"}\",\"\ + name\":\"get_weather\"},\"id\":\"call_f964ea7704d446a8867e951a\",\"index\":0,\"\ + type\":\"function\"},{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"London\\\ + \"}\",\"name\":\"get_weather\"},\"id\":\"call_b308af719e54417396b302e9\",\"index\"\ + :1,\"type\":\"function\"}]},\"finish_reason\":\"tool_calls\",\"index\":0,\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":331,\"completion_tokens\"\ + :45,\"total_tokens\":376},\"created\":1758182304,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-44c186a0-8a78-4a38-8d82-4acd82eb6a54\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 44c186a0-8a78-4a38-8d82-4acd82eb6a54 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "609" + req-arrive-time: "1758182303354" + resp-start-time: "1758182303963" + x-envoy-upstream-service-time: "608" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:23 GMT" + server: istio-envoy +uuid: a4b86c3d-75a1-40bd-ac90-93a2d1d062a8 +persistent: true +insertionIndex: 14 +--- +id: 55b829f9-d19a-431a-b61f-172e0db88979 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_f964ea7704d446a8867e951a", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_b308af719e54417396b302e9", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" 
+ } + } ] + }, { + "content" : "\"25 degrees and sunny\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_f964ea7704d446a8867e951a" + }, { + "content" : "\"15 degrees and raining\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_b308af719e54417396b302e9" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"The current weather is as follows:\\\ + n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.\"\ + ,\"role\":\"assistant\"},\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"\ + object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":408,\"completion_tokens\"\ + :31,\"total_tokens\":439},\"created\":1758182305,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-fcaf6b84-ec12-420a-bcd3-ffbe1411d7bd\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: fcaf6b84-ec12-420a-bcd3-ffbe1411d7bd + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "596" + req-arrive-time: "1758182304162" + resp-start-time: "1758182304759" + x-envoy-upstream-service-time: "595" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:24 GMT" + server: istio-envoy +uuid: 55b829f9-d19a-431a-b61f-172e0db88979 +persistent: true +insertionIndex: 15 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml new file mode 100644 index 000000000000..1f8598d64aaa --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml @@ -0,0 +1,38 @@ +--- +id: c54797f6-72e9-482e-ac4c-c46624a7e78c +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "gpt-4o", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 404 + body: "{\"error\":{\"message\":\"The model `gpt-4o` does not exist or you do not\ + \ have access to it.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\"\ + :\"model_not_found\"},\"request_id\":\"6739e7e5-b528-4989-ac08-e9e293c378f3\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 6739e7e5-b528-4989-ac08-e9e293c378f3 + content-type: application/json + req-cost-time: "9" + 
req-arrive-time: "1758186777371" + resp-start-time: "1758186777380" + x-envoy-upstream-service-time: "8" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 09:12:57 GMT" + server: istio-envoy +uuid: c54797f6-72e9-482e-ac4c-c46624a7e78c +persistent: true +insertionIndex: 14 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts new file mode 100644 index 000000000000..e30f8520ff61 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts @@ -0,0 +1,69 @@ +plugins { + id("otel.javaagent-instrumentation") +} + +otelJava { + // Spring AI OpenAI requires java 17 (same as Spring AI) + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +muzzle { + pass { + group.set("org.springframework.ai") + module.set("spring-ai-openai") + versions.set("(,)") + } +} + +repositories { + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + mavenContent { + snapshotsOnly() + } + } + mavenCentral() +} + +dependencies { + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-openai:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") + + implementation(project(":instrumentation:reactor:reactor-3.1:library")) + + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing")) +} + +tasks { + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } + } + + // TODO run tests both with and without genai message capture + systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java new file mode 100644 index 000000000000..fb9d2a2feb66 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java @@ -0,0 +1,165 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static java.util.Collections.emptyList; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes; +import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; +import java.util.List; +import 
java.util.stream.Collectors; + +enum ChatModelAttributesGetter + implements GenAiAttributesGetter { + INSTANCE; + + @Override + public String getOperationName(ChatCompletionRequest request) { + return GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; + } + + @Override + public String getSystem(ChatCompletionRequest request) { + return GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; + } + + @Nullable + @Override + public String getRequestModel(ChatCompletionRequest request) { + return request.model(); + } + + @Nullable + @Override + public String getOperationTarget(ChatCompletionRequest request) { + return getRequestModel(request); + } + + + @Nullable + @Override + public Long getRequestSeed(ChatCompletionRequest request) { + if (request.seed() == null) { + return null; + } + return Long.valueOf(request.seed()); + } + + @Nullable + @Override + public List getRequestEncodingFormats(ChatCompletionRequest request) { + return null; + } + + @Nullable + @Override + public Double getRequestFrequencyPenalty(ChatCompletionRequest request) { + return request.frequencyPenalty(); + } + + @Nullable + @Override + public Long getRequestMaxTokens(ChatCompletionRequest request) { + if (request.maxTokens() == null && request.maxCompletionTokens() == null) { + return null; + } + // Use maxCompletionTokens if available, otherwise fall back to maxTokens + Integer maxTokens = request.maxCompletionTokens() != null ? request.maxCompletionTokens() : request.maxTokens(); + return maxTokens != null ? Long.valueOf(maxTokens) : null; + } + + @Nullable + @Override + public Double getRequestPresencePenalty(ChatCompletionRequest request) { + return request.presencePenalty(); + } + + @Nullable + @Override + public List getRequestStopSequences(ChatCompletionRequest request) { + if (request.stop() == null) { + return null; + } + return request.stop(); + } + + @Nullable + @Override + public Double getRequestTemperature(ChatCompletionRequest request) { + return request.temperature(); + } + + @Nullable + @Override + public Double getRequestTopK(ChatCompletionRequest request) { + // OpenAI doesn't support top_k parameter + return null; + } + + @Nullable + @Override + public Double getRequestTopP(ChatCompletionRequest request) { + return request.topP(); + } + + @Nullable + @Override + public Long getChoiceCount(ChatCompletionRequest request) { + if (request.n() == null) { + return null; + } + return Long.valueOf(request.n()); + } + + @Override + public List getResponseFinishReasons( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null || response.choices() == null) { + return emptyList(); + } + return response.choices().stream() + .map(choice -> choice.finishReason() != null ? 
choice.finishReason().name().toLowerCase() : "") + .collect(Collectors.toList()); + } + + @Override + @Nullable + public String getResponseId( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null) { + return null; + } + return response.id(); + } + + @Override + @Nullable + public String getResponseModel( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null) { + return null; + } + return response.model(); + } + + @Override + @Nullable + public Long getUsageInputTokens( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null || response.usage() == null || response.usage().promptTokens() == null) { + return null; + } + return Long.valueOf(response.usage().promptTokens()); + } + + @Override + @Nullable + public Long getUsageOutputTokens( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null || response.usage() == null || response.usage().completionTokens() == null) { + return null; + } + return Long.valueOf(response.usage().completionTokens()); + } +} \ No newline at end of file diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java new file mode 100644 index 000000000000..2692c2db3ed0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java @@ -0,0 +1,151 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionFinishReason; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ChatCompletionFunction; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.Role; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; + +final class ChatModelMessageBuffer { + private static final String TRUNCATE_FLAG = "...[truncated]"; + private final int index; + private final MessageCaptureOptions messageCaptureOptions; + + @Nullable private ChatCompletionFinishReason finishReason; + + @Nullable private StringBuilder rawContent; + + @Nullable private Role role; + + @Nullable private String name; + + @Nullable private String toolCallId; + + @Nullable private Map toolCalls; + + ChatModelMessageBuffer(int index, MessageCaptureOptions messageCaptureOptions) { + this.index = index; + this.messageCaptureOptions = messageCaptureOptions; + } + + Choice toChoice() { + List toolCalls = null; + if (this.toolCalls != null) { + toolCalls = new ArrayList<>(this.toolCalls.size()); + for (Map.Entry entry : this.toolCalls.entrySet()) { + if (entry.getValue() != null) { + String arguments = null; + if (entry.getValue().function.arguments != null) { 
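+ // the argument fragments were appended to a StringBuilder chunk by chunk; materialize the full string here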
+ arguments = entry.getValue().function.arguments.toString(); + } + toolCalls.add(new ToolCall(entry.getValue().id, entry.getValue().type, + new ChatCompletionFunction(entry.getValue().function.name, arguments))); + } + } + } + + String content = ""; + // Type of content is String for OpenAI + if (rawContent != null) { + content = rawContent.toString(); + } + + return new Choice( + finishReason, + index, + new ChatCompletionMessage(content, role, name, toolCallId, toolCalls, null, null, null), + null); + } + + void append(Choice choice) { + if (choice.message() != null) { + if (this.messageCaptureOptions.captureMessageContent()) { + // Type of content is String for OpenAI + if (choice.message().rawContent() instanceof String) { + if (this.rawContent == null) { + this.rawContent = new StringBuilder(); + } + + String deltaContent = (String) choice.message().rawContent(); + if (this.rawContent.length() < this.messageCaptureOptions.maxMessageContentLength()) { + if (this.rawContent.length() + deltaContent.length() >= this.messageCaptureOptions.maxMessageContentLength() ) { + deltaContent = deltaContent.substring(0, this.messageCaptureOptions.maxMessageContentLength() - this.rawContent.length()); + this.rawContent.append(deltaContent).append(TRUNCATE_FLAG); + } else { + this.rawContent.append(deltaContent); + } + } + } + } + + if (choice.message().toolCalls() != null) { + if (this.toolCalls == null) { + this.toolCalls = new HashMap<>(); + } + + for (int i = 0; i < choice.message().toolCalls().size(); i++) { + ToolCall toolCall = choice.message().toolCalls().get(i); + ToolCallBuffer buffer = + this.toolCalls.computeIfAbsent( + i, unused -> new ToolCallBuffer(toolCall.id())); + if (toolCall.type() != null) { + buffer.type = toolCall.type(); + } + + if (toolCall.function() != null) { + if (toolCall.function().name() != null) { + buffer.function.name = toolCall.function().name(); + } + if (this.messageCaptureOptions.captureMessageContent() && toolCall.function().arguments() != null) { + if (buffer.function.arguments == null) { + buffer.function.arguments = new StringBuilder(); + } + buffer.function.arguments.append(toolCall.function().arguments()); + } + } + } + } + + if (choice.message().role() != null) { + this.role = choice.message().role(); + } + if (choice.message().name() != null) { + this.name = choice.message().name(); + } + if (choice.message().toolCallId() != null) { + this.toolCallId = choice.message().toolCallId(); + } + } + + if (choice.finishReason() != null) { + this.finishReason = choice.finishReason(); + } + } + + private static class FunctionBuffer { + @Nullable String name; + @Nullable StringBuilder arguments; + } + + private static class ToolCallBuffer { + final String id; + final FunctionBuffer function = new FunctionBuffer(); + @Nullable String type; + + ToolCallBuffer(String id) { + this.id = id; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java new file mode 100644 index 000000000000..812fbd7f1253 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java @@ -0,0 +1,219 @@ +package 
io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.MessagePart; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.Role; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.TextPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallRequestPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallResponsePart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinition; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesProvider; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public final class ChatModelMessagesProvider + implements GenAiMessagesProvider { + + private static final String TRUNCATE_FLAG = "...[truncated]"; + + private final MessageCaptureOptions messageCaptureOptions; + + ChatModelMessagesProvider(MessageCaptureOptions messageCaptureOptions) { + this.messageCaptureOptions = messageCaptureOptions; + } + + public static ChatModelMessagesProvider create(MessageCaptureOptions messageCaptureOptions) { + return new ChatModelMessagesProvider(messageCaptureOptions); + } + + @Nullable + @Override + public InputMessages inputMessages(ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (!messageCaptureOptions.captureMessageContent() + || request.messages() == null) { + return null; + } + + InputMessages inputMessages = InputMessages.create(); + for (ChatCompletionMessage msg : request.messages()) { + + if (msg.role() == ChatCompletionMessage.Role.SYSTEM) { + inputMessages.append( + InputMessage.create(Role.SYSTEM, contentToMessageParts(msg.rawContent()))); + } else if (msg.role() == ChatCompletionMessage.Role.USER) { + inputMessages.append(InputMessage.create(Role.USER, contentToMessageParts(msg.rawContent()))); + } else if (msg.role() == ChatCompletionMessage.Role.ASSISTANT) { + List messageParts = new ArrayList<>(); + + List contentParts = contentToMessagePartsOrNull(msg.rawContent()); + if (contentParts != null) { + messageParts.addAll(contentParts); + } + + List toolCalls = msg.toolCalls(); + if (toolCalls != null) { + messageParts.addAll(toolCalls.stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); + } else if (msg.role() == ChatCompletionMessage.Role.TOOL) { + inputMessages.append(InputMessage.create(Role.TOOL, contentToToolMessageParts(msg.toolCallId(), 
msg.rawContent()))); + } + } + return inputMessages; + } + + @Nullable + @Override + public OutputMessages outputMessages(ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (!messageCaptureOptions.captureMessageContent() + || response == null + || response.choices() == null) { + return null; + } + + OutputMessages outputMessages = OutputMessages.create(); + for (Choice choice : response.choices()) { + ChatCompletionMessage choiceMsg = choice.message(); + List messageParts = new ArrayList<>(); + + if (choiceMsg != null) { + List contentParts = contentToMessagePartsOrNull(choiceMsg.rawContent()); + if (contentParts != null) { + messageParts.addAll(contentParts); + } + List toolCalls = choiceMsg.toolCalls(); + if (toolCalls != null) { + messageParts.addAll(toolCalls.stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + } + + outputMessages.append( + OutputMessage.create( + Role.ASSISTANT, + messageParts, + choice.finishReason() != null ? choice.finishReason().name().toLowerCase() : "")); + } + return outputMessages; + } + + @Nullable + @Override + public SystemInstructions systemInstructions(ChatCompletionRequest request, @Nullable ChatCompletion response) { + return null; + } + + @Nullable + @Override + public ToolDefinitions toolDefinitions(ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (request.tools() == null) { + return null; + } + + ToolDefinitions toolDefinitions = ToolDefinitions.create(); + request.tools() + .stream() + .filter(Objects::nonNull) + .map(tool -> { + if (tool.getFunction() != null) { + String name = tool.getFunction().getName(); + String type = tool.getType().name().toLowerCase(); + if (messageCaptureOptions.captureMessageContent() && tool.getFunction().getDescription() != null) { + return ToolDefinition.create(type, name, tool.getFunction().getDescription(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + } + return null; + }) + .filter(Objects::nonNull) + .forEach(toolDefinitions::append); + + return toolDefinitions; + } + + /** + * Support content: + *

+   *   <ul>
+   *     <li>{@code String}</li>
+   *     <li>{@code List}</li>
+   *   </ul>
+   * */
+  private List<MessagePart> contentToMessageParts(Object rawContent) {
+    List<MessagePart> messageParts = contentToMessagePartsOrNull(rawContent);
+    return messageParts == null ? Collections.singletonList(TextPart.create("")) : messageParts;
+  }
+
+  /**
+   * Support content:
+   *   <ul>
+   *     <li>{@code String}</li>
+   *     <li>{@code List}</li>
+   *   </ul>
+ * */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private List contentToMessagePartsOrNull(Object rawContent) { + if (rawContent instanceof String && !((String) rawContent).isEmpty()) { + return Collections.singletonList(TextPart.create(truncateTextContent((String) rawContent))); + } else if (rawContent instanceof List) { + return joinContentParts((List) rawContent); + } else { + return null; + } + } + + private MessagePart toolCallToMessagePart(ToolCall call) { + if (call != null && call.function() != null) { + return ToolCallRequestPart.create(call.id(), call.function().name(), call.function().arguments()); + } + return ToolCallRequestPart.create("unknown_function"); + } + + /** + * Support content: + *
+   *   <ul>
+   *     <li>{@code String}</li>
+   *     <li>{@code List}</li>
+   *   </ul>
+ * */ + private List contentToToolMessageParts(String toolCallId, Object rawContent) { + if (rawContent instanceof String && !((String) rawContent).isEmpty()) { + return Collections.singletonList(ToolCallResponsePart.create(toolCallId, truncateTextContent((String) rawContent))); + } + return Collections.singletonList(ToolCallResponsePart.create(toolCallId)); + } + + private List joinContentParts(List contentParts) { + return contentParts.stream() + .filter(part -> part instanceof String) + .map(part -> this.truncateTextContent((String) part)) + .map(TextPart::create) + .collect(Collectors.toList()); + } + + private String truncateTextContent(String content) { + if (!content.endsWith(TRUNCATE_FLAG) && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + } + return content; + } + +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java new file mode 100644 index 000000000000..19764b4c134d --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java @@ -0,0 +1,140 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk.ChunkChoice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import org.springframework.ai.openai.api.OpenAiApi.Usage; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +public final class ChatModelStreamListener { + + private final Context context; + private final ChatCompletionRequest request; + private final Instrumenter instrumenter; + private final MessageCaptureOptions messageCaptureOptions; + private final boolean newSpan; + private final AtomicBoolean hasEnded; + private final List chatModelMessageBuffers; + + // Aggregated metadata + private final AtomicLong inputTokens = new AtomicLong(0); + private final AtomicLong outputTokens = new AtomicLong(0); + private final AtomicReference requestId = new AtomicReference<>(); + + public ChatModelStreamListener( + Context context, + ChatCompletionRequest request, + Instrumenter instrumenter, + MessageCaptureOptions messageCaptureOptions, + boolean newSpan) { + this.context = context; + this.request = request; + this.instrumenter = instrumenter; + this.messageCaptureOptions = messageCaptureOptions; + this.newSpan = newSpan; + this.hasEnded = new AtomicBoolean(); + this.chatModelMessageBuffers = new ArrayList<>(); + } + + 
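+  /**
+   * Aggregates streamed chunks: records the response id and token usage when present, and
+   * buffers each choice's deltas by index so a complete {@code ChatCompletion} can be rebuilt
+   * when the stream ends (see {@code endSpan}).
+   */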
public void onChunk(ChatCompletionChunk chunk) { + if (chunk == null) { + return; + } + + if (chunk.id() != null) { + requestId.set(chunk.id()); + } + if (chunk.usage() != null) { + if (chunk.usage().promptTokens() != null) { + inputTokens.set(chunk.usage().promptTokens().longValue()); + } + if (chunk.usage().completionTokens() != null) { + outputTokens.set(chunk.usage().completionTokens().longValue()); + } + } + + if (chunk.choices() != null) { + List choices = chunk.choices(); + for (ChunkChoice choice : choices) { + while (chatModelMessageBuffers.size() <= choice.index()) { + chatModelMessageBuffers.add(null); + } + ChatModelMessageBuffer buffer = chatModelMessageBuffers.get(choice.index()); + if (buffer == null) { + buffer = new ChatModelMessageBuffer(choice.index(), messageCaptureOptions); + chatModelMessageBuffers.set(choice.index(), buffer); + } + + // Convert ChunkChoice to Choice for compatibility with buffer + buffer.append( + new org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice( + choice.finishReason(), + choice.index(), + choice.delta(), + choice.logprobs()) + ); + } + } + } + + public void endSpan(@Nullable Throwable error) { + // Use an atomic operation since close() type of methods are exposed to the user + // and can come from any thread. + if (!this.hasEnded.compareAndSet(false, true)) { + return; + } + + if (this.chatModelMessageBuffers.isEmpty()) { + // Only happens if we got no chunks, so we have no response. + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, null, error); + } + return; + } + + Integer inputTokens = null; + if (this.inputTokens.get() > 0) { + inputTokens = (int) this.inputTokens.get(); + } + + Integer outputTokens = null; + if (this.outputTokens.get() > 0) { + outputTokens = (int) this.outputTokens.get(); + } + + List choices = this.chatModelMessageBuffers.stream() + .map(ChatModelMessageBuffer::toChoice) + .collect(Collectors.toList()); + + ChatCompletion result = new ChatCompletion( + this.requestId.get(), + choices, + null, // created + null, // model + null, // serviceTier + null, // systemFingerprint + "chat.completion", + new Usage(outputTokens, inputTokens, + inputTokens != null && outputTokens != null ? 
inputTokens + outputTokens : null, + null, null)); + + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, result, error); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java new file mode 100644 index 000000000000..9973677f584a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java @@ -0,0 +1,30 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; +import io.opentelemetry.javaagent.bootstrap.reactor.ReactorSubscribeOnProcessTracing; +import org.springframework.ai.chat.model.ChatResponse; +import reactor.core.publisher.Flux; + +public final class ChatModelStreamWrapper { + + public static Flux wrap( + Flux originFlux, + ChatModelStreamListener streamListener, + Context context) { + + Flux chatCompletionChunkFlux = originFlux.doOnNext( + chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); + return ContextPropagationOperator.runWithContext(chatCompletionChunkFlux, context); + } + + public static Flux enableContextPropagation(Flux originFlux) { + return originFlux + .contextWrite(ctx -> ctx.put(ReactorSubscribeOnProcessTracing.CONTEXT_PROPAGATION_KEY, true)); + } + + private ChatModelStreamWrapper() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java new file mode 100644 index 000000000000..8e876d5d21f4 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java @@ -0,0 +1,125 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0.SpringAiOpenaiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import 
io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.http.ResponseEntity; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class OpenAiApiInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.openai.api.OpenAiApi"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.openai.api.OpenAiApi"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod().and(named("chatCompletionEntity")).and(takesArguments(2)) + .and(takesArgument(0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + .and(returns(named("org.springframework.http.ResponseEntity"))), + this.getClass().getName() + "$CallAdvice"); + + transformer.applyAdviceToMethod( + isMethod().and(named("chatCompletionStream")).and(takesArguments(2)) + .and(takesArgument(0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + .and(returns(named("reactor.core.publisher.Flux"))), + this.getClass().getName() + "$StreamAdvice"); + } + + @SuppressWarnings("unused") + public static class CallAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void callEnter( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + Context parentContext = Context.current(); + if (!TELEMETRY.chatCompletionInstrumenter().shouldStart(parentContext, request)) { + return; + } + + context = TELEMETRY.chatCompletionInstrumenter().start(parentContext, request); + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void callExit( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Return ResponseEntity response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + if (scope == null) { + return; + } + scope.close(); + + TELEMETRY.chatCompletionInstrumenter() + .end(context, request, response.hasBody() ? 
response.getBody() : null, throwable); + } + } + + @SuppressWarnings("unused") + public static class StreamAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void streamEnter( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatModelStreamListener streamListener) { + context = Context.current(); + + if (TELEMETRY.chatCompletionInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatCompletionInstrumenter().start(context, request); + streamListener = new ChatModelStreamListener( + context, request, TELEMETRY.chatCompletionInstrumenter(), + TELEMETRY.messageCaptureOptions(), true); + } + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void streamExit( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatModelStreamListener streamListener) { + + if (throwable != null) { + // In case of exception, directly call end + TELEMETRY.chatCompletionInstrumenter().end(context, request, null, throwable); + return; + } + + if (streamListener != null) { + // Wrap the response to integrate the stream listener + response = ChatModelStreamWrapper.wrap(response, streamListener, context); + } + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java new file mode 100644 index 000000000000..335b0e61a2d8 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java @@ -0,0 +1,63 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.model.ChatResponse; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class OpenAiChatModelInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.openai.OpenAiChatModel"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.openai.OpenAiChatModel"); + } + + @Override + public void transform(TypeTransformer transformer) { + 
transformer.applyAdviceToMethod( + isMethod().and(named("internalStream")).and(takesArguments(2)) + .and(returns(named("reactor.core.publisher.Flux"))), + this.getClass().getName() + "$StreamAdvice"); + } + + @SuppressWarnings("unused") + public static class StreamAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void streamEnter() { + // do nothing + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void streamExit( + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable) { + if (throwable != null) { + return; + } + + response = ChatModelStreamWrapper.enableContextPropagation(response); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java new file mode 100644 index 000000000000..19c358b1f36a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static java.util.Arrays.asList; + +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import java.util.List; + +@AutoService(InstrumentationModule.class) +public class SpringAiOpenaiInstrumentationModule extends InstrumentationModule { + public SpringAiOpenaiInstrumentationModule() { + super("spring-ai-openai", "spring-ai-openai-1.0"); + } + + @Override + public List typeInstrumentations() { + return asList(new OpenAiChatModelInstrumentation(), + new OpenAiApiInstrumentation()); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java new file mode 100644 index 000000000000..b240df7f8d12 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java @@ -0,0 +1,26 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig; + +public final class SpringAiOpenaiSingletons { + public static final SpringAiOpenaiTelemetry TELEMETRY = + SpringAiOpenaiTelemetry.builder(GlobalOpenTelemetry.get()) + .setCaptureMessageContent( + InstrumentationConfig.get() + .getBoolean("otel.instrumentation.genai.capture-message-content", true)) + .setContentMaxLength( + InstrumentationConfig.get() + .getInt("otel.instrumentation.genai.message-content.max-length", 8192)) + 
.setCaptureMessageStrategy( + InstrumentationConfig.get() + .getString("otel.instrumentation.genai.message-content.capture-strategy", "span-attributes")) + .build(); + + private SpringAiOpenaiSingletons() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java new file mode 100644 index 000000000000..140f1473558a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java @@ -0,0 +1,44 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; + +/** + * Entrypoint for instrumenting Spring AI OpenAI clients. + */ +public final class SpringAiOpenaiTelemetry { + + /** + * Returns a new {@link SpringAiOpenaiTelemetryBuilder} configured with the given {@link OpenTelemetry}. + */ + public static SpringAiOpenaiTelemetryBuilder builder(OpenTelemetry openTelemetry) { + return new SpringAiOpenaiTelemetryBuilder(openTelemetry); + } + + private final Instrumenter chatCompletionInstrumenter; + private final MessageCaptureOptions messageCaptureOptions; + + SpringAiOpenaiTelemetry( + Instrumenter chatCompletionInstrumenter, + MessageCaptureOptions messageCaptureOptions) { + this.chatCompletionInstrumenter = chatCompletionInstrumenter; + this.messageCaptureOptions = messageCaptureOptions; + } + + public Instrumenter chatCompletionInstrumenter() { + return chatCompletionInstrumenter; + } + + public MessageCaptureOptions messageCaptureOptions() { + return messageCaptureOptions; + } + +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java new file mode 100644 index 000000000000..9d4b2e005f29 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java @@ -0,0 +1,86 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import 
io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiSpanNameExtractor; + +/** + * Builder for {@link SpringAiOpenaiTelemetry}. + */ +public final class SpringAiOpenaiTelemetryBuilder { + + private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-openai-1.0"; + + private final OpenTelemetry openTelemetry; + + private boolean captureMessageContent; + + private int contentMaxLength; + + private String captureMessageStrategy; + + SpringAiOpenaiTelemetryBuilder(OpenTelemetry openTelemetry) { + this.openTelemetry = openTelemetry; + } + + /** + * Sets whether to capture message content in spans. Defaults to false. + */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { + this.captureMessageContent = captureMessageContent; + return this; + } + + /** + * Sets the maximum length of message content to capture. Defaults to 8192. + */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setContentMaxLength(int contentMaxLength) { + this.contentMaxLength = contentMaxLength; + return this; + } + + /** + * Sets the strategy to capture message content. Defaults to "span-attributes". + */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { + this.captureMessageStrategy = captureMessageStrategy; + return this; + } + + /** + * Returns a new {@link SpringAiOpenaiTelemetry} with the settings of this {@link + * SpringAiOpenaiTelemetryBuilder}. + */ + public SpringAiOpenaiTelemetry build() { + MessageCaptureOptions messageCaptureOptions = MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); + + Instrumenter chatCompletionInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ChatModelAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiAttributesExtractor.create(ChatModelAttributesGetter.INSTANCE)) + .addAttributesExtractor(GenAiMessagesExtractor.create( + ChatModelAttributesGetter.INSTANCE, + ChatModelMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, INSTRUMENTATION_NAME)) + .buildInstrumenter(); + + return new SpringAiOpenaiTelemetry(chatCompletionInstrumenter, messageCaptureOptions); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java new file mode 100644 index 000000000000..743d1e309b28 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java @@ -0,0 +1,18 @@ +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.instrumentation.spring.ai.openai.v1_0.AbstractChatCompletionTest; +import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import org.junit.jupiter.api.extension.RegisterExtension; + +public class ChatCompletionTest extends 
AbstractChatCompletionTest { + + @RegisterExtension + private static final AgentInstrumentationExtension testing = + AgentInstrumentationExtension.create(); + + @Override + protected InstrumentationExtension getTesting() { + return testing; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/metadata.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/metadata.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts new file mode 100644 index 000000000000..f2cd5575ae3f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts @@ -0,0 +1,14 @@ +plugins { + id("otel.java-conventions") +} + +otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +dependencies { + api(project(":testing-common")) + + api("org.springframework.ai:spring-ai-openai:1.0.0") + api(project(":instrumentation-api-incubator")) +} \ No newline at end of file diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java new file mode 100644 index 000000000000..d898ed230429 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java @@ -0,0 +1,484 @@ +package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_CHOICE_COUNT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_FREQUENCY_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MAX_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_PRESENCE_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_SEED; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_STOP_SEQUENCES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_P; +import static 
io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies; +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.messages.SystemMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.OpenAiChatOptions; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; +import org.springframework.ai.chat.messages.UserMessage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.prompt.ChatOptions; +import org.springframework.ai.chat.prompt.Prompt; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.tool.ToolCallback; +import org.springframework.ai.tool.function.FunctionToolCallback; +import org.springframework.ai.tool.metadata.ToolMetadata; +import org.springframework.ai.tool.method.MethodToolCallback; +import reactor.core.publisher.Flux; + +public abstract class AbstractChatCompletionTest extends AbstractSpringAiOpenaiTest { + + protected static final String TEST_CHAT_MODEL = "qwen3-coder-flash"; + protected static final String TEST_CHAT_INPUT = + "Answer in up to 3 words: Which ocean contains Bouvet Island?"; + + @Test + void basic() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content = "South Atlantic"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + 
trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void stream() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + List chunks = chatModel.stream(prompt).collectList().block(); + + String fullMessage = + chunks.stream() + .map( + cc -> { + if (cc.getResults().isEmpty()) { + return Optional.empty(); + } + return Optional.of(cc.getResults().get(0).getOutput().getText()); + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.joining()); + + String content = "South Atlantic"; + assertEquals(fullMessage, content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void allTheClientOptions() { + OpenAiChatOptions options = OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .maxTokens(1000) + .seed(100) + .stop(singletonList("foo")) + .topP(1.0) + .temperature(0.8) + .frequencyPenalty(0.5) + .presencePenalty(0.3) + .build(); + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(options) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + 
equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.8d), + equalTo(GEN_AI_REQUEST_MAX_TOKENS, Long.valueOf(options.getMaxTokens())), + equalTo(GEN_AI_REQUEST_SEED, Long.valueOf(options.getSeed())), + satisfies(GEN_AI_REQUEST_STOP_SEQUENCES, seq -> seq.hasSize(options.getStop().size())), + equalTo(GEN_AI_REQUEST_TOP_P, options.getTopP()), + equalTo(GEN_AI_REQUEST_FREQUENCY_PENALTY, options.getFrequencyPenalty()), + equalTo(GEN_AI_REQUEST_PRESENCE_PENALTY, options.getPresencePenalty()), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void with400Error() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + Throwable thrown = catchThrowable(() -> chatModel.stream(prompt).collectList().block()); + assertThat(thrown).isInstanceOf(Exception.class); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, "gpt-4o"), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + } + + @Test + void multipleChoices() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .N(2) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content1 = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content1); + String content2 = "South"; + assertThat(response.getResults().get(1).getOutput().getText()).isEqualTo(content2); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_CHOICE_COUNT, 2), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop", "stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + 
equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 3L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 26L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void streamMultipleChoices() { + Prompt prompt = Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .N(2) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + // there's a bug in open-ai chat model, thus we couldn't agg multi choice + List chunks = chatModel.stream(prompt).collectList().block(); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + equalTo(GEN_AI_REQUEST_CHOICE_COUNT, 2), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop", "stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void toolCalls() { + Prompt prompt = Prompt.builder() + .messages(asList( + SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), + UserMessage.builder().text("What is the weather in New York City and London?").build())) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + + List toolCalls = response.getResult().getOutput().getToolCalls(); + + assertThat(toolCalls.get(0).id()).startsWith("call_"); + assertThat(toolCalls.get(1).id()).startsWith("call_"); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_MODEL, "qwen3-coder-flash"), + satisfies(GEN_AI_RESPONSE_ID, id -> id.startsWith("chatcmpl-")), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 311L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 45L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 356L), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> 
messages.contains("What is the weather in New York City and London?")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("get_weather")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("New York City")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_calls")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + + getTesting().clearData(); + + prompt = Prompt.builder() + .messages( + asList( + SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), + UserMessage.builder().text("What is the weather in New York City and London?").build(), + response.getResult().getOutput(), + new ToolResponseMessage( + asList( + new ToolResponse(toolCalls.get(0).id(), "get_weather", "25 degrees and sunny"), + new ToolResponse(toolCalls.get(1).id(), "get_weather", "15 degrees and sunny"))))) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + + response = chatModel.call(prompt); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + satisfies(GEN_AI_RESPONSE_ID, id -> id.startsWith("chatcmpl-")), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_MODEL, "qwen3-coder-flash"), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 386L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 31L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 417L), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("tool_call_response")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("25 degrees and sunny")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("15 degrees and sunny")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + } + + @Test + void streamToolCalls() { + Prompt prompt = Prompt.builder() + .messages(asList( + SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), + UserMessage.builder().text("What is the weather in New York City and London?").build())) + .chatOptions(OpenAiChatOptions + .builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + List chunks = chatModel.stream(prompt).toStream().collect(Collectors.toList()); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, 
OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("get_weather")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("New York City")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_calls")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + } + + private ToolCallback buildGetWeatherToolDefinition() { + return FunctionToolCallback.builder("get_weather", new GetWeatherFunction()) + .description("The location to get the current temperature for") + .inputType(ToolInput.class) + .build(); + } + + public static class ToolInput { + private String location; + + public String getLocation() { + return location; + } + + public ToolInput(String location) { + this.location = location; + } + } + + private static class GetWeatherFunction implements Function { + @Override + public String apply(ToolInput location) { + return "test function"; + } + } + +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java new file mode 100644 index 000000000000..1812e14c71a5 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java @@ -0,0 +1,61 @@ +package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; + +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.api.OpenAiApi; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import io.opentelemetry.instrumentation.testing.recording.RecordingExtension; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Version; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.springframework.http.client.JdkClientHttpRequestFactory; +import org.springframework.http.client.reactive.JdkClientHttpConnector; +import org.springframework.web.client.RestClient; +import org.springframework.web.reactive.function.client.WebClient; + +public abstract class AbstractSpringAiOpenaiTest { + + protected static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-openai-1.0"; + + private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; + + @RegisterExtension 
+ static final RecordingExtension recording = new RecordingExtension(API_URL); + + protected abstract InstrumentationExtension getTesting(); + + private OpenAiApi openAiApi; + + private OpenAiChatModel chatModel; + + protected final OpenAiApi getOpenAiApi() { + if (openAiApi == null) { + HttpClient httpClient = HttpClient.newBuilder() + .version(Version.HTTP_1_1) + .build(); + + OpenAiApi.Builder builder = OpenAiApi.builder() + .restClientBuilder(RestClient.builder() + .requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder(WebClient.builder() + .clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); + if (recording.isRecording()) { + builder.apiKey(System.getenv("OPENAI_API_KEY")); + } else { + builder.apiKey("unused"); + } + openAiApi = builder.build(); + } + return openAiApi; + } + + protected final OpenAiChatModel getChatModel() { + if (chatModel == null) { + chatModel = OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolExecutionEligibilityPredicate((o1, o2) -> false) + .build(); + } + return chatModel; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml new file mode 100644 index 000000000000..7f9762709e16 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml @@ -0,0 +1,47 @@ +--- +id: 177fac5c-52bf-4ce1-a4cc-20b97fe949fd +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "frequency_penalty" : 0.5, + "max_tokens" : 1000, + "presence_penalty" : 0.3, + "seed" : 100, + "stop" : [ "foo" ], + "stream" : false, + "temperature" : 0.8, + "top_p" : 1 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758118388,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-f8d57a86-8c10-4f2f-8f0f-149a19e74d6a\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: f8d57a86-8c10-4f2f-8f0f-149a19e74d6a + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "400" + req-arrive-time: "1758118387328" + resp-start-time: "1758118387728" + x-envoy-upstream-service-time: "399" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:07 GMT" + server: istio-envoy +uuid: 177fac5c-52bf-4ce1-a4cc-20b97fe949fd +persistent: true +insertionIndex: 2 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml 
b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml new file mode 100644 index 000000000000..7f2bdc6fed3c --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml @@ -0,0 +1,41 @@ +--- +id: ef519d87-3023-46d4-bdb2-d272ce8dba4b +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"South Atlantic\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758118390,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-73cce568-68f8-4c6a-b4ce-3ac371989aa5\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 73cce568-68f8-4c6a-b4ce-3ac371989aa5 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "304" + req-arrive-time: "1758118389297" + resp-start-time: "1758118389602" + x-envoy-upstream-service-time: "303" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:09 GMT" + server: istio-envoy +uuid: ef519d87-3023-46d4-bdb2-d272ce8dba4b +persistent: true +insertionIndex: 17 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml new file mode 100644 index 000000000000..157b0583b646 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml @@ -0,0 +1,44 @@ +--- +id: 5cb55360-eef4-4668-b687-b3f60fc7e201 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "n" : 2, + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"index\":0,\"finish_reason\":\"stop\",\"logprobs\":null},{\"message\":{\"content\"\ + :\"South\",\"role\":\"assistant\"},\"index\":1,\"finish_reason\":\"stop\",\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":23,\"completion_tokens\"\ + :3,\"total_tokens\":26},\"created\":1758119593,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-25a1be03-506a-465a-bdfe-6c9b388ce006\"\ + }" + headers: 
+ vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 25a1be03-506a-465a-bdfe-6c9b388ce006 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "323" + req-arrive-time: "1758119592942" + resp-start-time: "1758119593265" + x-envoy-upstream-service-time: "322" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:33:12 GMT" + server: istio-envoy +uuid: 5cb55360-eef4-4668-b687-b3f60fc7e201 +persistent: true +insertionIndex: 10 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml new file mode 100644 index 000000000000..0ac27586c73f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml @@ -0,0 +1,47 @@ +--- +id: 62a7985a-5ede-4b53-949a-c6cf100938e4 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"delta":{"content":" Atlantic"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: c444b53a-0e32-4059-b033-d4e157f9c3ce + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "351" + req-arrive-time: "1758118388403" + resp-start-time: "1758118388755" + x-envoy-upstream-service-time: "350" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:08 GMT" + server: istio-envoy +uuid: 62a7985a-5ede-4b53-949a-c6cf100938e4 +persistent: true +insertionIndex: 6 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml 
b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml new file mode 100644 index 000000000000..b351cd1a72ee --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml @@ -0,0 +1,62 @@ +--- +id: 31332ee9-3ec2-408c-9acd-73e8e68446dd +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "n" : 2, + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"Southern"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":" Ocean"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":1,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":1}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":1,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: 
{"choices":[{"delta":{"content":null},"finish_reason":"stop","index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":null},"finish_reason":"stop","index":1,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 2909fece-ee65-4a06-836c-369f008065a9 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "362" + req-arrive-time: "1758119593605" + resp-start-time: "1758119593968" + x-envoy-upstream-service-time: "361" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:33:13 GMT" + server: istio-envoy +uuid: 31332ee9-3ec2-408c-9acd-73e8e68446dd +persistent: true +insertionIndex: 18 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml new file mode 100644 index 000000000000..2e2c781aa1c7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml @@ -0,0 +1,82 @@ +--- +id: eaa62748-608a-4627-bda2-93545f27c16d +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"call_b2455b7da6524dc2b90f11ff","type":"function","function":{"name":"get_weather","arguments":""}}],"role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: 
{"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"New York City"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"\""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"call_587ea3e2c2184dcfb35d3c7e","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"London"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"\"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"tool_calls":[{"function":{"arguments":""},"index":1,"id":"","type":"function"}]},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"finish_reason":"tool_calls","delta":{},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 604a5ad5-5d76-42ad-a689-6f6f60c61e5f + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "252" + req-arrive-time: "1758165570636" + resp-start-time: "1758165570888" + x-envoy-upstream-service-time: "250" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:30 GMT" + server: istio-envoy +uuid: 
eaa62748-608a-4627-bda2-93545f27c16d +persistent: true +insertionIndex: 14 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml new file mode 100644 index 000000000000..342c90f90e3e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml @@ -0,0 +1,159 @@ +--- +id: 9738f87e-fdf1-435a-a1bd-9b5565c0efdf +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"\",\"role\":\"assistant\",\"tool_calls\"\ + :[{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"New York City\\\"}\",\"\ + name\":\"get_weather\"},\"id\":\"call_69db468ee59a4613a15e7ae4\",\"index\":0,\"\ + type\":\"function\"},{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"London\\\ + \"}\",\"name\":\"get_weather\"},\"id\":\"call_4941c4a1092340ceb42d6804\",\"index\"\ + :1,\"type\":\"function\"}]},\"finish_reason\":\"tool_calls\",\"index\":0,\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":311,\"completion_tokens\"\ + :45,\"total_tokens\":356},\"created\":1758165572,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-b861091d-874d-4a2d-a9f8-0e96dd81955f\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: b861091d-874d-4a2d-a9f8-0e96dd81955f + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "714" + req-arrive-time: "1758165571689" + resp-start-time: "1758165572403" + x-envoy-upstream-service-time: "713" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:32 GMT" + server: istio-envoy +uuid: 9738f87e-fdf1-435a-a1bd-9b5565c0efdf +persistent: true +insertionIndex: 24 +--- +id: 230c9b97-cf95-4d49-b2c1-50cf33df6458 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_69db468ee59a4613a15e7ae4", + "type" : "function", + "function" : { + "name" : "get_weather", + 
"arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_4941c4a1092340ceb42d6804", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" + } + } ] + }, { + "content" : "25 degrees and sunny", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_69db468ee59a4613a15e7ae4" + }, { + "content" : "15 degrees and sunny", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_4941c4a1092340ceb42d6804" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"The current weather is as follows:\\\ + n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and sunny.\"\ + ,\"role\":\"assistant\"},\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"\ + object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":386,\"completion_tokens\"\ + :31,\"total_tokens\":417},\"created\":1758165573,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-9f138fc5-3b65-4888-84b1-552113825783\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 9f138fc5-3b65-4888-84b1-552113825783 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "630" + req-arrive-time: "1758165572714" + resp-start-time: "1758165573344" + x-envoy-upstream-service-time: "629" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:32 GMT" + server: istio-envoy +uuid: 230c9b97-cf95-4d49-b2c1-50cf33df6458 +persistent: true +insertionIndex: 25 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml new file mode 100644 index 000000000000..bea00affda81 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml @@ -0,0 +1,38 @@ +--- +id: 7efd30b4-7368-4454-b018-c2ef226a3f40 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "gpt-4o", + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 404 + body: "{\"error\":{\"message\":\"The model `gpt-4o` does not exist or you do not\ + \ have access to it.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\"\ + :\"model_not_found\"},\"request_id\":\"5710f1fc-5b88-4835-9c4e-20efc1ef35af\"}" + headers: + vary: 
"Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 5710f1fc-5b88-4835-9c4e-20efc1ef35af + content-type: application/json + req-cost-time: "8" + req-arrive-time: "1758118389053" + resp-start-time: "1758118389061" + x-envoy-upstream-service-time: "7" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:08 GMT" + server: istio-envoy +uuid: 7efd30b4-7368-4454-b018-c2ef226a3f40 +persistent: true +insertionIndex: 11 diff --git a/settings.gradle.kts b/settings.gradle.kts index 593700b40dda..14168dcd87f6 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -502,6 +502,7 @@ include(":instrumentation:ratpack:ratpack-1.4:javaagent") include(":instrumentation:ratpack:ratpack-1.4:testing") include(":instrumentation:ratpack:ratpack-1.7:javaagent") include(":instrumentation:ratpack:ratpack-1.7:library") +include(":instrumentation:reactor:reactor-3.1:bootstrap") include(":instrumentation:reactor:reactor-3.1:javaagent") include(":instrumentation:reactor:reactor-3.1:library") include(":instrumentation:reactor:reactor-3.1:testing") @@ -560,6 +561,10 @@ include(":instrumentation:servlet:servlet-common:bootstrap") include(":instrumentation:servlet:servlet-common:javaagent") include(":instrumentation:servlet:servlet-javax-common:javaagent") include(":instrumentation:spark-2.3:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-1.0:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-1.0:testing") +include(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing") include(":instrumentation:spring:spring-batch-3.0:javaagent") include(":instrumentation:spring:spring-boot-actuator-autoconfigure-2.0:javaagent") include(":instrumentation:spring:spring-boot-autoconfigure") From 744c3a0f9b5d3775030a504aee750f0968c2acf2 Mon Sep 17 00:00:00 2001 From: otelbot <197425009+otelbot@users.noreply.github.com> Date: Mon, 20 Oct 2025 16:44:07 +0000 Subject: [PATCH 2/2] ./gradlew spotlessApply --- .../genai/GenAiAgentAttributesExtractor.java | 17 +- .../genai/GenAiAgentAttributesGetter.java | 5 + .../semconv/genai/GenAiMessagesExtractor.java | 86 ++-- .../semconv/genai/GenAiMessagesProvider.java | 5 + .../genai/GenAiOperationAttributesGetter.java | 5 + .../incubator/AgentIncubatingAttributes.java | 11 +- .../incubator/GenAiIncubatingAttributes.java | 41 +- .../GenAiToolIncubatingAttributes.java | 14 +- .../tool/GenAiToolAttributesExtractor.java | 3 +- .../genai/tool/GenAiToolAttributesGetter.java | 9 +- .../reactor-3.1/bootstrap/build.gradle.kts | 2 +- .../spring-ai-1.0/javaagent/build.gradle.kts | 96 ++--- .../v1_0/SpringAiInstrumentationModule.java | 6 +- .../spring/ai/v1_0/SpringAiSingletons.java | 4 +- .../spring/ai/v1_0/SpringAiTelemetry.java | 6 +- .../ai/v1_0/SpringAiTelemetryBuilder.java | 43 +- .../client/ChatClientAttributesGetter.java | 22 +- .../chat/client/ChatClientMessageBuffer.java | 33 +- .../client/ChatClientMessagesProvider.java | 86 ++-- .../chat/client/ChatClientStreamListener.java | 32 +- .../chat/client/ChatClientStreamWrapper.java | 16 +- ...efaultCallResponseSpecInstrumentation.java | 20 +- ...aultStreamResponseSpecInstrumentation.java | 28 +- ...aultToolCallingManagerInstrumentation.java | 24 +- .../v1_0/tool/ToolCallAttributesGetter.java | 5 + .../spring/ai/v1_0/tool/ToolCallContext.java | 10 +- .../spring/ai/v1_0/tool/ToolCallRequest.java | 5 +- .../tool/ToolCallbackInstrumentation.java | 19 +- 
.../spring/ai/v1_0/ChatClientTest.java | 5 + .../spring-ai-1.0/testing/build.gradle.kts | 12 +- .../spring/ai/v1_0/AbstractChatClient.java | 286 ++++++++----- .../spring/ai/v1_0/AbstractSpringAiTest.java | 40 +- .../javaagent/build.gradle.kts | 92 ++-- .../v1_0/ChatModelAttributesGetter.java | 31 +- .../openai/v1_0/ChatModelMessageBuffer.java | 34 +- .../v1_0/ChatModelMessagesProvider.java | 113 ++--- .../openai/v1_0/ChatModelStreamListener.java | 51 +-- .../openai/v1_0/ChatModelStreamWrapper.java | 20 +- .../openai/v1_0/OpenAiApiInstrumentation.java | 42 +- .../v1_0/OpenAiChatModelInstrumentation.java | 4 +- .../SpringAiOpenaiInstrumentationModule.java | 3 +- .../openai/v1_0/SpringAiOpenaiSingletons.java | 4 +- .../openai/v1_0/SpringAiOpenaiTelemetry.java | 12 +- .../v1_0/SpringAiOpenaiTelemetryBuilder.java | 38 +- .../ai/openai/v1_0/ChatCompletionTest.java | 5 + .../testing/build.gradle.kts | 12 +- .../v1_0/AbstractChatCompletionTest.java | 401 +++++++++++------- .../v1_0/AbstractSpringAiOpenAiTest.java | 40 +- 48 files changed, 1153 insertions(+), 745 deletions(-) diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java index 1b0098e37b93..c4e14b528b48 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_DESCRIPTION; @@ -12,11 +17,12 @@ import javax.annotation.Nullable; /** - * Extractor of GenAI Agent + * Extractor of GenAI Agent * attributes. * - *
<p>This class delegates to a type-specific {@link GenAiAgentAttributesGetter} for individual attribute - * extraction from request/response objects. + * <p>
This class delegates to a type-specific {@link GenAiAgentAttributesGetter} for individual + * attribute extraction from request/response objects. */ public final class GenAiAgentAttributesExtractor implements AttributesExtractor { @@ -29,8 +35,7 @@ public static AttributesExtractor create( private final GenAiAgentAttributesGetter getter; - private GenAiAgentAttributesExtractor( - GenAiAgentAttributesGetter getter) { + private GenAiAgentAttributesExtractor(GenAiAgentAttributesGetter getter) { this.getter = getter; } @@ -51,4 +56,4 @@ public void onEnd( @Nullable Throwable error) { // do nothing } -} \ No newline at end of file +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java index e0f97405ce9b..d7837cd57341 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai; public interface GenAiAgentAttributesGetter { diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java index 4a13f9615797..a536880f8283 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; @@ -46,7 +51,7 @@ import java.util.logging.Logger; import javax.annotation.Nullable; -public class GenAiMessagesExtractor +public class GenAiMessagesExtractor implements AttributesExtractor { private static final Logger LOGGER = Logger.getLogger(GenAiMessagesExtractor.class.getName()); @@ -57,7 +62,8 @@ public static AttributesExtractor create( GenAiMessagesProvider messagesProvider, MessageCaptureOptions messageCaptureOptions, String instrumentationName) { - return new GenAiMessagesExtractor<>(attributesGetter, messagesProvider, messageCaptureOptions, instrumentationName); + return new GenAiMessagesExtractor<>( + attributesGetter, messagesProvider, messageCaptureOptions, instrumentationName); } private final MessageCaptureOptions messageCaptureOptions; @@ -88,20 +94,28 @@ private GenAiMessagesExtractor( @Override public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { tryInit(); - if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals(messageCaptureOptions.captureMessageStrategy())) { + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals( + messageCaptureOptions.captureMessageStrategy())) { SystemInstructions 
systemInstructions = messagesProvider.systemInstructions(request, null); if (systemInstructions != null) { - internalSet(attributes, GEN_AI_SYSTEM_INSTRUCTIONS, toJsonString(systemInstructions.getSerializableObject())); + internalSet( + attributes, + GEN_AI_SYSTEM_INSTRUCTIONS, + toJsonString(systemInstructions.getSerializableObject())); } InputMessages inputMessages = messagesProvider.inputMessages(request, null); if (inputMessages != null) { - internalSet(attributes, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + internalSet( + attributes, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); } ToolDefinitions toolDefinitions = messagesProvider.toolDefinitions(request, null); if (toolDefinitions != null) { - internalSet(attributes, GEN_AI_TOOL_DEFINITIONS, toJsonString(toolDefinitions.getSerializableObject())); + internalSet( + attributes, + GEN_AI_TOOL_DEFINITIONS, + toJsonString(toolDefinitions.getSerializableObject())); } } } @@ -113,38 +127,53 @@ public void onEnd( REQUEST request, @Nullable RESPONSE response, @Nullable Throwable error) { - if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals(messageCaptureOptions.captureMessageStrategy())) { + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals( + messageCaptureOptions.captureMessageStrategy())) { OutputMessages outputMessages = messagesProvider.outputMessages(request, response); if (outputMessages != null) { - internalSet(attributes, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); + internalSet( + attributes, + GEN_AI_OUTPUT_MESSAGES, + toJsonString(outputMessages.getSerializableObject())); } - } else if (CaptureMessageStrategy.EVENT.equals(messageCaptureOptions.captureMessageStrategy())) { + } else if (CaptureMessageStrategy.EVENT.equals( + messageCaptureOptions.captureMessageStrategy())) { emitInferenceEvent(context, request, response); } } private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESPONSE response) { if (eventLogger != null) { - LogRecordBuilder builder = eventLogger.logRecordBuilder() - .setAttribute(EVENT_NAME, GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS) - .setContext(context); - - SystemInstructions systemInstructions = messagesProvider.systemInstructions(request, - response); + LogRecordBuilder builder = + eventLogger + .logRecordBuilder() + .setAttribute(EVENT_NAME, GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS) + .setContext(context); + + SystemInstructions systemInstructions = + messagesProvider.systemInstructions(request, response); if (systemInstructions != null) { - internalSetLogAttribute(builder, GEN_AI_SYSTEM_INSTRUCTIONS, toJsonString(systemInstructions.getSerializableObject())); + internalSetLogAttribute( + builder, + GEN_AI_SYSTEM_INSTRUCTIONS, + toJsonString(systemInstructions.getSerializableObject())); } InputMessages inputMessages = messagesProvider.inputMessages(request, response); if (inputMessages != null) { - internalSetLogAttribute(builder, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + internalSetLogAttribute( + builder, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); } ToolDefinitions toolDefinitions = messagesProvider.toolDefinitions(request, null); if (toolDefinitions != null) { - internalSetLogAttribute(builder, GEN_AI_TOOL_DEFINITIONS, toJsonString(toolDefinitions.getSerializableObject())); + internalSetLogAttribute( + builder, + GEN_AI_TOOL_DEFINITIONS, + toJsonString(toolDefinitions.getSerializableObject())); } OutputMessages 
outputMessages = messagesProvider.outputMessages(request, response); if (outputMessages != null) { - internalSetLogAttribute(builder, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); + internalSetLogAttribute( + builder, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); } internalSetLogAttribute(builder, GEN_AI_OPERATION_NAME, getter.getOperationName(request)); @@ -155,11 +184,14 @@ private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESP internalSetLogAttribute(builder, GEN_AI_REQUEST_SEED, getter.getRequestSeed(request)); internalSetLogAttribute( builder, GEN_AI_REQUEST_FREQUENCY_PENALTY, getter.getRequestFrequencyPenalty(request)); - internalSetLogAttribute(builder, GEN_AI_REQUEST_MAX_TOKENS, getter.getRequestMaxTokens(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_MAX_TOKENS, getter.getRequestMaxTokens(request)); internalSetLogAttribute( builder, GEN_AI_REQUEST_PRESENCE_PENALTY, getter.getRequestPresencePenalty(request)); - internalSetLogAttribute(builder, GEN_AI_REQUEST_STOP_SEQUENCES, getter.getRequestStopSequences(request)); - internalSetLogAttribute(builder, GEN_AI_REQUEST_TEMPERATURE, getter.getRequestTemperature(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_STOP_SEQUENCES, getter.getRequestStopSequences(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_TEMPERATURE, getter.getRequestTemperature(request)); internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_K, getter.getRequestTopK(request)); internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_P, getter.getRequestTopP(request)); @@ -168,7 +200,8 @@ private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESP builder.setAttribute(GEN_AI_RESPONSE_FINISH_REASONS, finishReasons); } internalSetLogAttribute(builder, GEN_AI_RESPONSE_ID, getter.getResponseId(request, response)); - internalSetLogAttribute(builder, GEN_AI_RESPONSE_MODEL, getter.getResponseModel(request, response)); + internalSetLogAttribute( + builder, GEN_AI_RESPONSE_MODEL, getter.getResponseModel(request, response)); internalSetLogAttribute( builder, GEN_AI_USAGE_INPUT_TOKENS, getter.getUsageInputTokens(request, response)); internalSetLogAttribute( @@ -177,7 +210,8 @@ private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESP } } - private void internalSetLogAttribute(LogRecordBuilder logRecordBuilder, AttributeKey key, @Nullable T value) { + private void internalSetLogAttribute( + LogRecordBuilder logRecordBuilder, AttributeKey key, @Nullable T value) { if (value == null) { return; } @@ -195,8 +229,8 @@ private void tryInit() { LOGGER.log(Level.WARNING, "failed to init json marshaler, global instance is null"); } - GenAiEventLoggerProvider loggerProvider = GlobalInstanceHolder.getInstance( - GenAiEventLoggerProvider.class); + GenAiEventLoggerProvider loggerProvider = + GlobalInstanceHolder.getInstance(GenAiEventLoggerProvider.class); if (loggerProvider == null) { LOGGER.log(Level.WARNING, "failed to init event logger, logger provider is null"); diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java index 0ad36a6aa610..5730640d46d3 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java +++ 
b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai; import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java index c3afd4dab5b8..7ac899401077 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai; public interface GenAiOperationAttributesGetter { diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java index f982827d04d4..3c40e1ead0f1 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; import static io.opentelemetry.api.common.AttributeKey.stringKey; @@ -6,8 +11,10 @@ public final class AgentIncubatingAttributes { - public static final AttributeKey GEN_AI_AGENT_DESCRIPTION = stringKey("gen_ai.agent.description"); + public static final AttributeKey GEN_AI_AGENT_DESCRIPTION = + stringKey("gen_ai.agent.description"); public static final AttributeKey GEN_AI_AGENT_ID = stringKey("gen_ai.agent.id"); public static final AttributeKey GEN_AI_AGENT_NAME = stringKey("gen_ai.agent.name"); - public static final AttributeKey GEN_AI_DATA_SOURCE_ID = stringKey("gen_ai.data_source.id"); + public static final AttributeKey GEN_AI_DATA_SOURCE_ID = + stringKey("gen_ai.data_source.id"); } diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java index 2e88cbdd29b3..507315726c22 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package 
io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; import static io.opentelemetry.api.common.AttributeKey.doubleKey; @@ -10,7 +15,8 @@ public final class GenAiIncubatingAttributes { - public static final AttributeKey GEN_AI_OPERATION_NAME = stringKey("gen_ai.operation.name"); + public static final AttributeKey GEN_AI_OPERATION_NAME = + stringKey("gen_ai.operation.name"); public static final AttributeKey> GEN_AI_REQUEST_ENCODING_FORMATS = stringArrayKey("gen_ai.request.encoding_formats"); public static final AttributeKey GEN_AI_REQUEST_FREQUENCY_PENALTY = @@ -25,25 +31,31 @@ public final class GenAiIncubatingAttributes { stringArrayKey("gen_ai.request.stop_sequences"); public static final AttributeKey GEN_AI_REQUEST_TEMPERATURE = doubleKey("gen_ai.request.temperature"); - public static final AttributeKey GEN_AI_REQUEST_TOP_K = - doubleKey("gen_ai.request.top_k"); - public static final AttributeKey GEN_AI_REQUEST_TOP_P = - doubleKey("gen_ai.request.top_p"); + public static final AttributeKey GEN_AI_REQUEST_TOP_K = doubleKey("gen_ai.request.top_k"); + public static final AttributeKey GEN_AI_REQUEST_TOP_P = doubleKey("gen_ai.request.top_p"); public static final AttributeKey> GEN_AI_RESPONSE_FINISH_REASONS = stringArrayKey("gen_ai.response.finish_reasons"); public static final AttributeKey GEN_AI_RESPONSE_ID = stringKey("gen_ai.response.id"); - public static final AttributeKey GEN_AI_RESPONSE_MODEL = stringKey("gen_ai.response.model"); + public static final AttributeKey GEN_AI_RESPONSE_MODEL = + stringKey("gen_ai.response.model"); public static final AttributeKey GEN_AI_PROVIDER_NAME = stringKey("gen_ai.provider.name"); - public static final AttributeKey GEN_AI_CONVERSATION_ID = stringKey("gen_ai.conversation.id"); - public static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = longKey("gen_ai.usage.input_tokens"); + public static final AttributeKey GEN_AI_CONVERSATION_ID = + stringKey("gen_ai.conversation.id"); + public static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = + longKey("gen_ai.usage.input_tokens"); public static final AttributeKey GEN_AI_USAGE_OUTPUT_TOKENS = longKey("gen_ai.usage.output_tokens"); - public static final AttributeKey GEN_AI_REQUEST_CHOICE_COUNT = longKey("gen_ai.request.choice.count"); + public static final AttributeKey GEN_AI_REQUEST_CHOICE_COUNT = + longKey("gen_ai.request.choice.count"); public static final AttributeKey GEN_AI_OUTPUT_TYPE = stringKey("gen_ai.output.type"); - public static final AttributeKey GEN_AI_SYSTEM_INSTRUCTIONS = stringKey("gen_ai.system_instructions"); - public static final AttributeKey GEN_AI_INPUT_MESSAGES = stringKey("gen_ai.input.messages"); - public static final AttributeKey GEN_AI_OUTPUT_MESSAGES = stringKey("gen_ai.output.messages"); - public static final AttributeKey GEN_AI_TOOL_DEFINITIONS = stringKey("gen_ai.tool.definitions"); + public static final AttributeKey GEN_AI_SYSTEM_INSTRUCTIONS = + stringKey("gen_ai.system_instructions"); + public static final AttributeKey GEN_AI_INPUT_MESSAGES = + stringKey("gen_ai.input.messages"); + public static final AttributeKey GEN_AI_OUTPUT_MESSAGES = + stringKey("gen_ai.output.messages"); + public static final AttributeKey GEN_AI_TOOL_DEFINITIONS = + stringKey("gen_ai.tool.definitions"); public static class GenAiOperationNameIncubatingValues { public static final String CHAT = "chat"; @@ -75,6 +87,7 @@ public static class GenAiProviderNameIncubatingValues { } public static class GenAiEventName { - public static final String GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS = 
"gen_ai.client.inference.operation.details"; + public static final String GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS = + "gen_ai.client.inference.operation.details"; } } diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java index d73e4d292342..42eab245b3b7 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; import static io.opentelemetry.api.common.AttributeKey.stringKey; @@ -7,9 +12,12 @@ public class GenAiToolIncubatingAttributes { public static final AttributeKey GEN_AI_TOOL_CALL_ID = stringKey("gen_ai.tool.call.id"); - public static final AttributeKey GEN_AI_TOOL_DESCRIPTION = stringKey("gen_ai.tool.description"); + public static final AttributeKey GEN_AI_TOOL_DESCRIPTION = + stringKey("gen_ai.tool.description"); public static final AttributeKey GEN_AI_TOOL_NAME = stringKey("gen_ai.tool.name"); public static final AttributeKey GEN_AI_TOOL_TYPE = stringKey("gen_ai.tool.type"); - public static final AttributeKey GEN_AI_TOOL_CALL_ARGUMENTS = stringKey("gen_ai.tool.call.arguments"); - public static final AttributeKey GEN_AI_TOOL_CALL_RESULT = stringKey("gen_ai.tool.call.result"); + public static final AttributeKey GEN_AI_TOOL_CALL_ARGUMENTS = + stringKey("gen_ai.tool.call.arguments"); + public static final AttributeKey GEN_AI_TOOL_CALL_RESULT = + stringKey("gen_ai.tool.call.result"); } diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java index a970c061eff3..d06a4e4e9ead 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java @@ -25,7 +25,8 @@ public final class GenAiToolAttributesExtractor /** Creates the GenAI attributes extractor. 
*/ public static AttributesExtractor create( - GenAiToolAttributesGetter attributesGetter, MessageCaptureOptions messageCaptureOptions) { + GenAiToolAttributesGetter attributesGetter, + MessageCaptureOptions messageCaptureOptions) { return new GenAiToolAttributesExtractor<>(attributesGetter, messageCaptureOptions); } diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java index ea891cecf2ee..53b181e8d6e7 100644 --- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java @@ -1,10 +1,15 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.api.incubator.semconv.genai.tool; import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiOperationAttributesGetter; import javax.annotation.Nullable; -public interface GenAiToolAttributesGetter extends - GenAiOperationAttributesGetter { +public interface GenAiToolAttributesGetter + extends GenAiOperationAttributesGetter { String getToolDescription(REQUEST request); diff --git a/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts index ef2537931a62..072a96df450f 100644 --- a/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts +++ b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts @@ -1,3 +1,3 @@ plugins { - id("otel.javaagent-bootstrap") + id("otel.javaagent-bootstrap") } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts index dc9feb4e1eb0..697cb13f2302 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts @@ -1,71 +1,71 @@ plugins { - id("otel.javaagent-instrumentation") + id("otel.javaagent-instrumentation") } otelJava { - // Spring AI 3 requires java 17 - minJavaVersionSupported.set(JavaVersion.VERSION_17) + // Spring AI 3 requires java 17 + minJavaVersionSupported.set(JavaVersion.VERSION_17) } muzzle { - pass { - group.set("org.springframework.ai") - module.set("spring-ai-client-chat") - versions.set("(,)") - } + pass { + group.set("org.springframework.ai") + module.set("spring-ai-client-chat") + versions.set("(,)") + } } repositories { - mavenLocal() - maven { - url = uri("https://repo.spring.io/milestone") - content { - includeGroup("org.springframework.ai") - includeGroup("org.springframework.boot") - includeGroup("org.springframework") - } + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") } - maven { - url = uri("https://repo.spring.io/snapshot") - content { - includeGroup("org.springframework.ai") - 
includeGroup("org.springframework.boot") - includeGroup("org.springframework") - } - mavenContent { - snapshotsOnly() - } + mavenContent { + snapshotsOnly() } - mavenCentral() + } + mavenCentral() } dependencies { - library("io.projectreactor:reactor-core:3.7.0") - library("org.springframework.ai:spring-ai-client-chat:1.0.0") - library("org.springframework.ai:spring-ai-model:1.0.0") + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-client-chat:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") - implementation(project(":instrumentation:reactor:reactor-3.1:library")) + implementation(project(":instrumentation:reactor:reactor-3.1:library")) - bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) - testInstrumentation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent")) - testInstrumentation(project(":instrumentation:reactor:reactor-3.1:javaagent")) - testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-1.0:testing")) + testInstrumentation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent")) + testInstrumentation(project(":instrumentation:reactor:reactor-3.1:javaagent")) + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-1.0:testing")) } tasks { - withType().configureEach { - val latestDepTest = findProperty("testLatestDeps") as Boolean - systemProperty("testLatestDeps", latestDepTest) - // spring ai requires java 17 - if (latestDepTest) { - otelJava { - minJavaVersionSupported.set(JavaVersion.VERSION_17) - } - } - - // TODO run tests both with and without genai message capture - systemProperty("otel.instrumentation.genai.capture-message-content", "true") - systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } } + + // TODO run tests both with and without genai message capture + systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java index 84e99a1b7f68..adfd63e00213 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java @@ -7,14 +7,14 @@ import static java.util.Arrays.asList; -import java.util.List; import com.google.auto.service.AutoService; import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultCallResponseSpecInstrumentation; 
import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultStreamResponseSpecInstrumentation; -import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallbackInstrumentation; import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.DefaultToolCallingManagerInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallbackInstrumentation; +import java.util.List; @AutoService(InstrumentationModule.class) public class SpringAiInstrumentationModule extends InstrumentationModule { @@ -26,7 +26,7 @@ public SpringAiInstrumentationModule() { @Override public List typeInstrumentations() { return asList( - new DefaultCallResponseSpecInstrumentation(), + new DefaultCallResponseSpecInstrumentation(), new DefaultStreamResponseSpecInstrumentation(), new ToolCallbackInstrumentation(), new DefaultToolCallingManagerInstrumentation()); diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java index 438a1f772ac4..ad016b51aec0 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java @@ -19,7 +19,9 @@ public final class SpringAiSingletons { .getInt("otel.instrumentation.genai.message-content.max-length", 8192)) .setCaptureMessageStrategy( InstrumentationConfig.get() - .getString("otel.instrumentation.genai.message-content.capture-strategy", "span-attributes")) + .getString( + "otel.instrumentation.genai.message-content.capture-strategy", + "span-attributes")) .build(); private SpringAiSingletons() {} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java index 0e81b9adaf01..83c254f69d9a 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java @@ -6,20 +6,18 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.context.Context; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; -import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; -import org.springframework.ai.chat.client.ChatClientResponse; import org.springframework.ai.chat.client.ChatClientRequest; -import reactor.core.publisher.Flux; +import org.springframework.ai.chat.client.ChatClientResponse; public final class SpringAiTelemetry { public static SpringAiTelemetryBuilder builder(OpenTelemetry openTelemetry) { return new 
SpringAiTelemetryBuilder(openTelemetry); } + private final Instrumenter chatClientInstrumenter; private final Instrumenter toolCallInstrumenter; private final MessageCaptureOptions messageCaptureOptions; diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java index d8c10951395c..dbd2b146d9fc 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java @@ -5,6 +5,7 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; @@ -19,7 +20,6 @@ import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; import org.springframework.ai.chat.client.ChatClientRequest; import org.springframework.ai.chat.client.ChatClientResponse; -import com.google.errorprone.annotations.CanIgnoreReturnValue; public final class SpringAiTelemetryBuilder { @@ -36,27 +36,21 @@ public final class SpringAiTelemetryBuilder { this.openTelemetry = openTelemetry; } - /** - * Sets whether to capture message content in spans. Defaults to false. - */ + /** Sets whether to capture message content in spans. Defaults to false. */ @CanIgnoreReturnValue public SpringAiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { this.captureMessageContent = captureMessageContent; return this; } - /** - * Sets the maximum length of message content to capture. Defaults to 8192. - */ + /** Sets the maximum length of message content to capture. Defaults to 8192. */ @CanIgnoreReturnValue public SpringAiTelemetryBuilder setContentMaxLength(int contentMaxLength) { this.contentMaxLength = contentMaxLength; return this; } - /** - * Sets the strategy to capture message content. Defaults to "span-attributes". - */ + /** Sets the strategy to capture message content. Defaults to "span-attributes". 
*/ @CanIgnoreReturnValue public SpringAiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { this.captureMessageStrategy = captureMessageStrategy; @@ -64,20 +58,25 @@ public SpringAiTelemetryBuilder setCaptureMessageStrategy(String captureMessageS } public SpringAiTelemetry build() { - MessageCaptureOptions messageCaptureOptions = MessageCaptureOptions.create( - captureMessageContent, contentMaxLength, captureMessageStrategy); + MessageCaptureOptions messageCaptureOptions = + MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); Instrumenter chatClientInstrumenter = Instrumenter.builder( openTelemetry, INSTRUMENTATION_NAME, GenAiSpanNameExtractor.create(ChatClientAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiAgentAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiMessagesExtractor.create( - ChatClientAttributesGetter.INSTANCE, - ChatClientMessagesProvider.create(messageCaptureOptions), - messageCaptureOptions, INSTRUMENTATION_NAME)) + .addAttributesExtractor( + GenAiAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiAgentAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiMessagesExtractor.create( + ChatClientAttributesGetter.INSTANCE, + ChatClientMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, + INSTRUMENTATION_NAME)) .buildInstrumenter(); Instrumenter toolCallInstrumenter = @@ -85,10 +84,12 @@ public SpringAiTelemetry build() { openTelemetry, INSTRUMENTATION_NAME, GenAiSpanNameExtractor.create(ToolCallAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiToolAttributesExtractor.create( - ToolCallAttributesGetter.INSTANCE, messageCaptureOptions)) + .addAttributesExtractor( + GenAiToolAttributesExtractor.create( + ToolCallAttributesGetter.INSTANCE, messageCaptureOptions)) .buildInstrumenter(); - return new SpringAiTelemetry(chatClientInstrumenter, toolCallInstrumenter, messageCaptureOptions); + return new SpringAiTelemetry( + chatClientInstrumenter, toolCallInstrumenter, messageCaptureOptions); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java index 9617dc660284..0f06be5ea993 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java @@ -15,9 +15,10 @@ import org.springframework.ai.chat.client.ChatClientRequest; import org.springframework.ai.chat.client.ChatClientResponse; -public enum ChatClientAttributesGetter implements - GenAiAttributesGetter, - GenAiAgentAttributesGetter { +public enum ChatClientAttributesGetter + implements + GenAiAttributesGetter, + GenAiAgentAttributesGetter { INSTANCE; @Override @@ -70,7 +71,8 @@ public Double getRequestFrequencyPenalty(ChatClientRequest request) { @Nullable @Override public Long 
getRequestMaxTokens(ChatClientRequest request) { - if (request.prompt().getOptions() == null || request.prompt().getOptions().getMaxTokens() == null) { + if (request.prompt().getOptions() == null + || request.prompt().getOptions().getMaxTokens() == null) { return null; } return request.prompt().getOptions().getMaxTokens().longValue(); @@ -122,7 +124,8 @@ public Double getRequestTopP(ChatClientRequest request) { } @Override - public List<String> getResponseFinishReasons(ChatClientRequest request, @Nullable ChatClientResponse response) { + public List<String> getResponseFinishReasons( + ChatClientRequest request, @Nullable ChatClientResponse response) { if (response == null || response.chatResponse() == null || response.chatResponse().getResult() == null @@ -131,7 +134,8 @@ public List getResponseFinishReasons(ChatClientRequest request, @Nullabl return emptyList(); } - return singletonList(response.chatResponse().getResult().getMetadata().getFinishReason().toLowerCase()); + return singletonList( + response.chatResponse().getResult().getMetadata().getFinishReason().toLowerCase()); } @Nullable @@ -162,7 +166,8 @@ public String getResponseModel(ChatClientRequest request, @Nullable ChatClientRe @Nullable @Override - public Long getUsageInputTokens(ChatClientRequest request, @Nullable ChatClientResponse response) { + public Long getUsageInputTokens( + ChatClientRequest request, @Nullable ChatClientResponse response) { if (response == null || response.chatResponse() == null || response.chatResponse().getMetadata() == null @@ -177,7 +182,8 @@ public Long getUsageInputTokens(ChatClientRequest request, @Nullable ChatClientR @Nullable @Override - public Long getUsageOutputTokens(ChatClientRequest request, @Nullable ChatClientResponse response) { + public Long getUsageOutputTokens( + ChatClientRequest request, @Nullable ChatClientResponse response) { if (response == null || response.chatResponse() == null || response.chatResponse().getMetadata() == null diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java index 9b4d9cb1b12a..c1887033b0fe 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java @@ -28,8 +28,7 @@ final class ChatClientMessageBuffer { @Nullable private Map<Integer, ToolCallBuffer> toolCalls; - ChatClientMessageBuffer(int index, - MessageCaptureOptions messageCaptureOptions) { + ChatClientMessageBuffer(int index, MessageCaptureOptions messageCaptureOptions) { this.index = index; this.messageCaptureOptions = messageCaptureOptions; } @@ -52,8 +51,12 @@ Generation toGeneration() { if (entry.getValue().function.name == null) { entry.getValue().function.name = ""; } - toolCalls.add(new ToolCall(entry.getValue().id, entry.getValue().type, - entry.getValue().function.name, arguments)); + toolCalls.add( + new ToolCall( + entry.getValue().id, + entry.getValue().type, + entry.getValue().function.name, + arguments)); } } } else { @@ -66,7 +69,8 @@ Generation toGeneration() { content = this.rawContentBuffer.toString(); } - return new Generation(new
AssistantMessage(content, Collections.emptyMap(), toolCalls), + return new Generation( + new AssistantMessage(content, Collections.emptyMap(), toolCalls), ChatGenerationMetadata.builder().finishReason(this.finishReason).build()); } @@ -80,9 +84,15 @@ void append(Generation generation) { } String deltaContent = message.getText(); - if (this.rawContentBuffer.length() < this.messageCaptureOptions.maxMessageContentLength()) { - if (this.rawContentBuffer.length() + deltaContent.length() >= this.messageCaptureOptions.maxMessageContentLength()) { - deltaContent = deltaContent.substring(0, this.messageCaptureOptions.maxMessageContentLength() - this.rawContentBuffer.length()); + if (this.rawContentBuffer.length() + < this.messageCaptureOptions.maxMessageContentLength()) { + if (this.rawContentBuffer.length() + deltaContent.length() + >= this.messageCaptureOptions.maxMessageContentLength()) { + deltaContent = + deltaContent.substring( + 0, + this.messageCaptureOptions.maxMessageContentLength() + - this.rawContentBuffer.length()); this.rawContentBuffer.append(deltaContent).append(TRUNCATE_FLAG); } else { this.rawContentBuffer.append(deltaContent); @@ -99,8 +109,7 @@ void append(Generation generation) { for (int i = 0; i < message.getToolCalls().size(); i++) { ToolCall toolCall = message.getToolCalls().get(i); ToolCallBuffer buffer = - this.toolCalls.computeIfAbsent( - i, unused -> new ToolCallBuffer(toolCall.id())); + this.toolCalls.computeIfAbsent(i, unused -> new ToolCallBuffer(toolCall.id())); buffer.type = toolCall.type(); buffer.function.name = toolCall.name(); @@ -115,7 +124,9 @@ void append(Generation generation) { } ChatGenerationMetadata metadata = generation.getMetadata(); - if (metadata != null && metadata.getFinishReason() != null && !metadata.getFinishReason().isEmpty()) { + if (metadata != null + && metadata.getFinishReason() != null + && !metadata.getFinishReason().isEmpty()) { this.finishReason = metadata.getFinishReason(); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java index 569c0c4db647..df203a5dd054 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; @@ -31,8 +36,8 @@ import org.springframework.ai.chat.model.Generation; import org.springframework.ai.model.tool.ToolCallingChatOptions; -public class ChatClientMessagesProvider implements - GenAiMessagesProvider { +public class ChatClientMessagesProvider + implements GenAiMessagesProvider { private static final String TRUNCATE_FLAG = "...[truncated]"; @@ -48,7 +53,8 @@ public static ChatClientMessagesProvider create(MessageCaptureOptions messageCap @Nullable @Override - public InputMessages inputMessages(ChatClientRequest request, @Nullable ChatClientResponse 
response) { + public InputMessages inputMessages( + ChatClientRequest request, @Nullable ChatClientResponse response) { if (!messageCaptureOptions.captureMessageContent() || request.prompt().getInstructions() == null) { return null; @@ -70,16 +76,17 @@ public InputMessages inputMessages(ChatClientRequest request, @Nullable ChatClie } if (assistantMessage.hasToolCalls()) { - messageParts.addAll(assistantMessage - .getToolCalls() - .stream() - .map(this::toolCallToMessagePart) - .collect(Collectors.toList())); + messageParts.addAll( + assistantMessage.getToolCalls().stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); } inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); } else if (msg.getMessageType() == MessageType.TOOL) { ToolResponseMessage toolResponseMessage = (ToolResponseMessage) msg; - inputMessages.append(InputMessage.create(Role.TOOL, contentToMessageParts(toolResponseMessage.getResponses()))); + inputMessages.append( + InputMessage.create( + Role.TOOL, contentToMessageParts(toolResponseMessage.getResponses()))); } } return inputMessages; @@ -87,7 +94,8 @@ public InputMessages inputMessages(ChatClientRequest request, @Nullable ChatClie @Nullable @Override - public OutputMessages outputMessages(ChatClientRequest request, @Nullable ChatClientResponse response) { + public OutputMessages outputMessages( + ChatClientRequest request, @Nullable ChatClientResponse response) { if (!messageCaptureOptions.captureMessageContent() || response == null || response.chatResponse() == null @@ -105,11 +113,10 @@ public OutputMessages outputMessages(ChatClientRequest request, @Nullable ChatCl } if (message.hasToolCalls()) { - messageParts.addAll(message - .getToolCalls() - .stream() - .map(this::toolCallToMessagePart) - .collect(Collectors.toList())); + messageParts.addAll( + message.getToolCalls().stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); } } @@ -124,32 +131,35 @@ public OutputMessages outputMessages(ChatClientRequest request, @Nullable ChatCl @Nullable @Override - public SystemInstructions systemInstructions(ChatClientRequest request, @Nullable ChatClientResponse response) { + public SystemInstructions systemInstructions( + ChatClientRequest request, @Nullable ChatClientResponse response) { return null; } @Nullable @Override - public ToolDefinitions toolDefinitions(ChatClientRequest request, @Nullable ChatClientResponse response) { - if (request.prompt().getOptions() == null || !(request.prompt() - .getOptions() instanceof ToolCallingChatOptions options)) { + public ToolDefinitions toolDefinitions( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (request.prompt().getOptions() == null + || !(request.prompt().getOptions() instanceof ToolCallingChatOptions options)) { return null; } ToolDefinitions toolDefinitions = ToolDefinitions.create(); // See: org.springframework.ai.model.tool.DefaultToolCallingManager.resolveToolDefinitions - options.getToolCallbacks() - .stream() - .map(toolCallback -> { - String name = toolCallback.getToolDefinition().name(); - String type = "function"; - if (messageCaptureOptions.captureMessageContent()) { - return ToolDefinition.create(type, name, toolCallback.getToolDefinition().description(), null); - } else { - return ToolDefinition.create(type, name, null, null); - } - }) + options.getToolCallbacks().stream() + .map( + toolCallback -> { + String name = toolCallback.getToolDefinition().name(); + String type = "function"; + if 
(messageCaptureOptions.captureMessageContent()) { + return ToolDefinition.create( + type, name, toolCallback.getToolDefinition().description(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + }) .filter(Objects::nonNull) .forEach(toolDefinitions::append); @@ -157,8 +167,7 @@ public ToolDefinitions toolDefinitions(ChatClientRequest request, @Nullable Chat // Skip the tool if it is already present in the request toolCallbacks. // That might happen if a tool is defined in the options // both as a ToolCallback and as a tool name. - if (options.getToolCallbacks() - .stream() + if (options.getToolCallbacks().stream() .anyMatch(tool -> tool.getToolDefinition().name().equals(toolName))) { continue; } @@ -185,15 +194,18 @@ private List contentToMessageParts(List toolResponses } return toolResponses.stream() - .map(response -> - ToolCallResponsePart.create( - response.id(), truncateTextContent(response.responseData()))) + .map( + response -> + ToolCallResponsePart.create( + response.id(), truncateTextContent(response.responseData()))) .collect(Collectors.toList()); } private String truncateTextContent(String content) { - if (!content.endsWith(TRUNCATE_FLAG) && content.length() > messageCaptureOptions.maxMessageContentLength()) { - content = content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + if (!content.endsWith(TRUNCATE_FLAG) + && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = + content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; } return content; } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java index 21213374afd6..10e81fab9011 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java @@ -115,20 +115,24 @@ public void endSpan(@Nullable Throwable error) { outputTokens = (int) this.outputTokens.get(); } - List generations = this.chatClientMessageBuffers.stream() - .map(ChatClientMessageBuffer::toGeneration) - .collect(Collectors.toList()); - - ChatClientResponse response = ChatClientResponse.builder() - .chatResponse(ChatResponse.builder() - .generations(generations) - .metadata(ChatResponseMetadata.builder() - .usage(new DefaultUsage(inputTokens, outputTokens)) - .id(requestId.get()) - .model(model.get()) - .build()) - .build()) - .build(); + List generations = + this.chatClientMessageBuffers.stream() + .map(ChatClientMessageBuffer::toGeneration) + .collect(Collectors.toList()); + + ChatClientResponse response = + ChatClientResponse.builder() + .chatResponse( + ChatResponse.builder() + .generations(generations) + .metadata( + ChatResponseMetadata.builder() + .usage(new DefaultUsage(inputTokens, outputTokens)) + .id(requestId.get()) + .model(model.get()) + .build()) + .build()) + .build(); if (this.newSpan) { this.instrumenter.end(this.context, this.request, response, error); diff --git 
a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java index 3f921d646476..8aff1300f28f 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; import io.opentelemetry.context.Context; @@ -12,12 +17,13 @@ public static Flux wrap( ChatClientStreamListener streamListener, Context context) { - Flux chatClientResponseFlux = originFlux.doOnNext( - chunk -> streamListener.onChunk(chunk)) - .doOnComplete(() -> streamListener.endSpan(null)) - .doOnError(streamListener::endSpan); + Flux chatClientResponseFlux = + originFlux + .doOnNext(chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); return ContextPropagationOperator.runWithContext(chatClientResponseFlux, context); } private ChatClientStreamWrapper() {} -} \ No newline at end of file +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java index abbd4b1791be..02a2c8db6796 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java @@ -8,10 +8,10 @@ import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; import static net.bytebuddy.matcher.ElementMatchers.isMethod; -import static net.bytebuddy.matcher.ElementMatchers.named; -import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import static net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import com.google.auto.service.AutoService; import io.opentelemetry.context.Context; @@ -21,15 +21,16 @@ import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.matcher.ElementMatcher; -import org.springframework.ai.chat.client.ChatClientResponse; import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; @AutoService(TypeInstrumentation.class) public class 
DefaultCallResponseSpecInstrumentation implements TypeInstrumentation { @Override public ElementMatcher classLoaderOptimization() { - return hasClassesNamed("org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); + return hasClassesNamed( + "org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); } @Override @@ -40,8 +41,11 @@ public ElementMatcher typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("doGetObservableChatClientResponse")).and(takesArguments(2)) - .and(isPrivate()).and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + isMethod() + .and(named("doGetObservableChatClientResponse")) + .and(takesArguments(2)) + .and(isPrivate()) + .and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), this.getClass().getName() + "$DoGetObservableChatClientResponseAdvice"); } @@ -54,7 +58,7 @@ public static void doGetObservableChatClientResponseEnter( @Advice.Local("otelContext") Context context, @Advice.Local("otelScope") Scope scope) { context = Context.current(); - + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { context = TELEMETRY.chatClientInstrumenter().start(context, request); } @@ -72,7 +76,7 @@ public static void doGetObservableChatClientResponseExit( return; } scope.close(); - + TELEMETRY.chatClientInstrumenter().end(context, request, response, throwable); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java index 2eec32010ba3..2624b2c71bca 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java @@ -8,10 +8,10 @@ import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; import static net.bytebuddy.matcher.ElementMatchers.isMethod; -import static net.bytebuddy.matcher.ElementMatchers.named; -import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import static net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import com.google.auto.service.AutoService; import io.opentelemetry.context.Context; @@ -20,8 +20,8 @@ import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.matcher.ElementMatcher; -import org.springframework.ai.chat.client.ChatClientResponse; import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; import reactor.core.publisher.Flux; @AutoService(TypeInstrumentation.class) @@ -29,7 +29,8 @@ public class 
DefaultStreamResponseSpecInstrumentation implements TypeInstrumenta @Override public ElementMatcher classLoaderOptimization() { - return hasClassesNamed("org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); + return hasClassesNamed( + "org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); } @Override @@ -40,8 +41,11 @@ public ElementMatcher typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("doGetObservableFluxChatResponse")).and(takesArguments(1)) - .and(isPrivate()).and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + isMethod() + .and(named("doGetObservableFluxChatResponse")) + .and(takesArguments(1)) + .and(isPrivate()) + .and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), this.getClass().getName() + "$DoGetObservableFluxChatResponseAdvice"); } @@ -54,12 +58,16 @@ public static void doGetObservableFluxChatResponseEnter( @Advice.Local("otelContext") Context context, @Advice.Local("otelStreamListener") ChatClientStreamListener streamListener) { context = Context.current(); - + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { context = TELEMETRY.chatClientInstrumenter().start(context, request); - streamListener = new ChatClientStreamListener( - context, request, TELEMETRY.chatClientInstrumenter(), - TELEMETRY.messageCaptureOptions(), true); + streamListener = + new ChatClientStreamListener( + context, + request, + TELEMETRY.chatClientInstrumenter(), + TELEMETRY.messageCaptureOptions(), + true); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java index 85cdbfa0d511..df6a310a6328 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java @@ -8,19 +8,19 @@ import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; import static net.bytebuddy.matcher.ElementMatchers.isMethod; import static net.bytebuddy.matcher.ElementMatchers.named; -import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import com.google.auto.service.AutoService; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import java.util.HashMap; +import java.util.Map; import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.matcher.ElementMatcher; -import java.util.HashMap; -import java.util.Map; import org.springframework.ai.chat.messages.AssistantMessage; @AutoService(TypeInstrumentation.class) @@ -39,9 +39,10 @@ public ElementMatcher 
typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("executeToolCall")) - .and(takesArguments(3)) - .and(takesArgument(1, named("org.springframework.ai.chat.messages.AssistantMessage"))), + isMethod() + .and(named("executeToolCall")) + .and(takesArguments(3)) + .and(takesArgument(1, named("org.springframework.ai.chat.messages.AssistantMessage"))), this.getClass().getName() + "$ExecuteToolCallAdvice"); } @@ -53,18 +54,18 @@ public static void executeToolCallEnter( @Advice.Argument(1) AssistantMessage assistantMessage, @Advice.Local("otelContext") Context context, @Advice.Local("otelScope") Scope scope) { - + context = Context.current(); - + if (assistantMessage != null && assistantMessage.getToolCalls() != null) { Map<String, String> toolNameToIdMap = new HashMap<>(); - + for (AssistantMessage.ToolCall toolCall : assistantMessage.getToolCalls()) { if (toolCall.id() != null && toolCall.name() != null) { toolNameToIdMap.put(toolCall.name(), toolCall.id()); } } - + // store tool call ids map to context if (!toolNameToIdMap.isEmpty()) { context = ToolCallContext.storeToolCalls(context, toolNameToIdMap); @@ -74,8 +75,7 @@ public static void executeToolCallEnter( } @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) - public static void executeToolCallExit( - @Advice.Local("otelScope") Scope scope) { + public static void executeToolCallExit(@Advice.Local("otelScope") Scope scope) { if (scope == null) { return; } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java index d5f52689feb0..b9f0d2e49dac 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; import io.opentelemetry.instrumentation.api.instrumenter.genai.tool.GenAiToolAttributesGetter; diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java index d5a20e33c1ec..f9e04c52a316 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java @@ -9,12 +9,10 @@ import io.opentelemetry.context.ContextKey; import java.util.Map; -/** - * Tool call context to store tool call ids map - */ +/** Tool call context to store tool call ids map */ public final class ToolCallContext { - private static final ContextKey<Map<String, String>> TOOL_CALL_IDS_KEY = + private static final ContextKey<Map<String, String>> TOOL_CALL_IDS_KEY =
ContextKey.named("spring-ai-tool-call-ids"); private ToolCallContext() {} @@ -30,12 +28,12 @@ public static String getToolCallId(Context context, String toolName) { if (context == null || toolName == null) { return null; } - + Map<String, String> toolCallIds = context.get(TOOL_CALL_IDS_KEY); if (toolCallIds == null) { return null; } - + return toolCallIds.get(toolName); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java index 5b96220da34e..38b0d3e34e91 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java @@ -19,7 +19,8 @@ private ToolCallRequest(String toolInput, String toolCallId, ToolDefinition tool this.toolDefinition = toolDefinition; } - public static ToolCallRequest create(String toolInput, String toolCallId, ToolDefinition toolDefinition) { + public static ToolCallRequest create( + String toolInput, String toolCallId, ToolDefinition toolDefinition) { return new ToolCallRequest(toolInput, toolCallId, toolDefinition); } @@ -31,7 +32,7 @@ public String getType() { // spring ai support function only return "function"; } - + public String getName() { if (toolDefinition == null) { return null; diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java index 0139c4476cff..f6b9cf04e0da 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java @@ -10,9 +10,9 @@ import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; import static net.bytebuddy.matcher.ElementMatchers.isMethod; import static net.bytebuddy.matcher.ElementMatchers.named; -import static net.bytebuddy.matcher.ElementMatchers.takesArguments; -import static net.bytebuddy.matcher.ElementMatchers.takesArgument; import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import com.google.auto.service.AutoService; import io.opentelemetry.context.Context; @@ -40,9 +40,11 @@ public ElementMatcher typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("call")).and(takesArguments(2)) - .and(takesArgument(0, named("java.lang.String"))) - .and(returns(named("java.lang.String"))), + isMethod() + .and(named("call")) + .and(takesArguments(2)) + .and(takesArgument(0, named("java.lang.String"))) + .and(returns(named("java.lang.String"))), this.getClass().getName() + "$CallAdvice"); }
@@ -57,11 +59,12 @@ public static void callEnter( @Advice.Local("otelScope") Scope scope, @Advice.Local("toolCallRequest") ToolCallRequest request) { context = Context.current(); - + // get tool call id from context - String toolCallId = ToolCallContext.getToolCallId(context, toolCallback.getToolDefinition().name()); + String toolCallId = + ToolCallContext.getToolCallId(context, toolCallback.getToolDefinition().name()); request = ToolCallRequest.create(toolInput, toolCallId, toolCallback.getToolDefinition()); - + if (TELEMETRY.toolCallInstrumenter().shouldStart(context, request)) { context = TELEMETRY.toolCallInstrumenter().start(context, request); } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java index 6cf4aabfcbbc..670bb2f7d717 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; import io.opentelemetry.instrumentation.spring.ai.v1_0.AbstractChatClientTest; diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts index 018ea7ab229f..3921db5aa237 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts @@ -1,15 +1,15 @@ plugins { - id("otel.java-conventions") + id("otel.java-conventions") } otelJava { - minJavaVersionSupported.set(JavaVersion.VERSION_17) + minJavaVersionSupported.set(JavaVersion.VERSION_17) } dependencies { - implementation(project(":testing-common")) + implementation(project(":testing-common")) - api("org.springframework.ai:spring-ai-openai:1.0.0") - api("org.springframework.ai:spring-ai-client-chat:1.0.0") - api(project(":instrumentation-api-incubator")) + api("org.springframework.ai:spring-ai-openai:1.0.0") + api("org.springframework.ai:spring-ai-client-chat:1.0.0") + api(project(":instrumentation-api-incubator")) } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java index ee302f4200a9..15b3a92546d7 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.spring.ai.v1_0; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_NAME; @@ -6,7 +11,6 @@ 
import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; -import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; @@ -16,7 +20,6 @@ import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.EXECUTE_TOOL; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.INVOKE_AGENT; -import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_DESCRIPTION; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_NAME; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_TYPE; @@ -33,16 +36,11 @@ import java.util.stream.Collectors; import org.junit.jupiter.api.Test; import org.springframework.ai.chat.client.ChatClient; -import org.springframework.ai.chat.client.ChatClient.CallResponseSpec; -import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; import org.springframework.ai.chat.messages.SystemMessage; -import org.springframework.ai.chat.messages.ToolResponseMessage; -import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; import org.springframework.ai.chat.messages.UserMessage; import org.springframework.ai.chat.model.ChatResponse; import org.springframework.ai.chat.prompt.ChatOptions; import org.springframework.ai.chat.prompt.Prompt; -import org.springframework.ai.openai.OpenAiChatModel; import org.springframework.ai.openai.OpenAiChatOptions; public abstract class AbstractChatClientTest extends AbstractSpringAiTest { @@ -55,10 +53,11 @@ public abstract class AbstractChatClientTest extends AbstractSpringAiTest { @Test void basic() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); ChatClient chatClient = getChatClient(); ChatResponse response = chatClient.prompt(prompt).call().chatResponse(); @@ -70,8 +69,7 @@ void basic() { trace -> trace.hasSpansSatisfyingExactly( span -> - span - .hasName(INVOKE_AGENT + " " 
+ TEST_AGENT_NAME) + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) .hasAttributesSatisfying( equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), @@ -85,14 +83,23 @@ void basic() { equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), equalTo(GEN_AI_SPAN_KIND, "AGENT"), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -101,14 +108,15 @@ void basic() { @Test void stream() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); ChatClient chatClient = getChatClient(); - List chunks = chatClient.prompt(prompt).stream().chatResponse().toStream().collect( - Collectors.toList()); + List chunks = + chatClient.prompt(prompt).stream().chatResponse().toStream().collect(Collectors.toList()); String fullMessage = chunks.stream() @@ -131,8 +139,7 @@ void stream() { trace -> trace.hasSpansSatisfyingExactly( span -> - span - .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) .hasAttributesSatisfying( equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), @@ -143,14 +150,23 @@ void stream() { GEN_AI_RESPONSE_FINISH_REASONS, reasons -> reasons.containsExactly("stop")), equalTo(GEN_AI_SPAN_KIND, "AGENT"), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> 
messages.contains("stop"))), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -159,10 +175,11 @@ void stream() { @Test void with400Error() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model("gpt-4o").build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); ChatClient chatClient = getChatClient(); Throwable thrown = catchThrowable(() -> chatClient.prompt(prompt).call().chatResponse()); @@ -173,8 +190,7 @@ void with400Error() { trace -> trace.hasSpansSatisfyingExactly( span -> - span - .hasStatus(StatusData.error()) + span.hasStatus(StatusData.error()) .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) .hasAttributesSatisfying( equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), @@ -182,22 +198,33 @@ void with400Error() { equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), equalTo(GEN_AI_REQUEST_MODEL, "gpt-4o"), equalTo(GEN_AI_SPAN_KIND, "AGENT"), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); } @Test void toolCalls() { - Prompt prompt = Prompt.builder() - .messages(asList( - SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), - UserMessage.builder().text("What is the weather in New York City and London?").build())) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .toolCallbacks(getToolCallbacks()) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); ChatClient chatClient = getChatClient(); @@ -208,8 +235,7 @@ void toolCalls() { trace -> trace.hasSpansSatisfyingExactly( span -> - span - .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) .hasAttributesSatisfying( equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), @@ -224,19 +250,43 @@ void toolCalls() { equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 76L), equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 815L), equalTo(GEN_AI_SPAN_KIND, "AGENT"), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("The current weather is as follows:\\n- 
**New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("The location to get the current temperature for"))), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> + messages.contains( + "The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> + messages.contains( + "The location to get the current temperature for"))), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -249,18 +299,18 @@ void toolCalls() { equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 376L)), // 2 spans are compressed into 1 span span -> - span - .hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + span.hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), equalTo(GEN_AI_SPAN_KIND, "TOOL"), - equalTo(GEN_AI_TOOL_DESCRIPTION, "The location to get the current temperature for"), + equalTo( + GEN_AI_TOOL_DESCRIPTION, + "The location to get the current temperature for"), equalTo(GEN_AI_TOOL_TYPE, "function"), equalTo(GEN_AI_TOOL_NAME, TEST_TOOL_NAME)), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -275,29 +325,34 @@ void toolCalls() { @Test void streamToolCalls() { - Prompt prompt = Prompt.builder() - .messages(asList( - SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), - UserMessage.builder().text("What is the weather in New York City and London?").build())) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .toolCallbacks(getToolCallbacks()) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); ChatClient chatClient = getChatClient(); - List chunks = chatClient.prompt(prompt).stream().chatResponse().toStream() - 
.collect(Collectors.toList()); + List chunks = + chatClient.prompt(prompt).stream().chatResponse().toStream().collect(Collectors.toList()); getTesting() .waitAndAssertTraces( trace -> trace.hasSpansSatisfyingExactly( span -> - span - .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) .hasAttributesSatisfying( equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), @@ -308,19 +363,43 @@ void streamToolCalls() { GEN_AI_RESPONSE_FINISH_REASONS, reasons -> reasons.containsExactly("stop")), equalTo(GEN_AI_SPAN_KIND, "AGENT"), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("The location to get the current temperature for"))), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> + messages.contains( + "The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> + messages.contains( + "The location to get the current temperature for"))), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -330,18 +409,18 @@ void streamToolCalls() { equalTo(GEN_AI_SPAN_KIND, "LLM")), // 2 spans are compressed into 1 span span -> - span - .hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + span.hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), equalTo(GEN_AI_SPAN_KIND, "TOOL"), - equalTo(GEN_AI_TOOL_DESCRIPTION, "The location to get the current temperature for"), + equalTo( + GEN_AI_TOOL_DESCRIPTION, + "The location to get the current temperature for"), equalTo(GEN_AI_TOOL_TYPE, "function"), equalTo(GEN_AI_TOOL_NAME, 
TEST_TOOL_NAME)), span -> - span - .hasName(CHAT + " " + TEST_CHAT_MODEL) + span.hasName(CHAT + " " + TEST_CHAT_MODEL) .hasParent(trace.getSpan(0)) .hasAttributesSatisfying( equalTo(GEN_AI_OPERATION_NAME, CHAT), @@ -350,5 +429,4 @@ void streamToolCalls() { reasons -> reasons.containsExactly("stop")), equalTo(GEN_AI_SPAN_KIND, "LLM")))); } - } diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java index ffd16778015c..d75df9e89b1e 100644 --- a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.spring.ai.v1_0; import static java.util.Collections.singletonList; @@ -31,8 +36,7 @@ public abstract class AbstractSpringAiTest { private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; - @RegisterExtension - static final RecordingExtension recording = new RecordingExtension(API_URL); + @RegisterExtension static final RecordingExtension recording = new RecordingExtension(API_URL); protected abstract InstrumentationExtension getTesting(); @@ -42,16 +46,15 @@ public abstract class AbstractSpringAiTest { protected final OpenAiApi getOpenAiApi() { if (openAiApi == null) { - HttpClient httpClient = HttpClient.newBuilder() - .version(Version.HTTP_1_1) - .build(); - - OpenAiApi.Builder builder = OpenAiApi.builder() - .restClientBuilder(RestClient.builder() - .requestFactory(new JdkClientHttpRequestFactory(httpClient))) - .webClientBuilder(WebClient.builder() - .clientConnector(new JdkClientHttpConnector(httpClient))) - .baseUrl("http://localhost:" + recording.getPort()); + HttpClient httpClient = HttpClient.newBuilder().version(Version.HTTP_1_1).build(); + + OpenAiApi.Builder builder = + OpenAiApi.builder() + .restClientBuilder( + RestClient.builder().requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder( + WebClient.builder().clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); if (recording.isRecording()) { builder.apiKey(System.getenv("OPENAI_API_KEY")); } else { @@ -64,17 +67,17 @@ protected final OpenAiApi getOpenAiApi() { protected final ToolCallingManager getToolCallingManager() { return ToolCallingManager.builder() - .toolCallbackResolver( - new StaticToolCallbackResolver(getToolCallbacks())) + .toolCallbackResolver(new StaticToolCallbackResolver(getToolCallbacks())) .build(); } protected final OpenAiChatModel getChatModel() { if (chatModel == null) { - chatModel = OpenAiChatModel.builder() - .openAiApi(getOpenAiApi()) - .toolCallingManager(getToolCallingManager()) - .build(); + chatModel = + OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolCallingManager(getToolCallingManager()) + .build(); } return chatModel; } @@ -119,5 +122,4 @@ public String apply(ToolInput location) { return "25 degrees and sunny"; } } - } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts 
b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts index e30f8520ff61..6ea07daf793f 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts @@ -1,69 +1,69 @@ plugins { - id("otel.javaagent-instrumentation") + id("otel.javaagent-instrumentation") } otelJava { - // Spring AI OpenAI requires java 17 (same as Spring AI) - minJavaVersionSupported.set(JavaVersion.VERSION_17) + // Spring AI OpenAI requires java 17 (same as Spring AI) + minJavaVersionSupported.set(JavaVersion.VERSION_17) } muzzle { - pass { - group.set("org.springframework.ai") - module.set("spring-ai-openai") - versions.set("(,)") - } + pass { + group.set("org.springframework.ai") + module.set("spring-ai-openai") + versions.set("(,)") + } } repositories { - mavenLocal() - maven { - url = uri("https://repo.spring.io/milestone") - content { - includeGroup("org.springframework.ai") - includeGroup("org.springframework.boot") - includeGroup("org.springframework") - } + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") } - maven { - url = uri("https://repo.spring.io/snapshot") - content { - includeGroup("org.springframework.ai") - includeGroup("org.springframework.boot") - includeGroup("org.springframework") - } - mavenContent { - snapshotsOnly() - } + mavenContent { + snapshotsOnly() } - mavenCentral() + } + mavenCentral() } dependencies { - library("io.projectreactor:reactor-core:3.7.0") - library("org.springframework.ai:spring-ai-openai:1.0.0") - library("org.springframework.ai:spring-ai-model:1.0.0") + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-openai:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") - implementation(project(":instrumentation:reactor:reactor-3.1:library")) + implementation(project(":instrumentation:reactor:reactor-3.1:library")) - bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) - testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing")) + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing")) } tasks { - withType().configureEach { - val latestDepTest = findProperty("testLatestDeps") as Boolean - systemProperty("testLatestDeps", latestDepTest) - // spring ai requires java 17 - if (latestDepTest) { - otelJava { - minJavaVersionSupported.set(JavaVersion.VERSION_17) - } - } - - // TODO run tests both with and without genai message capture - systemProperty("otel.instrumentation.genai.capture-message-content", "true") - systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } } + + // TODO run tests both with and without genai message capture + 
systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java index fb9d2a2feb66..9306a3a10135 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java @@ -1,14 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; import static java.util.Collections.emptyList; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; import io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes; +import java.util.List; +import java.util.stream.Collectors; import javax.annotation.Nullable; import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; -import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; -import java.util.List; -import java.util.stream.Collectors; enum ChatModelAttributesGetter implements GenAiAttributesGetter { @@ -36,7 +41,6 @@ public String getOperationTarget(ChatCompletionRequest request) { return getRequestModel(request); } - @Nullable @Override public Long getRequestSeed(ChatCompletionRequest request) { @@ -65,7 +69,8 @@ public Long getRequestMaxTokens(ChatCompletionRequest request) { return null; } // Use maxCompletionTokens if available, otherwise fall back to maxTokens - Integer maxTokens = request.maxCompletionTokens() != null ? request.maxCompletionTokens() : request.maxTokens(); + Integer maxTokens = + request.maxCompletionTokens() != null ? request.maxCompletionTokens() : request.maxTokens(); return maxTokens != null ? Long.valueOf(maxTokens) : null; } @@ -119,14 +124,15 @@ public List getResponseFinishReasons( return emptyList(); } return response.choices().stream() - .map(choice -> choice.finishReason() != null ? choice.finishReason().name().toLowerCase() : "") + .map( + choice -> + choice.finishReason() != null ? 
choice.finishReason().name().toLowerCase() : "") .collect(Collectors.toList()); } @Override @Nullable - public String getResponseId( - ChatCompletionRequest request, @Nullable ChatCompletion response) { + public String getResponseId(ChatCompletionRequest request, @Nullable ChatCompletion response) { if (response == null) { return null; } @@ -135,8 +141,7 @@ public String getResponseId( @Override @Nullable - public String getResponseModel( - ChatCompletionRequest request, @Nullable ChatCompletion response) { + public String getResponseModel(ChatCompletionRequest request, @Nullable ChatCompletion response) { if (response == null) { return null; } @@ -157,9 +162,11 @@ public Long getUsageInputTokens( @Nullable public Long getUsageOutputTokens( ChatCompletionRequest request, @Nullable ChatCompletion response) { - if (response == null || response.usage() == null || response.usage().completionTokens() == null) { + if (response == null + || response.usage() == null + || response.usage().completionTokens() == null) { return null; } return Long.valueOf(response.usage().completionTokens()); } -} \ No newline at end of file +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java index 2692c2db3ed0..faea7519eab0 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java @@ -5,18 +5,18 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionFinishReason; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ChatCompletionFunction; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.Role; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionFinishReason; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ChatCompletionFunction; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.Role; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; final class ChatModelMessageBuffer { private static final String TRUNCATE_FLAG = "...[truncated]"; @@ -50,8 +50,11 @@ Choice toChoice() { if (entry.getValue().function.arguments != null) { arguments = entry.getValue().function.arguments.toString(); } - toolCalls.add(new ToolCall(entry.getValue().id, entry.getValue().type, - new ChatCompletionFunction(entry.getValue().function.name, 
arguments))); + toolCalls.add( + new ToolCall( + entry.getValue().id, + entry.getValue().type, + new ChatCompletionFunction(entry.getValue().function.name, arguments))); } } } @@ -80,8 +83,13 @@ void append(Choice choice) { String deltaContent = (String) choice.message().rawContent(); if (this.rawContent.length() < this.messageCaptureOptions.maxMessageContentLength()) { - if (this.rawContent.length() + deltaContent.length() >= this.messageCaptureOptions.maxMessageContentLength() ) { - deltaContent = deltaContent.substring(0, this.messageCaptureOptions.maxMessageContentLength() - this.rawContent.length()); + if (this.rawContent.length() + deltaContent.length() + >= this.messageCaptureOptions.maxMessageContentLength()) { + deltaContent = + deltaContent.substring( + 0, + this.messageCaptureOptions.maxMessageContentLength() + - this.rawContent.length()); this.rawContent.append(deltaContent).append(TRUNCATE_FLAG); } else { this.rawContent.append(deltaContent); @@ -98,8 +106,7 @@ void append(Choice choice) { for (int i = 0; i < choice.message().toolCalls().size(); i++) { ToolCall toolCall = choice.message().toolCalls().get(i); ToolCallBuffer buffer = - this.toolCalls.computeIfAbsent( - i, unused -> new ToolCallBuffer(toolCall.id())); + this.toolCalls.computeIfAbsent(i, unused -> new ToolCallBuffer(toolCall.id())); if (toolCall.type() != null) { buffer.type = toolCall.type(); } @@ -108,7 +115,8 @@ void append(Choice choice) { if (toolCall.function().name() != null) { buffer.function.name = toolCall.function().name(); } - if (this.messageCaptureOptions.captureMessageContent() && toolCall.function().arguments() != null) { + if (this.messageCaptureOptions.captureMessageContent() + && toolCall.function().arguments() != null) { if (buffer.function.arguments == null) { buffer.function.arguments = new StringBuilder(); } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java index 812fbd7f1253..e1ee8734eb00 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java @@ -1,11 +1,10 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import javax.annotation.Nullable; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import io.opentelemetry.instrumentation.api.genai.messages.InputMessage; import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; @@ -25,6 +24,12 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import javax.annotation.Nullable; 
+import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; public final class ChatModelMessagesProvider implements GenAiMessagesProvider { @@ -43,9 +48,9 @@ public static ChatModelMessagesProvider create(MessageCaptureOptions messageCapt @Nullable @Override - public InputMessages inputMessages(ChatCompletionRequest request, @Nullable ChatCompletion response) { - if (!messageCaptureOptions.captureMessageContent() - || request.messages() == null) { + public InputMessages inputMessages( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (!messageCaptureOptions.captureMessageContent() || request.messages() == null) { return null; } @@ -56,7 +61,8 @@ public InputMessages inputMessages(ChatCompletionRequest request, @Nullable Chat inputMessages.append( InputMessage.create(Role.SYSTEM, contentToMessageParts(msg.rawContent()))); } else if (msg.role() == ChatCompletionMessage.Role.USER) { - inputMessages.append(InputMessage.create(Role.USER, contentToMessageParts(msg.rawContent()))); + inputMessages.append( + InputMessage.create(Role.USER, contentToMessageParts(msg.rawContent()))); } else if (msg.role() == ChatCompletionMessage.Role.ASSISTANT) { List messageParts = new ArrayList<>(); @@ -67,13 +73,14 @@ public InputMessages inputMessages(ChatCompletionRequest request, @Nullable Chat List toolCalls = msg.toolCalls(); if (toolCalls != null) { - messageParts.addAll(toolCalls.stream() - .map(this::toolCallToMessagePart) - .collect(Collectors.toList())); + messageParts.addAll( + toolCalls.stream().map(this::toolCallToMessagePart).collect(Collectors.toList())); } inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); } else if (msg.role() == ChatCompletionMessage.Role.TOOL) { - inputMessages.append(InputMessage.create(Role.TOOL, contentToToolMessageParts(msg.toolCallId(), msg.rawContent()))); + inputMessages.append( + InputMessage.create( + Role.TOOL, contentToToolMessageParts(msg.toolCallId(), msg.rawContent()))); } } return inputMessages; @@ -81,7 +88,8 @@ public InputMessages inputMessages(ChatCompletionRequest request, @Nullable Chat @Nullable @Override - public OutputMessages outputMessages(ChatCompletionRequest request, @Nullable ChatCompletion response) { + public OutputMessages outputMessages( + ChatCompletionRequest request, @Nullable ChatCompletion response) { if (!messageCaptureOptions.captureMessageContent() || response == null || response.choices() == null) { @@ -100,9 +108,8 @@ public OutputMessages outputMessages(ChatCompletionRequest request, @Nullable Ch } List toolCalls = choiceMsg.toolCalls(); if (toolCalls != null) { - messageParts.addAll(toolCalls.stream() - .map(this::toolCallToMessagePart) - .collect(Collectors.toList())); + messageParts.addAll( + toolCalls.stream().map(this::toolCallToMessagePart).collect(Collectors.toList())); } } @@ -117,33 +124,37 @@ public OutputMessages outputMessages(ChatCompletionRequest request, @Nullable Ch @Nullable @Override - public SystemInstructions systemInstructions(ChatCompletionRequest request, @Nullable ChatCompletion response) { + public SystemInstructions systemInstructions( + ChatCompletionRequest request, @Nullable ChatCompletion response) { return null; } @Nullable @Override - public 
ToolDefinitions toolDefinitions(ChatCompletionRequest request, @Nullable ChatCompletion response) { + public ToolDefinitions toolDefinitions( + ChatCompletionRequest request, @Nullable ChatCompletion response) { if (request.tools() == null) { return null; } ToolDefinitions toolDefinitions = ToolDefinitions.create(); - request.tools() - .stream() + request.tools().stream() .filter(Objects::nonNull) - .map(tool -> { - if (tool.getFunction() != null) { - String name = tool.getFunction().getName(); - String type = tool.getType().name().toLowerCase(); - if (messageCaptureOptions.captureMessageContent() && tool.getFunction().getDescription() != null) { - return ToolDefinition.create(type, name, tool.getFunction().getDescription(), null); - } else { - return ToolDefinition.create(type, name, null, null); - } - } - return null; - }) + .map( + tool -> { + if (tool.getFunction() != null) { + String name = tool.getFunction().getName(); + String type = tool.getType().name().toLowerCase(); + if (messageCaptureOptions.captureMessageContent() + && tool.getFunction().getDescription() != null) { + return ToolDefinition.create( + type, name, tool.getFunction().getDescription(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + } + return null; + }) .filter(Objects::nonNull) .forEach(toolDefinitions::append); @@ -152,11 +163,12 @@ public ToolDefinitions toolDefinitions(ChatCompletionRequest request, @Nullable /** * Support content: + * *

- * <ul>
- *   <li>{@code String}</li>
- *   <li>{@code List}</li>
+ * <ul>
+ *   <li>{@code String}
+ *   <li>{@code List}
 * </ul>
- * */ + */ private List contentToMessageParts(Object rawContent) { List messageParts = contentToMessagePartsOrNull(rawContent); return messageParts == null ? Collections.singletonList(TextPart.create("")) : messageParts; @@ -164,11 +176,12 @@ private List contentToMessageParts(Object rawContent) { /** * Support content: + * *
- * <ul>
- *   <li>{@code String}</li>
- *   <li>{@code List}</li>
+ * <ul>
+ *   <li>{@code String}
+ *   <li>{@code List}
 * </ul>
- * */ + */ @SuppressWarnings({"unchecked", "rawtypes"}) private List contentToMessagePartsOrNull(Object rawContent) { if (rawContent instanceof String && !((String) rawContent).isEmpty()) { @@ -182,21 +195,24 @@ private List contentToMessagePartsOrNull(Object rawContent) { private MessagePart toolCallToMessagePart(ToolCall call) { if (call != null && call.function() != null) { - return ToolCallRequestPart.create(call.id(), call.function().name(), call.function().arguments()); + return ToolCallRequestPart.create( + call.id(), call.function().name(), call.function().arguments()); } return ToolCallRequestPart.create("unknown_function"); } /** * Support content: + * *
- * <ul>
- *   <li>{@code String}</li>
- *   <li>{@code List}</li>
+ * <ul>
+ *   <li>{@code String}
+ *   <li>{@code List}
 * </ul>
- * */ + */ private List contentToToolMessageParts(String toolCallId, Object rawContent) { if (rawContent instanceof String && !((String) rawContent).isEmpty()) { - return Collections.singletonList(ToolCallResponsePart.create(toolCallId, truncateTextContent((String) rawContent))); + return Collections.singletonList( + ToolCallResponsePart.create(toolCallId, truncateTextContent((String) rawContent))); } return Collections.singletonList(ToolCallResponsePart.create(toolCallId)); } @@ -210,10 +226,11 @@ private List joinContentParts(List contentParts) { } private String truncateTextContent(String content) { - if (!content.endsWith(TRUNCATE_FLAG) && content.length() > messageCaptureOptions.maxMessageContentLength()) { - content = content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + if (!content.endsWith(TRUNCATE_FLAG) + && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = + content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; } return content; } - } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java index 19764b4c134d..eaaa93fb2b8f 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java @@ -5,11 +5,6 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk.ChunkChoice; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; -import org.springframework.ai.openai.api.OpenAiApi.Usage; import io.opentelemetry.context.Context; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; @@ -20,6 +15,11 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk.ChunkChoice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import org.springframework.ai.openai.api.OpenAiApi.Usage; public final class ChatModelStreamListener { @@ -83,11 +83,7 @@ public void onChunk(ChatCompletionChunk chunk) { // Convert ChunkChoice to Choice for compatibility with buffer buffer.append( new org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice( - choice.finishReason(), - choice.index(), - choice.delta(), - choice.logprobs()) - ); + choice.finishReason(), choice.index(), choice.delta(), choice.logprobs())); } } } @@ -117,21 +113,26 @@ public void endSpan(@Nullable Throwable error) { outputTokens = (int) this.outputTokens.get(); } - List choices = 
this.chatModelMessageBuffers.stream() - .map(ChatModelMessageBuffer::toChoice) - .collect(Collectors.toList()); - - ChatCompletion result = new ChatCompletion( - this.requestId.get(), - choices, - null, // created - null, // model - null, // serviceTier - null, // systemFingerprint - "chat.completion", - new Usage(outputTokens, inputTokens, - inputTokens != null && outputTokens != null ? inputTokens + outputTokens : null, - null, null)); + List choices = + this.chatModelMessageBuffers.stream() + .map(ChatModelMessageBuffer::toChoice) + .collect(Collectors.toList()); + + ChatCompletion result = + new ChatCompletion( + this.requestId.get(), + choices, + null, // created + null, // model + null, // serviceTier + null, // systemFingerprint + "chat.completion", + new Usage( + outputTokens, + inputTokens, + inputTokens != null && outputTokens != null ? inputTokens + outputTokens : null, + null, + null)); if (this.newSpan) { this.instrumenter.end(this.context, this.request, result, error); diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java index 9973677f584a..db376065a638 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java @@ -1,10 +1,15 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; import io.opentelemetry.context.Context; import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; import io.opentelemetry.javaagent.bootstrap.reactor.ReactorSubscribeOnProcessTracing; import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; import reactor.core.publisher.Flux; public final class ChatModelStreamWrapper { @@ -14,16 +19,17 @@ public static Flux wrap( ChatModelStreamListener streamListener, Context context) { - Flux chatCompletionChunkFlux = originFlux.doOnNext( - chunk -> streamListener.onChunk(chunk)) - .doOnComplete(() -> streamListener.endSpan(null)) - .doOnError(streamListener::endSpan); + Flux chatCompletionChunkFlux = + originFlux + .doOnNext(chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); return ContextPropagationOperator.runWithContext(chatCompletionChunkFlux, context); } public static Flux enableContextPropagation(Flux originFlux) { - return originFlux - .contextWrite(ctx -> ctx.put(ReactorSubscribeOnProcessTracing.CONTEXT_PROPAGATION_KEY, true)); + return originFlux.contextWrite( + ctx -> ctx.put(ReactorSubscribeOnProcessTracing.CONTEXT_PROPAGATION_KEY, true)); } private ChatModelStreamWrapper() {} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java 
b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java index 8e876d5d21f4..e26a0e527bc4 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; @@ -8,9 +13,6 @@ import static net.bytebuddy.matcher.ElementMatchers.takesArgument; import static net.bytebuddy.matcher.ElementMatchers.takesArguments; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; import com.google.auto.service.AutoService; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; @@ -19,6 +21,9 @@ import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; import org.springframework.http.ResponseEntity; import reactor.core.publisher.Flux; @@ -38,14 +43,22 @@ public ElementMatcher typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("chatCompletionEntity")).and(takesArguments(2)) - .and(takesArgument(0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + isMethod() + .and(named("chatCompletionEntity")) + .and(takesArguments(2)) + .and( + takesArgument( + 0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) .and(returns(named("org.springframework.http.ResponseEntity"))), this.getClass().getName() + "$CallAdvice"); transformer.applyAdviceToMethod( - isMethod().and(named("chatCompletionStream")).and(takesArguments(2)) - .and(takesArgument(0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + isMethod() + .and(named("chatCompletionStream")) + .and(takesArguments(2)) + .and( + takesArgument( + 0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) .and(returns(named("reactor.core.publisher.Flux"))), this.getClass().getName() + "$StreamAdvice"); } @@ -79,7 +92,8 @@ public static void callExit( } scope.close(); - TELEMETRY.chatCompletionInstrumenter() + TELEMETRY + .chatCompletionInstrumenter() .end(context, request, response.hasBody() ? 
response.getBody() : null, throwable); } } @@ -93,12 +107,16 @@ public static void streamEnter( @Advice.Local("otelContext") Context context, @Advice.Local("otelStreamListener") ChatModelStreamListener streamListener) { context = Context.current(); - + if (TELEMETRY.chatCompletionInstrumenter().shouldStart(context, request)) { context = TELEMETRY.chatCompletionInstrumenter().start(context, request); - streamListener = new ChatModelStreamListener( - context, request, TELEMETRY.chatCompletionInstrumenter(), - TELEMETRY.messageCaptureOptions(), true); + streamListener = + new ChatModelStreamListener( + context, + request, + TELEMETRY.chatCompletionInstrumenter(), + TELEMETRY.messageCaptureOptions(), + true); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java index 335b0e61a2d8..60a40dcb0935 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java @@ -36,7 +36,9 @@ public ElementMatcher typeMatcher() { @Override public void transform(TypeTransformer transformer) { transformer.applyAdviceToMethod( - isMethod().and(named("internalStream")).and(takesArguments(2)) + isMethod() + .and(named("internalStream")) + .and(takesArguments(2)) .and(returns(named("reactor.core.publisher.Flux"))), this.getClass().getName() + "$StreamAdvice"); } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java index 19c358b1f36a..76827a57db37 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java @@ -20,7 +20,6 @@ public SpringAiOpenaiInstrumentationModule() { @Override public List typeInstrumentations() { - return asList(new OpenAiChatModelInstrumentation(), - new OpenAiApiInstrumentation()); + return asList(new OpenAiChatModelInstrumentation(), new OpenAiApiInstrumentation()); } } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java index b240df7f8d12..7bfa0f1733d0 100644 --- 
a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java @@ -19,7 +19,9 @@ public final class SpringAiOpenaiSingletons { .getInt("otel.instrumentation.genai.message-content.max-length", 8192)) .setCaptureMessageStrategy( InstrumentationConfig.get() - .getString("otel.instrumentation.genai.message-content.capture-strategy", "span-attributes")) + .getString( + "otel.instrumentation.genai.message-content.capture-strategy", + "span-attributes")) .build(); private SpringAiOpenaiSingletons() {} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java index 140f1473558a..ef54d2898971 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java @@ -5,19 +5,18 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; -/** - * Entrypoint for instrumenting Spring AI OpenAI clients. - */ +/** Entrypoint for instrumenting Spring AI OpenAI clients. */ public final class SpringAiOpenaiTelemetry { /** - * Returns a new {@link SpringAiOpenaiTelemetryBuilder} configured with the given {@link OpenTelemetry}. + * Returns a new {@link SpringAiOpenaiTelemetryBuilder} configured with the given {@link + * OpenTelemetry}. 
*/ public static SpringAiOpenaiTelemetryBuilder builder(OpenTelemetry openTelemetry) { return new SpringAiOpenaiTelemetryBuilder(openTelemetry); @@ -40,5 +39,4 @@ public Instrumenter chatCompletionInstrum public MessageCaptureOptions messageCaptureOptions() { return messageCaptureOptions; } - } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java index 9d4b2e005f29..eec2caee672a 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java @@ -5,8 +5,6 @@ package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; import com.google.errorprone.annotations.CanIgnoreReturnValue; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; @@ -14,10 +12,10 @@ import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesExtractor; import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesExtractor; import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiSpanNameExtractor; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; -/** - * Builder for {@link SpringAiOpenaiTelemetry}. - */ +/** Builder for {@link SpringAiOpenaiTelemetry}. */ public final class SpringAiOpenaiTelemetryBuilder { private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-openai-1.0"; @@ -34,27 +32,21 @@ public final class SpringAiOpenaiTelemetryBuilder { this.openTelemetry = openTelemetry; } - /** - * Sets whether to capture message content in spans. Defaults to false. - */ + /** Sets whether to capture message content in spans. Defaults to false. */ @CanIgnoreReturnValue public SpringAiOpenaiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { this.captureMessageContent = captureMessageContent; return this; } - /** - * Sets the maximum length of message content to capture. Defaults to 8192. - */ + /** Sets the maximum length of message content to capture. Defaults to 8192. */ @CanIgnoreReturnValue public SpringAiOpenaiTelemetryBuilder setContentMaxLength(int contentMaxLength) { this.contentMaxLength = contentMaxLength; return this; } - /** - * Sets the strategy to capture message content. Defaults to "span-attributes". - */ + /** Sets the strategy to capture message content. Defaults to "span-attributes". */ @CanIgnoreReturnValue public SpringAiOpenaiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { this.captureMessageStrategy = captureMessageStrategy; @@ -66,19 +58,23 @@ public SpringAiOpenaiTelemetryBuilder setCaptureMessageStrategy(String captureMe * SpringAiOpenaiTelemetryBuilder}. 
*/ public SpringAiOpenaiTelemetry build() { - MessageCaptureOptions messageCaptureOptions = MessageCaptureOptions.create( - captureMessageContent, contentMaxLength, captureMessageStrategy); + MessageCaptureOptions messageCaptureOptions = + MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); Instrumenter chatCompletionInstrumenter = Instrumenter.builder( openTelemetry, INSTRUMENTATION_NAME, GenAiSpanNameExtractor.create(ChatModelAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiAttributesExtractor.create(ChatModelAttributesGetter.INSTANCE)) - .addAttributesExtractor(GenAiMessagesExtractor.create( - ChatModelAttributesGetter.INSTANCE, - ChatModelMessagesProvider.create(messageCaptureOptions), - messageCaptureOptions, INSTRUMENTATION_NAME)) + .addAttributesExtractor( + GenAiAttributesExtractor.create(ChatModelAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiMessagesExtractor.create( + ChatModelAttributesGetter.INSTANCE, + ChatModelMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, + INSTRUMENTATION_NAME)) .buildInstrumenter(); return new SpringAiOpenaiTelemetry(chatCompletionInstrumenter, messageCaptureOptions); diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java index 743d1e309b28..9f3fece1a3e4 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; import io.opentelemetry.instrumentation.spring.ai.openai.v1_0.AbstractChatCompletionTest; diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts index f2cd5575ae3f..56ff1781fcf7 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts @@ -1,14 +1,14 @@ plugins { - id("otel.java-conventions") + id("otel.java-conventions") } otelJava { - minJavaVersionSupported.set(JavaVersion.VERSION_17) + minJavaVersionSupported.set(JavaVersion.VERSION_17) } dependencies { - api(project(":testing-common")) + api(project(":testing-common")) - api("org.springframework.ai:spring-ai-openai:1.0.0") - api(project(":instrumentation-api-incubator")) -} \ No newline at end of file + api("org.springframework.ai:spring-ai-openai:1.0.0") + api(project(":instrumentation-api-incubator")) +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java index d898ed230429..8affac4605ff 100644 --- 
a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java @@ -1,3 +1,8 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; @@ -29,31 +34,23 @@ import static org.assertj.core.api.Assertions.catchThrowable; import static org.junit.jupiter.api.Assertions.assertEquals; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; +import java.util.List; +import java.util.Optional; import java.util.function.Function; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; import org.springframework.ai.chat.messages.SystemMessage; import org.springframework.ai.chat.messages.ToolResponseMessage; import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; -import org.springframework.ai.openai.OpenAiChatModel; -import org.springframework.ai.openai.OpenAiChatOptions; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; import org.springframework.ai.chat.messages.UserMessage; import org.springframework.ai.chat.model.ChatResponse; import org.springframework.ai.chat.prompt.ChatOptions; import org.springframework.ai.chat.prompt.Prompt; -import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.OpenAiChatOptions; import org.springframework.ai.tool.ToolCallback; import org.springframework.ai.tool.function.FunctionToolCallback; -import org.springframework.ai.tool.metadata.ToolMetadata; -import org.springframework.ai.tool.method.MethodToolCallback; -import reactor.core.publisher.Flux; public abstract class AbstractChatCompletionTest extends AbstractSpringAiOpenaiTest { @@ -63,10 +60,11 @@ public abstract class AbstractChatCompletionTest extends AbstractSpringAiOpenaiT @Test void basic() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); OpenAiChatModel chatModel = getChatModel(); ChatResponse response = chatModel.call(prompt); @@ -93,18 +91,27 @@ void basic() { equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + 
"Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); } @Test void stream() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); OpenAiChatModel chatModel = getChatModel(); List chunks = chatModel.stream(prompt).collectList().block(); @@ -141,28 +148,38 @@ void stream() { reasons -> reasons.containsExactly("stop")), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("South Atlantic")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); } @Test void allTheClientOptions() { - OpenAiChatOptions options = OpenAiChatOptions.builder() - .model(TEST_CHAT_MODEL) - .maxTokens(1000) - .seed(100) - .stop(singletonList("foo")) - .topP(1.0) - .temperature(0.8) - .frequencyPenalty(0.5) - .presencePenalty(0.3) - .build(); - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(options) - .build(); + OpenAiChatOptions options = + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .maxTokens(1000) + .seed(100) + .stop(singletonList("foo")) + .topP(1.0) + .temperature(0.8) + .frequencyPenalty(0.5) + .presencePenalty(0.3) + .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(options) + .build(); OpenAiChatModel chatModel = getChatModel(); ChatResponse response = chatModel.call(prompt); @@ -179,11 +196,15 @@ void allTheClientOptions() { equalTo(GEN_AI_OPERATION_NAME, CHAT), equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.8d), - equalTo(GEN_AI_REQUEST_MAX_TOKENS, Long.valueOf(options.getMaxTokens())), + equalTo( + GEN_AI_REQUEST_MAX_TOKENS, Long.valueOf(options.getMaxTokens())), equalTo(GEN_AI_REQUEST_SEED, Long.valueOf(options.getSeed())), - satisfies(GEN_AI_REQUEST_STOP_SEQUENCES, seq -> seq.hasSize(options.getStop().size())), + satisfies( + GEN_AI_REQUEST_STOP_SEQUENCES, + seq -> seq.hasSize(options.getStop().size())), equalTo(GEN_AI_REQUEST_TOP_P, options.getTopP()), - equalTo(GEN_AI_REQUEST_FREQUENCY_PENALTY, options.getFrequencyPenalty()), + equalTo( + GEN_AI_REQUEST_FREQUENCY_PENALTY, options.getFrequencyPenalty()), equalTo(GEN_AI_REQUEST_PRESENCE_PENALTY, options.getPresencePenalty()), equalTo(GEN_AI_RESPONSE_ID, 
response.getMetadata().getId()), equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), @@ -195,18 +216,27 @@ void allTheClientOptions() { equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); } @Test void with400Error() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(ChatOptions.builder().model("gpt-4o").build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); OpenAiChatModel chatModel = getChatModel(); Throwable thrown = catchThrowable(() -> chatModel.stream(prompt).collectList().block()); @@ -224,19 +254,20 @@ void with400Error() { equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); } @Test void multipleChoices() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .N(2) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions.builder().model(TEST_CHAT_MODEL).N(2).build()) + .build(); OpenAiChatModel chatModel = getChatModel(); ChatResponse response = chatModel.call(prompt); @@ -266,22 +297,27 @@ void multipleChoices() { equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 26L), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); } @Test void 
streamMultipleChoices() { - Prompt prompt = Prompt.builder() - .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .N(2) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions.builder().model(TEST_CHAT_MODEL).N(2).build()) + .build(); OpenAiChatModel chatModel = getChatModel(); // there's a bug in open-ai chat model, thus we couldn't agg multi choice @@ -304,24 +340,38 @@ void streamMultipleChoices() { reasons -> reasons.containsExactly("stop", "stop")), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("Answer in up to 3 words: Which ocean contains Bouvet Island?")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("Southern Ocean")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); } @Test void toolCalls() { - Prompt prompt = Prompt.builder() - .messages(asList( - SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), - UserMessage.builder().text("What is the weather in New York City and London?").build())) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .toolCallbacks(buildGetWeatherToolDefinition()) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); OpenAiChatModel chatModel = getChatModel(); ChatResponse response = chatModel.call(prompt); @@ -351,35 +401,62 @@ void toolCalls() { equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 45L), equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 356L), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("get_weather")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("New York City")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_calls")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + satisfies( 
+ GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("New York City")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("tool_calls")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); getTesting().clearData(); - prompt = Prompt.builder() - .messages( - asList( - SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), - UserMessage.builder().text("What is the weather in New York City and London?").build(), - response.getResult().getOutput(), - new ToolResponseMessage( - asList( - new ToolResponse(toolCalls.get(0).id(), "get_weather", "25 degrees and sunny"), - new ToolResponse(toolCalls.get(1).id(), "get_weather", "15 degrees and sunny"))))) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .toolCallbacks(buildGetWeatherToolDefinition()) - .build()) - .build(); + prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build(), + response.getResult().getOutput(), + new ToolResponseMessage( + asList( + new ToolResponse( + toolCalls.get(0).id(), "get_weather", "25 degrees and sunny"), + new ToolResponse( + toolCalls.get(1).id(), "get_weather", "15 degrees and sunny"))))) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); response = chatModel.call(prompt); @@ -402,27 +479,44 @@ void toolCalls() { equalTo(GEN_AI_USAGE_INPUT_TOKENS, 386L), equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 31L), equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 417L), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("tool_call_response")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("25 degrees and sunny")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("15 degrees and sunny")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("tool_call_response")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("25 degrees and sunny")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("15 degrees and sunny")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> 
messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); } @Test void streamToolCalls() { - Prompt prompt = Prompt.builder() - .messages(asList( - SystemMessage.builder().text("You are a helpful assistant providing weather updates.").build(), - UserMessage.builder().text("What is the weather in New York City and London?").build())) - .chatOptions(OpenAiChatOptions - .builder() - .model(TEST_CHAT_MODEL) - .toolCallbacks(buildGetWeatherToolDefinition()) - .build()) - .build(); + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); OpenAiChatModel chatModel = getChatModel(); List chunks = chatModel.stream(prompt).toStream().collect(Collectors.toList()); @@ -443,16 +537,36 @@ void streamToolCalls() { reasons -> reasons.containsExactly("tool_calls")), equalTo(GEN_AI_SPAN_KIND, "LLM"), satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("What is the weather in New York City and London?")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), - satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("You are a helpful assistant providing weather updates.")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("get_weather")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("New York City")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), - satisfies(GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_calls")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), - satisfies(GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("get_weather"))))); + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("New York City")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("tool_calls")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); } private ToolCallback buildGetWeatherToolDefinition() { @@ -480,5 +594,4 @@ public String apply(ToolInput location) { return "test function"; } } - } diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java 
b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java index 1812e14c71a5..938fcfd970c9 100644 --- a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java @@ -1,12 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; -import org.springframework.ai.openai.OpenAiChatModel; -import org.springframework.ai.openai.api.OpenAiApi; import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; import io.opentelemetry.instrumentation.testing.recording.RecordingExtension; import java.net.http.HttpClient; import java.net.http.HttpClient.Version; import org.junit.jupiter.api.extension.RegisterExtension; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.api.OpenAiApi; import org.springframework.http.client.JdkClientHttpRequestFactory; import org.springframework.http.client.reactive.JdkClientHttpConnector; import org.springframework.web.client.RestClient; @@ -18,8 +23,7 @@ public abstract class AbstractSpringAiOpenaiTest { private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; - @RegisterExtension - static final RecordingExtension recording = new RecordingExtension(API_URL); + @RegisterExtension static final RecordingExtension recording = new RecordingExtension(API_URL); protected abstract InstrumentationExtension getTesting(); @@ -29,16 +33,15 @@ public abstract class AbstractSpringAiOpenaiTest { protected final OpenAiApi getOpenAiApi() { if (openAiApi == null) { - HttpClient httpClient = HttpClient.newBuilder() - .version(Version.HTTP_1_1) - .build(); - - OpenAiApi.Builder builder = OpenAiApi.builder() - .restClientBuilder(RestClient.builder() - .requestFactory(new JdkClientHttpRequestFactory(httpClient))) - .webClientBuilder(WebClient.builder() - .clientConnector(new JdkClientHttpConnector(httpClient))) - .baseUrl("http://localhost:" + recording.getPort()); + HttpClient httpClient = HttpClient.newBuilder().version(Version.HTTP_1_1).build(); + + OpenAiApi.Builder builder = + OpenAiApi.builder() + .restClientBuilder( + RestClient.builder().requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder( + WebClient.builder().clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); if (recording.isRecording()) { builder.apiKey(System.getenv("OPENAI_API_KEY")); } else { @@ -51,10 +54,11 @@ protected final OpenAiApi getOpenAiApi() { protected final OpenAiChatModel getChatModel() { if (chatModel == null) { - chatModel = OpenAiChatModel.builder() - .openAiApi(getOpenAiApi()) - .toolExecutionEligibilityPredicate((o1, o2) -> false) - .build(); + chatModel = + OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolExecutionEligibilityPredicate((o1, o2) -> false) + .build(); } return chatModel; }