diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java
new file mode 100644
index 000000000000..c4e14b528b48
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesExtractor.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_DESCRIPTION;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_ID;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_NAME;
+import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_DATA_SOURCE_ID;
+import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet;
+
+import io.opentelemetry.api.common.AttributesBuilder;
+import io.opentelemetry.context.Context;
+import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor;
+import javax.annotation.Nullable;
+
+/**
+ * Extractor of GenAI Agent attributes.
+ *
+ * <p>This class delegates to a type-specific {@link GenAiAgentAttributesGetter} for individual
+ * attribute extraction from request/response objects.
+ */
+public final class GenAiAgentAttributesExtractor<REQUEST, RESPONSE>
+    implements AttributesExtractor<REQUEST, RESPONSE> {
+
+  /** Creates the GenAI Agent attributes extractor. */
+  public static <REQUEST, RESPONSE> AttributesExtractor<REQUEST, RESPONSE> create(
+      GenAiAgentAttributesGetter<REQUEST> attributesGetter) {
+    return new GenAiAgentAttributesExtractor<>(attributesGetter);
+  }
+
+  private final GenAiAgentAttributesGetter<REQUEST> getter;
+
+  private GenAiAgentAttributesExtractor(GenAiAgentAttributesGetter<REQUEST> getter) {
+    this.getter = getter;
+  }
+
+  @Override
+  public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) {
+    internalSet(attributes, GEN_AI_AGENT_ID, getter.getId(request));
+    internalSet(attributes, GEN_AI_AGENT_NAME, getter.getName(request));
+    internalSet(attributes, GEN_AI_AGENT_DESCRIPTION, getter.getDescription(request));
+    internalSet(attributes, GEN_AI_DATA_SOURCE_ID, getter.getDataSourceId(request));
+  }
+
+  @Override
+  public void onEnd(
+      AttributesBuilder attributes,
+      Context context,
+      REQUEST request,
+      @Nullable RESPONSE response,
+      @Nullable Throwable error) {
+    // do nothing
+  }
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java
new file mode 100644
index 000000000000..d7837cd57341
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAgentAttributesGetter.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import javax.annotation.Nullable;
+
+public interface GenAiAgentAttributesGetter<REQUEST> {
+
+  String getName(REQUEST request);
+
+  @Nullable
+  String getDescription(REQUEST request);
+
+  @Nullable
+  String getId(REQUEST request);
+
+  @Nullable
+  String getDataSourceId(REQUEST request);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
index ed2e48cd8024..d333f8c0eef5 100644
--- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiAttributesGetter.java
@@ -15,8 +15,8 @@
  * library/framework. It will be used by the {@link GenAiAttributesExtractor} to obtain the various
  * GenAI attributes in a type-generic way.
*/ -public interface GenAiAttributesGetter { - String getOperationName(REQUEST request); +public interface GenAiAttributesGetter + extends GenAiOperationAttributesGetter { String getSystem(REQUEST request); diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java new file mode 100644 index 000000000000..a536880f8283 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesExtractor.java @@ -0,0 +1,251 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_TYPE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_CHOICE_COUNT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_FREQUENCY_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MAX_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_PRESENCE_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_SEED; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_STOP_SEQUENCES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_K; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_P; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_SYSTEM_INSTRUCTIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import 
static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiEventName.GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS; +import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; +import static io.opentelemetry.semconv.trace.attributes.SemanticAttributes.EVENT_NAME; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.logs.LogRecordBuilder; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.aliyun.common.JsonMarshaler; +import io.opentelemetry.instrumentation.api.aliyun.common.provider.GlobalInstanceHolder; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions.CaptureMessageStrategy; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; +import io.opentelemetry.instrumentation.api.log.genai.GenAiEventLoggerProvider; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.annotation.Nullable; + +public class GenAiMessagesExtractor + implements AttributesExtractor { + + private static final Logger LOGGER = Logger.getLogger(GenAiMessagesExtractor.class.getName()); + + /** Creates the GenAI attributes extractor. 
*/ + public static AttributesExtractor create( + GenAiAttributesGetter attributesGetter, + GenAiMessagesProvider messagesProvider, + MessageCaptureOptions messageCaptureOptions, + String instrumentationName) { + return new GenAiMessagesExtractor<>( + attributesGetter, messagesProvider, messageCaptureOptions, instrumentationName); + } + + private final MessageCaptureOptions messageCaptureOptions; + + private final GenAiAttributesGetter getter; + + private final GenAiMessagesProvider messagesProvider; + + private final String instrumentationName; + + private final AtomicBoolean lazyInit = new AtomicBoolean(false); + + private JsonMarshaler jsonMarshaler; + + private io.opentelemetry.api.logs.Logger eventLogger; + + private GenAiMessagesExtractor( + GenAiAttributesGetter getter, + GenAiMessagesProvider messagesProvider, + MessageCaptureOptions messageCaptureOptions, + String instrumentationName) { + this.getter = getter; + this.messagesProvider = messagesProvider; + this.messageCaptureOptions = messageCaptureOptions; + this.instrumentationName = instrumentationName; + } + + @Override + public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { + tryInit(); + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals( + messageCaptureOptions.captureMessageStrategy())) { + SystemInstructions systemInstructions = messagesProvider.systemInstructions(request, null); + if (systemInstructions != null) { + internalSet( + attributes, + GEN_AI_SYSTEM_INSTRUCTIONS, + toJsonString(systemInstructions.getSerializableObject())); + } + + InputMessages inputMessages = messagesProvider.inputMessages(request, null); + if (inputMessages != null) { + internalSet( + attributes, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + } + + ToolDefinitions toolDefinitions = messagesProvider.toolDefinitions(request, null); + if (toolDefinitions != null) { + internalSet( + attributes, + GEN_AI_TOOL_DEFINITIONS, + toJsonString(toolDefinitions.getSerializableObject())); + } + } + } + + @Override + public void onEnd( + AttributesBuilder attributes, + Context context, + REQUEST request, + @Nullable RESPONSE response, + @Nullable Throwable error) { + if (CaptureMessageStrategy.SPAN_ATTRIBUTES.equals( + messageCaptureOptions.captureMessageStrategy())) { + OutputMessages outputMessages = messagesProvider.outputMessages(request, response); + if (outputMessages != null) { + internalSet( + attributes, + GEN_AI_OUTPUT_MESSAGES, + toJsonString(outputMessages.getSerializableObject())); + } + } else if (CaptureMessageStrategy.EVENT.equals( + messageCaptureOptions.captureMessageStrategy())) { + emitInferenceEvent(context, request, response); + } + } + + private void emitInferenceEvent(Context context, REQUEST request, @Nullable RESPONSE response) { + if (eventLogger != null) { + LogRecordBuilder builder = + eventLogger + .logRecordBuilder() + .setAttribute(EVENT_NAME, GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS) + .setContext(context); + + SystemInstructions systemInstructions = + messagesProvider.systemInstructions(request, response); + if (systemInstructions != null) { + internalSetLogAttribute( + builder, + GEN_AI_SYSTEM_INSTRUCTIONS, + toJsonString(systemInstructions.getSerializableObject())); + } + InputMessages inputMessages = messagesProvider.inputMessages(request, response); + if (inputMessages != null) { + internalSetLogAttribute( + builder, GEN_AI_INPUT_MESSAGES, toJsonString(inputMessages.getSerializableObject())); + } + ToolDefinitions toolDefinitions = 
messagesProvider.toolDefinitions(request, null); + if (toolDefinitions != null) { + internalSetLogAttribute( + builder, + GEN_AI_TOOL_DEFINITIONS, + toJsonString(toolDefinitions.getSerializableObject())); + } + OutputMessages outputMessages = messagesProvider.outputMessages(request, response); + if (outputMessages != null) { + internalSetLogAttribute( + builder, GEN_AI_OUTPUT_MESSAGES, toJsonString(outputMessages.getSerializableObject())); + } + + internalSetLogAttribute(builder, GEN_AI_OPERATION_NAME, getter.getOperationName(request)); + internalSetLogAttribute(builder, GEN_AI_OUTPUT_TYPE, getter.getOutputType(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_CHOICE_COUNT, getter.getChoiceCount(request)); + internalSetLogAttribute(builder, GEN_AI_PROVIDER_NAME, getter.getSystem(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_MODEL, getter.getRequestModel(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_SEED, getter.getRequestSeed(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_FREQUENCY_PENALTY, getter.getRequestFrequencyPenalty(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_MAX_TOKENS, getter.getRequestMaxTokens(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_PRESENCE_PENALTY, getter.getRequestPresencePenalty(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_STOP_SEQUENCES, getter.getRequestStopSequences(request)); + internalSetLogAttribute( + builder, GEN_AI_REQUEST_TEMPERATURE, getter.getRequestTemperature(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_K, getter.getRequestTopK(request)); + internalSetLogAttribute(builder, GEN_AI_REQUEST_TOP_P, getter.getRequestTopP(request)); + + List finishReasons = getter.getResponseFinishReasons(request, response); + if (finishReasons != null && !finishReasons.isEmpty()) { + builder.setAttribute(GEN_AI_RESPONSE_FINISH_REASONS, finishReasons); + } + internalSetLogAttribute(builder, GEN_AI_RESPONSE_ID, getter.getResponseId(request, response)); + internalSetLogAttribute( + builder, GEN_AI_RESPONSE_MODEL, getter.getResponseModel(request, response)); + internalSetLogAttribute( + builder, GEN_AI_USAGE_INPUT_TOKENS, getter.getUsageInputTokens(request, response)); + internalSetLogAttribute( + builder, GEN_AI_USAGE_OUTPUT_TOKENS, getter.getUsageOutputTokens(request, response)); + builder.emit(); + } + } + + private void internalSetLogAttribute( + LogRecordBuilder logRecordBuilder, AttributeKey key, @Nullable T value) { + if (value == null) { + return; + } + logRecordBuilder.setAttribute(key, value); + } + + private void tryInit() { + if (lazyInit.get()) { + return; + } + + if (lazyInit.compareAndSet(false, true)) { + jsonMarshaler = GlobalInstanceHolder.getInstance(JsonMarshaler.class); + if (jsonMarshaler == null) { + LOGGER.log(Level.WARNING, "failed to init json marshaler, global instance is null"); + } + + GenAiEventLoggerProvider loggerProvider = + GlobalInstanceHolder.getInstance(GenAiEventLoggerProvider.class); + + if (loggerProvider == null) { + LOGGER.log(Level.WARNING, "failed to init event logger, logger provider is null"); + return; + } + + eventLogger = loggerProvider.get(instrumentationName); + } + } + + private String toJsonString(Object object) { + if (jsonMarshaler == null) { + LOGGER.log(Level.INFO, "failed to serialize object, json marshaler is null"); + return null; + } + return jsonMarshaler.toJSONStringNonEmpty(object); + } +} diff --git 
a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java
new file mode 100644
index 000000000000..5730640d46d3
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiMessagesProvider.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import io.opentelemetry.instrumentation.api.genai.messages.InputMessages;
+import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages;
+import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions;
+import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions;
+import javax.annotation.Nullable;
+
+public interface GenAiMessagesProvider<REQUEST, RESPONSE> {
+
+  @Nullable
+  InputMessages inputMessages(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  OutputMessages outputMessages(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  SystemInstructions systemInstructions(REQUEST request, @Nullable RESPONSE response);
+
+  @Nullable
+  ToolDefinitions toolDefinitions(REQUEST request, @Nullable RESPONSE response);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java
new file mode 100644
index 000000000000..7ac899401077
--- /dev/null
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiOperationAttributesGetter.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.instrumentation.api.incubator.semconv.genai;
+
+import javax.annotation.Nullable;
+
+public interface GenAiOperationAttributesGetter<REQUEST> {
+
+  String getOperationName(REQUEST request);
+
+  @Nullable
+  String getOperationTarget(REQUEST request);
+}
diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
index d8a7f517da3c..66ebb9949c6c 100644
--- a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
+++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/GenAiSpanNameExtractor.java
@@ -19,19 +19,19 @@ public static SpanNameExtractor create(
     return new GenAiSpanNameExtractor<>(attributesGetter);
   }

-  private final GenAiAttributesGetter getter;
+  private final GenAiOperationAttributesGetter getter;

-  private GenAiSpanNameExtractor(GenAiAttributesGetter getter) {
+  private GenAiSpanNameExtractor(GenAiOperationAttributesGetter getter) {
     this.getter = getter;
   }

   @Override
   public String extract(REQUEST request) {
     String operation = getter.getOperationName(request);
-    String model = getter.getRequestModel(request);
-    if (model == null) {
+    String operationTarget = getter.getOperationTarget(request);
+    if (operationTarget == null) {
       return operation;
     }
-
return operation + ' ' + model; + return operation + ' ' + operationTarget; } } diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java new file mode 100644 index 000000000000..3c40e1ead0f1 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/AgentIncubatingAttributes.java @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import io.opentelemetry.api.common.AttributeKey; + +public final class AgentIncubatingAttributes { + + public static final AttributeKey GEN_AI_AGENT_DESCRIPTION = + stringKey("gen_ai.agent.description"); + public static final AttributeKey GEN_AI_AGENT_ID = stringKey("gen_ai.agent.id"); + public static final AttributeKey GEN_AI_AGENT_NAME = stringKey("gen_ai.agent.name"); + public static final AttributeKey GEN_AI_DATA_SOURCE_ID = + stringKey("gen_ai.data_source.id"); +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java new file mode 100644 index 000000000000..507315726c22 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiIncubatingAttributes.java @@ -0,0 +1,93 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; + +import static io.opentelemetry.api.common.AttributeKey.doubleKey; +import static io.opentelemetry.api.common.AttributeKey.longKey; +import static io.opentelemetry.api.common.AttributeKey.stringArrayKey; +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import io.opentelemetry.api.common.AttributeKey; +import java.util.List; + +public final class GenAiIncubatingAttributes { + + public static final AttributeKey GEN_AI_OPERATION_NAME = + stringKey("gen_ai.operation.name"); + public static final AttributeKey> GEN_AI_REQUEST_ENCODING_FORMATS = + stringArrayKey("gen_ai.request.encoding_formats"); + public static final AttributeKey GEN_AI_REQUEST_FREQUENCY_PENALTY = + doubleKey("gen_ai.request.frequency_penalty"); + public static final AttributeKey GEN_AI_REQUEST_MAX_TOKENS = + longKey("gen_ai.request.max_tokens"); + public static final AttributeKey GEN_AI_REQUEST_MODEL = stringKey("gen_ai.request.model"); + public static final AttributeKey GEN_AI_REQUEST_PRESENCE_PENALTY = + doubleKey("gen_ai.request.presence_penalty"); + public static final AttributeKey GEN_AI_REQUEST_SEED = longKey("gen_ai.request.seed"); + public static final AttributeKey> GEN_AI_REQUEST_STOP_SEQUENCES = + stringArrayKey("gen_ai.request.stop_sequences"); + public static final AttributeKey GEN_AI_REQUEST_TEMPERATURE = + doubleKey("gen_ai.request.temperature"); + public static final AttributeKey GEN_AI_REQUEST_TOP_K = doubleKey("gen_ai.request.top_k"); + public static final AttributeKey 
GEN_AI_REQUEST_TOP_P = doubleKey("gen_ai.request.top_p"); + public static final AttributeKey> GEN_AI_RESPONSE_FINISH_REASONS = + stringArrayKey("gen_ai.response.finish_reasons"); + public static final AttributeKey GEN_AI_RESPONSE_ID = stringKey("gen_ai.response.id"); + public static final AttributeKey GEN_AI_RESPONSE_MODEL = + stringKey("gen_ai.response.model"); + public static final AttributeKey GEN_AI_PROVIDER_NAME = stringKey("gen_ai.provider.name"); + public static final AttributeKey GEN_AI_CONVERSATION_ID = + stringKey("gen_ai.conversation.id"); + public static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = + longKey("gen_ai.usage.input_tokens"); + public static final AttributeKey GEN_AI_USAGE_OUTPUT_TOKENS = + longKey("gen_ai.usage.output_tokens"); + public static final AttributeKey GEN_AI_REQUEST_CHOICE_COUNT = + longKey("gen_ai.request.choice.count"); + public static final AttributeKey GEN_AI_OUTPUT_TYPE = stringKey("gen_ai.output.type"); + public static final AttributeKey GEN_AI_SYSTEM_INSTRUCTIONS = + stringKey("gen_ai.system_instructions"); + public static final AttributeKey GEN_AI_INPUT_MESSAGES = + stringKey("gen_ai.input.messages"); + public static final AttributeKey GEN_AI_OUTPUT_MESSAGES = + stringKey("gen_ai.output.messages"); + public static final AttributeKey GEN_AI_TOOL_DEFINITIONS = + stringKey("gen_ai.tool.definitions"); + + public static class GenAiOperationNameIncubatingValues { + public static final String CHAT = "chat"; + public static final String CREATE_AGENT = "create_agent"; + public static final String EMBEDDINGS = "embeddings"; + public static final String EXECUTE_TOOL = "execute_tool"; + public static final String GENERATE_CONTENT = "generate_content"; + public static final String INVOKE_AGENT = "invoke_agent"; + public static final String TEXT_COMPLETION = "text_completion"; + } + + public static class GenAiProviderNameIncubatingValues { + public static final String ANTHROPIC = "anthropic"; + public static final String AWS_BEDROCK = "aws.bedrock"; + public static final String AZURE_AI_INFERENCE = "azure.ai.inference"; + public static final String AZURE_AI_OPENAI = "azure.ai.openai"; + public static final String COHERE = "cohere"; + public static final String DEEPSEEK = "deepseek"; + public static final String GCP_GEMINI = "gcp.gemini"; + public static final String GCP_GEN_AI = "gcp.gen_ai"; + public static final String GCP_VERTEX_AI = "gcp.vertex_ai"; + public static final String GROQ = "groq"; + public static final String IBM_WATSONX_AI = "ibm.watsonx.ai"; + public static final String MISTRAL_AI = "mistral_ai"; + public static final String OPENAI = "openai"; + public static final String PERPLEXITY = "perplexity"; + public static final String X_AI = "x_ai"; + public static final String DASHSCOPE = "dashscope"; + } + + public static class GenAiEventName { + public static final String GEN_AI_CLIENT_INFERENCE_OPERATION_DETAILS = + "gen_ai.client.inference.operation.details"; + } +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java new file mode 100644 index 000000000000..42eab245b3b7 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/incubator/GenAiToolIncubatingAttributes.java @@ -0,0 +1,23 @@ +/* + * Copyright The 
OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.incubator; + +import static io.opentelemetry.api.common.AttributeKey.stringKey; + +import io.opentelemetry.api.common.AttributeKey; + +public class GenAiToolIncubatingAttributes { + + public static final AttributeKey GEN_AI_TOOL_CALL_ID = stringKey("gen_ai.tool.call.id"); + public static final AttributeKey GEN_AI_TOOL_DESCRIPTION = + stringKey("gen_ai.tool.description"); + public static final AttributeKey GEN_AI_TOOL_NAME = stringKey("gen_ai.tool.name"); + public static final AttributeKey GEN_AI_TOOL_TYPE = stringKey("gen_ai.tool.type"); + public static final AttributeKey GEN_AI_TOOL_CALL_ARGUMENTS = + stringKey("gen_ai.tool.call.arguments"); + public static final AttributeKey GEN_AI_TOOL_CALL_RESULT = + stringKey("gen_ai.tool.call.result"); +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java new file mode 100644 index 000000000000..d06a4e4e9ead --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesExtractor.java @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.tool; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_ARGUMENTS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_CALL_RESULT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_DESCRIPTION; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_TYPE; +import static io.opentelemetry.instrumentation.api.internal.AttributesExtractorUtil.internalSet; + +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; +import javax.annotation.Nullable; + +public final class GenAiToolAttributesExtractor + implements AttributesExtractor { + + /** Creates the GenAI attributes extractor. 
*/ + public static AttributesExtractor create( + GenAiToolAttributesGetter attributesGetter, + MessageCaptureOptions messageCaptureOptions) { + return new GenAiToolAttributesExtractor<>(attributesGetter, messageCaptureOptions); + } + + private final GenAiToolAttributesGetter getter; + + private final MessageCaptureOptions messageCaptureOptions; + + private GenAiToolAttributesExtractor( + GenAiToolAttributesGetter getter, + MessageCaptureOptions messageCaptureOptions) { + this.getter = getter; + this.messageCaptureOptions = messageCaptureOptions; + } + + @Override + public void onStart(AttributesBuilder attributes, Context parentContext, REQUEST request) { + internalSet(attributes, GEN_AI_OPERATION_NAME, getter.getOperationName(request)); + internalSet(attributes, GEN_AI_TOOL_DESCRIPTION, getter.getToolDescription(request)); + internalSet(attributes, GEN_AI_TOOL_NAME, getter.getToolName(request)); + internalSet(attributes, GEN_AI_TOOL_TYPE, getter.getToolType(request)); + if (messageCaptureOptions.captureMessageContent()) { + internalSet(attributes, GEN_AI_TOOL_CALL_ARGUMENTS, getter.getToolCallArguments(request)); + } + } + + @Override + public void onEnd( + AttributesBuilder attributes, + Context context, + REQUEST request, + @Nullable RESPONSE response, + @Nullable Throwable error) { + internalSet(attributes, GEN_AI_TOOL_CALL_ID, getter.getToolCallId(request, response)); + if (messageCaptureOptions.captureMessageContent()) { + internalSet(attributes, GEN_AI_TOOL_CALL_RESULT, getter.getToolCallResult(request, response)); + } + } +} diff --git a/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java new file mode 100644 index 000000000000..53b181e8d6e7 --- /dev/null +++ b/instrumentation-api-incubator/src/main/java/io/opentelemetry/instrumentation/api/incubator/semconv/genai/tool/GenAiToolAttributesGetter.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.api.incubator.semconv.genai.tool; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiOperationAttributesGetter; +import javax.annotation.Nullable; + +public interface GenAiToolAttributesGetter + extends GenAiOperationAttributesGetter { + + String getToolDescription(REQUEST request); + + String getToolName(REQUEST request); + + String getToolType(REQUEST request); + + @Nullable + String getToolCallArguments(REQUEST request); + + @Nullable + String getToolCallId(REQUEST request, RESPONSE response); + + @Nullable + String getToolCallResult(REQUEST request, RESPONSE response); +} diff --git a/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts new file mode 100644 index 000000000000..072a96df450f --- /dev/null +++ b/instrumentation/reactor/reactor-3.1/bootstrap/build.gradle.kts @@ -0,0 +1,3 @@ +plugins { + id("otel.javaagent-bootstrap") +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts new file mode 100644 index 000000000000..697cb13f2302 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/build.gradle.kts @@ -0,0 +1,71 @@ +plugins { + id("otel.javaagent-instrumentation") +} + 
+otelJava { + // Spring AI 3 requires java 17 + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +muzzle { + pass { + group.set("org.springframework.ai") + module.set("spring-ai-client-chat") + versions.set("(,)") + } +} + +repositories { + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + mavenContent { + snapshotsOnly() + } + } + mavenCentral() +} + +dependencies { + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-client-chat:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") + + implementation(project(":instrumentation:reactor:reactor-3.1:library")) + + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + + testInstrumentation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent")) + testInstrumentation(project(":instrumentation:reactor:reactor-3.1:javaagent")) + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-1.0:testing")) +} + +tasks { + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } + } + + // TODO run tests both with and without genai message capture + systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java new file mode 100644 index 000000000000..adfd63e00213 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiInstrumentationModule.java @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import static java.util.Arrays.asList; + +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultCallResponseSpecInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.DefaultStreamResponseSpecInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.DefaultToolCallingManagerInstrumentation; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallbackInstrumentation; +import java.util.List; + +@AutoService(InstrumentationModule.class) +public class SpringAiInstrumentationModule extends InstrumentationModule { + + public SpringAiInstrumentationModule() { + super("spring-ai", "spring-ai-1.0"); + 
} + + @Override + public List typeInstrumentations() { + return asList( + new DefaultCallResponseSpecInstrumentation(), + new DefaultStreamResponseSpecInstrumentation(), + new ToolCallbackInstrumentation(), + new DefaultToolCallingManagerInstrumentation()); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java new file mode 100644 index 000000000000..ad016b51aec0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiSingletons.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig; + +public final class SpringAiSingletons { + public static final SpringAiTelemetry TELEMETRY = + SpringAiTelemetry.builder(GlobalOpenTelemetry.get()) + .setCaptureMessageContent( + InstrumentationConfig.get() + .getBoolean("otel.instrumentation.genai.capture-message-content", true)) + .setContentMaxLength( + InstrumentationConfig.get() + .getInt("otel.instrumentation.genai.message-content.max-length", 8192)) + .setCaptureMessageStrategy( + InstrumentationConfig.get() + .getString( + "otel.instrumentation.genai.message-content.capture-strategy", + "span-attributes")) + .build(); + + private SpringAiSingletons() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java new file mode 100644 index 000000000000..83c254f69d9a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetry.java @@ -0,0 +1,45 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; + +public final class SpringAiTelemetry { + + public static SpringAiTelemetryBuilder builder(OpenTelemetry openTelemetry) { + return new SpringAiTelemetryBuilder(openTelemetry); + } + + private final Instrumenter chatClientInstrumenter; + private final Instrumenter toolCallInstrumenter; + private final MessageCaptureOptions messageCaptureOptions; + + SpringAiTelemetry( + Instrumenter chatClientInstrumenter, + Instrumenter toolCallInstrumenter, + MessageCaptureOptions messageCaptureOptions) { + this.chatClientInstrumenter = chatClientInstrumenter; + this.toolCallInstrumenter = toolCallInstrumenter; + this.messageCaptureOptions = messageCaptureOptions; + } + + public Instrumenter 
chatClientInstrumenter() { + return chatClientInstrumenter; + } + + public Instrumenter toolCallInstrumenter() { + return toolCallInstrumenter; + } + + public MessageCaptureOptions messageCaptureOptions() { + return messageCaptureOptions; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java new file mode 100644 index 000000000000..dbd2b146d9fc --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/SpringAiTelemetryBuilder.java @@ -0,0 +1,95 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAgentAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiSpanNameExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.tool.GenAiToolAttributesExtractor; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.ChatClientAttributesGetter; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client.ChatClientMessagesProvider; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallAttributesGetter; +import io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool.ToolCallRequest; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; + +public final class SpringAiTelemetryBuilder { + + private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-1.0"; + + private final OpenTelemetry openTelemetry; + private boolean captureMessageContent; + + private int contentMaxLength; + + private String captureMessageStrategy; + + SpringAiTelemetryBuilder(OpenTelemetry openTelemetry) { + this.openTelemetry = openTelemetry; + } + + /** Sets whether to capture message content in spans. Defaults to false. */ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { + this.captureMessageContent = captureMessageContent; + return this; + } + + /** Sets the maximum length of message content to capture. Defaults to 8192. */ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setContentMaxLength(int contentMaxLength) { + this.contentMaxLength = contentMaxLength; + return this; + } + + /** Sets the strategy to capture message content. Defaults to "span-attributes". 
*/ + @CanIgnoreReturnValue + public SpringAiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { + this.captureMessageStrategy = captureMessageStrategy; + return this; + } + + public SpringAiTelemetry build() { + MessageCaptureOptions messageCaptureOptions = + MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); + + Instrumenter chatClientInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiAgentAttributesExtractor.create(ChatClientAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiMessagesExtractor.create( + ChatClientAttributesGetter.INSTANCE, + ChatClientMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, + INSTRUMENTATION_NAME)) + .buildInstrumenter(); + + Instrumenter toolCallInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ToolCallAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiToolAttributesExtractor.create( + ToolCallAttributesGetter.INSTANCE, messageCaptureOptions)) + .buildInstrumenter(); + + return new SpringAiTelemetry( + chatClientInstrumenter, toolCallInstrumenter, messageCaptureOptions); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java new file mode 100644 index 000000000000..0f06be5ea993 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientAttributesGetter.java @@ -0,0 +1,221 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAgentAttributesGetter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; +import java.util.List; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; + +public enum ChatClientAttributesGetter + implements + GenAiAttributesGetter, + GenAiAgentAttributesGetter { + INSTANCE; + + @Override + public String getOperationName(ChatClientRequest request) { + return "invoke_agent"; + } + + @Override + public String getSystem(ChatClientRequest request) { + return "spring-ai"; + } + + @Nullable + @Override + public String getRequestModel(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getModel(); + } + + @Override + public String getOperationTarget(ChatClientRequest request) { + return getName(request); + } + + @Nullable + @Override + public Long getRequestSeed(ChatClientRequest request) { + // Spring AI currently does not support seed parameter + return null; + } + + @Nullable + @Override + public List 
getRequestEncodingFormats(ChatClientRequest request) { + // Spring AI currently does not support encoding_formats parameter + return null; + } + + @Nullable + @Override + public Double getRequestFrequencyPenalty(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getFrequencyPenalty(); + } + + @Nullable + @Override + public Long getRequestMaxTokens(ChatClientRequest request) { + if (request.prompt().getOptions() == null + || request.prompt().getOptions().getMaxTokens() == null) { + return null; + } + return request.prompt().getOptions().getMaxTokens().longValue(); + } + + @Nullable + @Override + public Double getRequestPresencePenalty(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getPresencePenalty(); + } + + @Nullable + @Override + public List getRequestStopSequences(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getStopSequences(); + } + + @Nullable + @Override + public Double getRequestTemperature(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getTemperature(); + } + + @Nullable + @Override + public Double getRequestTopK(ChatClientRequest request) { + if (request.prompt().getOptions() == null || request.prompt().getOptions().getTopK() == null) { + return null; + } + return request.prompt().getOptions().getTopK().doubleValue(); + } + + @Nullable + @Override + public Double getRequestTopP(ChatClientRequest request) { + if (request.prompt().getOptions() == null) { + return null; + } + return request.prompt().getOptions().getTopP(); + } + + @Override + public List getResponseFinishReasons( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getResult() == null + || response.chatResponse().getResult().getMetadata() == null + || response.chatResponse().getResult().getMetadata().getFinishReason() == null) { + return emptyList(); + } + + return singletonList( + response.chatResponse().getResult().getMetadata().getFinishReason().toLowerCase()); + } + + @Nullable + @Override + public String getResponseId(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null) { + return null; + } + + return response.chatResponse().getMetadata().getId(); + } + + @Nullable + @Override + public String getResponseModel(ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getModel() == null + || response.chatResponse().getMetadata().getModel().isEmpty()) { + return null; + } + + return response.chatResponse().getMetadata().getModel(); + } + + @Nullable + @Override + public Long getUsageInputTokens( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getUsage() == null + || response.chatResponse().getMetadata().getUsage().getPromptTokens() == null + || response.chatResponse().getMetadata().getUsage().getPromptTokens() == 
0) { + return null; + } + + return response.chatResponse().getMetadata().getUsage().getPromptTokens().longValue(); + } + + @Nullable + @Override + public Long getUsageOutputTokens( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (response == null + || response.chatResponse() == null + || response.chatResponse().getMetadata() == null + || response.chatResponse().getMetadata().getUsage() == null + || response.chatResponse().getMetadata().getUsage().getCompletionTokens() == null + || response.chatResponse().getMetadata().getUsage().getCompletionTokens() == 0) { + return null; + } + + return response.chatResponse().getMetadata().getUsage().getCompletionTokens().longValue(); + } + + @Override + public String getName(ChatClientRequest request) { + return "spring_ai chat_client"; + } + + @Nullable + @Override + public String getDescription(ChatClientRequest request) { + return null; + } + + @Nullable + @Override + public String getId(ChatClientRequest request) { + return null; + } + + @Nullable + @Override + public String getDataSourceId(ChatClientRequest request) { + return null; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java new file mode 100644 index 000000000000..c1887033b0fe --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessageBuffer.java @@ -0,0 +1,148 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; +import org.springframework.ai.chat.messages.AssistantMessage; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.metadata.ChatGenerationMetadata; +import org.springframework.ai.chat.model.Generation; + +final class ChatClientMessageBuffer { + private static final String TRUNCATE_FLAG = "...[truncated]"; + private final int index; + private final MessageCaptureOptions messageCaptureOptions; + + @Nullable private String finishReason; + + @Nullable private StringBuilder rawContentBuffer; + + @Nullable private Map toolCalls; + + ChatClientMessageBuffer(int index, MessageCaptureOptions messageCaptureOptions) { + this.index = index; + this.messageCaptureOptions = messageCaptureOptions; + } + + Generation toGeneration() { + List toolCalls; + if (this.toolCalls != null) { + toolCalls = new ArrayList<>(this.toolCalls.size()); + for (Map.Entry entry : this.toolCalls.entrySet()) { + if (entry.getValue() != null) { + String arguments; + if (entry.getValue().function.arguments != null) { + arguments = entry.getValue().function.arguments.toString(); + } else { + arguments = ""; + } + if (entry.getValue().type == null) { + entry.getValue().type = "function"; + } + if (entry.getValue().function.name == null) { + entry.getValue().function.name = ""; + } + toolCalls.add( + new ToolCall( + entry.getValue().id, + entry.getValue().type, 
+ entry.getValue().function.name, + arguments)); + } + } + } else { + toolCalls = Collections.emptyList(); + } + + String content = ""; + + if (this.rawContentBuffer != null) { + content = this.rawContentBuffer.toString(); + } + + return new Generation( + new AssistantMessage(content, Collections.emptyMap(), toolCalls), + ChatGenerationMetadata.builder().finishReason(this.finishReason).build()); + } + + void append(Generation generation) { + AssistantMessage message = generation.getOutput(); + if (message != null) { + if (this.messageCaptureOptions.captureMessageContent()) { + if (message.getText() != null) { + if (this.rawContentBuffer == null) { + this.rawContentBuffer = new StringBuilder(); + } + + String deltaContent = message.getText(); + if (this.rawContentBuffer.length() + < this.messageCaptureOptions.maxMessageContentLength()) { + if (this.rawContentBuffer.length() + deltaContent.length() + >= this.messageCaptureOptions.maxMessageContentLength()) { + deltaContent = + deltaContent.substring( + 0, + this.messageCaptureOptions.maxMessageContentLength() + - this.rawContentBuffer.length()); + this.rawContentBuffer.append(deltaContent).append(TRUNCATE_FLAG); + } else { + this.rawContentBuffer.append(deltaContent); + } + } + } + } + + if (message.hasToolCalls()) { + if (this.toolCalls == null) { + this.toolCalls = new HashMap<>(); + } + + for (int i = 0; i < message.getToolCalls().size(); i++) { + ToolCall toolCall = message.getToolCalls().get(i); + ToolCallBuffer buffer = + this.toolCalls.computeIfAbsent(i, unused -> new ToolCallBuffer(toolCall.id())); + + buffer.type = toolCall.type(); + buffer.function.name = toolCall.name(); + if (this.messageCaptureOptions.captureMessageContent()) { + if (buffer.function.arguments == null) { + buffer.function.arguments = new StringBuilder(); + } + buffer.function.arguments.append(toolCall.arguments()); + } + } + } + } + + ChatGenerationMetadata metadata = generation.getMetadata(); + if (metadata != null + && metadata.getFinishReason() != null + && !metadata.getFinishReason().isEmpty()) { + this.finishReason = metadata.getFinishReason(); + } + } + + private static class FunctionBuffer { + @Nullable String name; + @Nullable StringBuilder arguments; + } + + private static class ToolCallBuffer { + final String id; + final FunctionBuffer function = new FunctionBuffer(); + @Nullable String type; + + ToolCallBuffer(String id) { + this.id = id; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java new file mode 100644 index 000000000000..df203a5dd054 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientMessagesProvider.java @@ -0,0 +1,212 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.MessagePart; +import 
io.opentelemetry.instrumentation.api.genai.messages.OutputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.Role; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.TextPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallRequestPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallResponsePart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinition; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesProvider; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.messages.AssistantMessage; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.messages.Message; +import org.springframework.ai.chat.messages.MessageType; +import org.springframework.ai.chat.messages.ToolResponseMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; +import org.springframework.ai.chat.model.Generation; +import org.springframework.ai.model.tool.ToolCallingChatOptions; + +public class ChatClientMessagesProvider + implements GenAiMessagesProvider { + + private static final String TRUNCATE_FLAG = "...[truncated]"; + + private final MessageCaptureOptions messageCaptureOptions; + + ChatClientMessagesProvider(MessageCaptureOptions messageCaptureOptions) { + this.messageCaptureOptions = messageCaptureOptions; + } + + public static ChatClientMessagesProvider create(MessageCaptureOptions messageCaptureOptions) { + return new ChatClientMessagesProvider(messageCaptureOptions); + } + + @Nullable + @Override + public InputMessages inputMessages( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (!messageCaptureOptions.captureMessageContent() + || request.prompt().getInstructions() == null) { + return null; + } + + InputMessages inputMessages = InputMessages.create(); + for (Message msg : request.prompt().getInstructions()) { + if (msg.getMessageType() == MessageType.SYSTEM) { + inputMessages.append( + InputMessage.create(Role.SYSTEM, contentToMessageParts(msg.getText()))); + } else if (msg.getMessageType() == MessageType.USER) { + inputMessages.append(InputMessage.create(Role.USER, contentToMessageParts(msg.getText()))); + } else if (msg.getMessageType() == MessageType.ASSISTANT) { + AssistantMessage assistantMessage = (AssistantMessage) msg; + List messageParts = new ArrayList<>(); + + if (assistantMessage.getText() != null && !assistantMessage.getText().isEmpty()) { + messageParts.addAll(contentToMessageParts(assistantMessage.getText())); + } + + if (assistantMessage.hasToolCalls()) { + messageParts.addAll( + assistantMessage.getToolCalls().stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); + } else if (msg.getMessageType() == MessageType.TOOL) { + ToolResponseMessage toolResponseMessage = (ToolResponseMessage) msg; + inputMessages.append( + InputMessage.create( + Role.TOOL, 
contentToMessageParts(toolResponseMessage.getResponses()))); + } + } + return inputMessages; + } + + @Nullable + @Override + public OutputMessages outputMessages( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (!messageCaptureOptions.captureMessageContent() + || response == null + || response.chatResponse() == null + || response.chatResponse().getResults() == null) { + return null; + } + + OutputMessages outputMessages = OutputMessages.create(); + for (Generation generation : response.chatResponse().getResults()) { + AssistantMessage message = generation.getOutput(); + List messageParts = new ArrayList<>(); + if (message != null) { + if (message.getText() != null && !message.getText().isEmpty()) { + messageParts.addAll(contentToMessageParts(message.getText())); + } + + if (message.hasToolCalls()) { + messageParts.addAll( + message.getToolCalls().stream() + .map(this::toolCallToMessagePart) + .collect(Collectors.toList())); + } + } + + outputMessages.append( + OutputMessage.create( + Role.ASSISTANT, + messageParts, + generation.getMetadata().getFinishReason().toLowerCase())); + } + return outputMessages; + } + + @Nullable + @Override + public SystemInstructions systemInstructions( + ChatClientRequest request, @Nullable ChatClientResponse response) { + return null; + } + + @Nullable + @Override + public ToolDefinitions toolDefinitions( + ChatClientRequest request, @Nullable ChatClientResponse response) { + if (request.prompt().getOptions() == null + || !(request.prompt().getOptions() instanceof ToolCallingChatOptions options)) { + return null; + } + + ToolDefinitions toolDefinitions = ToolDefinitions.create(); + + // See: org.springframework.ai.model.tool.DefaultToolCallingManager.resolveToolDefinitions + options.getToolCallbacks().stream() + .map( + toolCallback -> { + String name = toolCallback.getToolDefinition().name(); + String type = "function"; + if (messageCaptureOptions.captureMessageContent()) { + return ToolDefinition.create( + type, name, toolCallback.getToolDefinition().description(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + }) + .filter(Objects::nonNull) + .forEach(toolDefinitions::append); + + for (String toolName : options.getToolNames()) { + // Skip the tool if it is already present in the request toolCallbacks. + // That might happen if a tool is defined in the options + // both as a ToolCallback and as a tool name. 
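+      // Name-only tools have no ToolCallback to read a description or input schema from,
+      // so they are recorded below with type "function" and the name alone.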
+ if (options.getToolCallbacks().stream() + .anyMatch(tool -> tool.getToolDefinition().name().equals(toolName))) { + continue; + } + toolDefinitions.append(ToolDefinition.create("function", toolName, null, null)); + } + + return toolDefinitions; + } + + private List contentToMessageParts(String content) { + return Collections.singletonList(TextPart.create(truncateTextContent(content))); + } + + private MessagePart toolCallToMessagePart(ToolCall call) { + if (call != null) { + return ToolCallRequestPart.create(call.id(), call.name(), call.arguments()); + } + return ToolCallRequestPart.create("unknown_function"); + } + + private List contentToMessageParts(List toolResponses) { + if (toolResponses == null) { + return Collections.singletonList(ToolCallResponsePart.create("")); + } + + return toolResponses.stream() + .map( + response -> + ToolCallResponsePart.create( + response.id(), truncateTextContent(response.responseData()))) + .collect(Collectors.toList()); + } + + private String truncateTextContent(String content) { + if (!content.endsWith(TRUNCATE_FLAG) + && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = + content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + } + return content; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java new file mode 100644 index 000000000000..10e81fab9011 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamListener.java @@ -0,0 +1,141 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import org.springframework.ai.chat.metadata.ChatResponseMetadata; +import org.springframework.ai.chat.metadata.DefaultUsage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.model.Generation; + +public final class ChatClientStreamListener { + + private final Context context; + private final ChatClientRequest request; + private final Instrumenter instrumenter; + private final MessageCaptureOptions messageCaptureOptions; + private final boolean newSpan; + private final AtomicBoolean hasEnded; + private final List chatClientMessageBuffers; + + // Aggregated metadata + private final AtomicLong inputTokens = new AtomicLong(0); + private final AtomicLong outputTokens = new AtomicLong(0); + private final AtomicReference requestId = new AtomicReference<>(); + private final AtomicReference model = new AtomicReference<>(); + + public 
ChatClientStreamListener( + Context context, + ChatClientRequest request, + Instrumenter instrumenter, + MessageCaptureOptions messageCaptureOptions, + boolean newSpan) { + this.context = context; + this.request = request; + this.instrumenter = instrumenter; + this.messageCaptureOptions = messageCaptureOptions; + this.newSpan = newSpan; + this.hasEnded = new AtomicBoolean(); + this.chatClientMessageBuffers = new ArrayList<>(); + } + + public void onChunk(ChatClientResponse chatClientChunk) { + if (chatClientChunk == null || chatClientChunk.chatResponse() == null) { + return; + } + + ChatResponse chunk = chatClientChunk.chatResponse(); + if (chunk.getMetadata() != null) { + if (chunk.getMetadata().getId() != null) { + requestId.set(chunk.getMetadata().getId()); + } + if (chunk.getMetadata().getUsage() != null) { + if (chunk.getMetadata().getUsage().getPromptTokens() != null) { + inputTokens.set(chunk.getMetadata().getUsage().getPromptTokens().longValue()); + } + if (chunk.getMetadata().getUsage().getCompletionTokens() != null) { + outputTokens.set(chunk.getMetadata().getUsage().getCompletionTokens().longValue()); + } + } + } + + if (chunk.getResults() != null) { + List generations = chunk.getResults(); + for (int i = 0; i < generations.size(); i++) { + while (chatClientMessageBuffers.size() <= i) { + chatClientMessageBuffers.add(null); + } + ChatClientMessageBuffer buffer = chatClientMessageBuffers.get(i); + if (buffer == null) { + buffer = new ChatClientMessageBuffer(i, messageCaptureOptions); + chatClientMessageBuffers.set(i, buffer); + } + + buffer.append(generations.get(i)); + } + } + } + + public void endSpan(@Nullable Throwable error) { + // Use an atomic operation since close() type of methods are exposed to the user + // and can come from any thread. + if (!this.hasEnded.compareAndSet(false, true)) { + return; + } + + if (this.chatClientMessageBuffers.isEmpty()) { + // Only happens if we got no chunks, so we have no response. 
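+      // Ending here records only request-side attributes; response-derived data
+      // (output messages, finish reasons, usage) is unavailable without any chunks.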
+ if (this.newSpan) { + this.instrumenter.end(this.context, this.request, null, error); + } + return; + } + + Integer inputTokens = null; + if (this.inputTokens.get() > 0) { + inputTokens = (int) this.inputTokens.get(); + } + + Integer outputTokens = null; + if (this.outputTokens.get() > 0) { + outputTokens = (int) this.outputTokens.get(); + } + + List generations = + this.chatClientMessageBuffers.stream() + .map(ChatClientMessageBuffer::toGeneration) + .collect(Collectors.toList()); + + ChatClientResponse response = + ChatClientResponse.builder() + .chatResponse( + ChatResponse.builder() + .generations(generations) + .metadata( + ChatResponseMetadata.builder() + .usage(new DefaultUsage(inputTokens, outputTokens)) + .id(requestId.get()) + .model(model.get()) + .build()) + .build()) + .build(); + + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, response, error); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java new file mode 100644 index 000000000000..8aff1300f28f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/ChatClientStreamWrapper.java @@ -0,0 +1,29 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; +import org.springframework.ai.chat.client.ChatClientResponse; +import reactor.core.publisher.Flux; + +public final class ChatClientStreamWrapper { + + public static Flux wrap( + Flux originFlux, + ChatClientStreamListener streamListener, + Context context) { + + Flux chatClientResponseFlux = + originFlux + .doOnNext(chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); + return ContextPropagationOperator.runWithContext(chatClientResponseFlux, context); + } + + private ChatClientStreamWrapper() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java new file mode 100644 index 000000000000..02a2c8db6796 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultCallResponseSpecInstrumentation.java @@ -0,0 +1,83 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static 
net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; + +@AutoService(TypeInstrumentation.class) +public class DefaultCallResponseSpecInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed( + "org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.chat.client.DefaultChatClient$DefaultCallResponseSpec"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod() + .and(named("doGetObservableChatClientResponse")) + .and(takesArguments(2)) + .and(isPrivate()) + .and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + this.getClass().getName() + "$DoGetObservableChatClientResponseAdvice"); + } + + @SuppressWarnings("unused") + public static class DoGetObservableChatClientResponseAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void doGetObservableChatClientResponseEnter( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + context = Context.current(); + + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatClientInstrumenter().start(context, request); + } + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void doGetObservableChatClientResponseExit( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Return ChatClientResponse response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + if (scope == null) { + return; + } + scope.close(); + + TELEMETRY.chatClientInstrumenter().end(context, request, response, throwable); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java new file mode 100644 index 000000000000..2624b2c71bca --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/chat/client/DefaultStreamResponseSpecInstrumentation.java @@ -0,0 +1,94 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.chat.client; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.isPrivate; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.client.ChatClientRequest; +import org.springframework.ai.chat.client.ChatClientResponse; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class DefaultStreamResponseSpecInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed( + "org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.chat.client.DefaultChatClient$DefaultStreamResponseSpec"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod() + .and(named("doGetObservableFluxChatResponse")) + .and(takesArguments(1)) + .and(isPrivate()) + .and(takesArgument(0, named("org.springframework.ai.chat.client.ChatClientRequest"))), + this.getClass().getName() + "$DoGetObservableFluxChatResponseAdvice"); + } + + @SuppressWarnings("unused") + public static class DoGetObservableFluxChatResponseAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void doGetObservableFluxChatResponseEnter( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatClientStreamListener streamListener) { + context = Context.current(); + + if (TELEMETRY.chatClientInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatClientInstrumenter().start(context, request); + streamListener = + new ChatClientStreamListener( + context, + request, + TELEMETRY.chatClientInstrumenter(), + TELEMETRY.messageCaptureOptions(), + true); + } + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void doGetObservableFluxChatResponseExit( + @Advice.Argument(0) ChatClientRequest request, + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatClientStreamListener streamListener) { + + if (throwable != null) { + // In case of exception, directly call end + TELEMETRY.chatClientInstrumenter().end(context, request, null, throwable); + return; + } + + if (streamListener != null) { + // Wrap the response to integrate the stream listener + response = ChatClientStreamWrapper.wrap(response, streamListener, context); + } + } + } +} diff --git 
a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java
new file mode 100644
index 000000000000..df6a310a6328
--- /dev/null
+++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/DefaultToolCallingManagerInstrumentation.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool;
+
+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed;
+import static net.bytebuddy.matcher.ElementMatchers.isMethod;
+import static net.bytebuddy.matcher.ElementMatchers.named;
+import static net.bytebuddy.matcher.ElementMatchers.takesArgument;
+import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
+
+import com.google.auto.service.AutoService;
+import io.opentelemetry.context.Context;
+import io.opentelemetry.context.Scope;
+import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation;
+import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer;
+import java.util.HashMap;
+import java.util.Map;
+import net.bytebuddy.asm.Advice;
+import net.bytebuddy.description.type.TypeDescription;
+import net.bytebuddy.matcher.ElementMatcher;
+import org.springframework.ai.chat.messages.AssistantMessage;
+
+@AutoService(TypeInstrumentation.class)
+public class DefaultToolCallingManagerInstrumentation implements TypeInstrumentation {
+
+  @Override
+  public ElementMatcher<ClassLoader> classLoaderOptimization() {
+    return hasClassesNamed("org.springframework.ai.model.tool.DefaultToolCallingManager");
+  }
+
+  @Override
+  public ElementMatcher<TypeDescription> typeMatcher() {
+    return named("org.springframework.ai.model.tool.DefaultToolCallingManager");
+  }
+
+  @Override
+  public void transform(TypeTransformer transformer) {
+    transformer.applyAdviceToMethod(
+        isMethod()
+            .and(named("executeToolCall"))
+            .and(takesArguments(3))
+            .and(takesArgument(1, named("org.springframework.ai.chat.messages.AssistantMessage"))),
+        this.getClass().getName() + "$ExecuteToolCallAdvice");
+  }
+
+  @SuppressWarnings("unused")
+  public static class ExecuteToolCallAdvice {
+
+    @Advice.OnMethodEnter(suppress = Throwable.class)
+    public static void executeToolCallEnter(
+        @Advice.Argument(1) AssistantMessage assistantMessage,
+        @Advice.Local("otelContext") Context context,
+        @Advice.Local("otelScope") Scope scope) {
+
+      context = Context.current();
+
+      if (assistantMessage != null && assistantMessage.getToolCalls() != null) {
+        Map<String, String> toolNameToIdMap = new HashMap<>();
+
+        for (AssistantMessage.ToolCall toolCall : assistantMessage.getToolCalls()) {
+          if (toolCall.id() != null && toolCall.name() != null) {
+            toolNameToIdMap.put(toolCall.name(), toolCall.id());
+          }
+        }
+
+        // store the tool call id map in the context so the ToolCallback advice can look up ids by tool name
+        if (!toolNameToIdMap.isEmpty()) {
+          context = ToolCallContext.storeToolCalls(context, toolNameToIdMap);
+        }
+        scope = context.makeCurrent();
+      }
+    }
+
+    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
+    public static void executeToolCallExit(@Advice.Local("otelScope") Scope scope) {
+      if (scope == null) {
+        return;
+      }
+      scope.close();
+    }
+  }
+}
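The tool classes in this part of the patch (DefaultToolCallingManagerInstrumentation above, ToolCallContext and ToolCallbackInstrumentation below) cooperate through OpenTelemetry's immutable Context rather than through direct references: the executeToolCall advice attaches a tool-name-to-call-id map under a private ContextKey and makes that context current, and the ToolCallback advice reads the id back while that scope is still active. A minimal, self-contained sketch of the hand-off pattern, with illustrative class and key names that are not part of the patch:

import io.opentelemetry.context.Context;
import io.opentelemetry.context.ContextKey;
import io.opentelemetry.context.Scope;
import java.util.HashMap;
import java.util.Map;

final class ToolCallIdHandOffSketch {
  // Private key: only code that can see this constant can read the stored map.
  private static final ContextKey<Map<String, String>> KEY =
      ContextKey.named("demo-tool-call-ids");

  public static void main(String[] args) {
    Map<String, String> idsByName = new HashMap<>();
    idsByName.put("get_weather", "call_123");

    // "executeToolCall" side: attach the map and make the context current.
    Context withIds = Context.current().with(KEY, idsByName);
    try (Scope ignored = withIds.makeCurrent()) {
      // "ToolCallback.call" side: read the id back from the current context.
      Map<String, String> stored = Context.current().get(KEY);
      String callId = stored == null ? null : stored.get("get_weather");
      System.out.println("tool call id = " + callId); // prints call_123
    }
    // Outside the scope the key is no longer visible.
    System.out.println(Context.current().get(KEY)); // prints null
  }
}

Because the map travels in the current Context, neither advice class needs to reference the other; the only requirement is that the callback runs while the stored context is still current, which holds when DefaultToolCallingManager invokes the callback synchronously inside executeToolCall.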
diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java
new file mode 100644
index 000000000000..b9f0d2e49dac
--- /dev/null
+++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallAttributesGetter.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool;
+
+import io.opentelemetry.instrumentation.api.instrumenter.genai.tool.GenAiToolAttributesGetter;
+import javax.annotation.Nullable;
+
+public enum ToolCallAttributesGetter implements GenAiToolAttributesGetter<ToolCallRequest, String> {
+  INSTANCE;
+
+  @Override
+  public String getOperationName(ToolCallRequest request) {
+    return request.getOperationName();
+  }
+
+  @Override
+  public String getOperationTarget(ToolCallRequest request) {
+    return getToolName(request);
+  }
+
+  @Override
+  public String getToolDescription(ToolCallRequest request) {
+    return request.getDescription();
+  }
+
+  @Override
+  public String getToolName(ToolCallRequest request) {
+    return request.getName();
+  }
+
+  @Override
+  public String getToolType(ToolCallRequest request) {
+    return "function";
+  }
+
+  @Nullable
+  @Override
+  public String getToolCallArguments(ToolCallRequest request) {
+    return request.getToolInput();
+  }
+
+  @Nullable
+  @Override
+  public String getToolCallId(ToolCallRequest request, String response) {
+    return request.getToolCallId();
+  }
+
+  @Nullable
+  @Override
+  public String getToolCallResult(ToolCallRequest request, String response) {
+    return response;
+  }
+}
diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java
new file mode 100644
index 000000000000..f9e04c52a316
--- /dev/null
+++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallContext.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool;
+
+import io.opentelemetry.context.Context;
+import io.opentelemetry.context.ContextKey;
+import java.util.Map;
+
+/** Context holder for the tool name to tool call id map. */
+public final class ToolCallContext {
+
+  private static final ContextKey<Map<String, String>> TOOL_CALL_IDS_KEY =
+      ContextKey.named("spring-ai-tool-call-ids");
+
+  private ToolCallContext() {}
+
+  public static Context storeToolCalls(Context context, Map<String, String> toolNameToIdMap) {
+    if (toolNameToIdMap == null || toolNameToIdMap.isEmpty()) {
+      return context;
+    }
+    return context.with(TOOL_CALL_IDS_KEY, toolNameToIdMap);
+  }
+
+  public static String getToolCallId(Context context, String toolName) {
+    if (context == null || toolName == null) {
+      return null;
+    }
+
+    Map<String, String> toolCallIds = context.get(TOOL_CALL_IDS_KEY);
+    if (toolCallIds == null) {
+      return null;
+    }
+
+    return toolCallIds.get(toolName);
+  }
+}
diff --git
a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java new file mode 100644 index 000000000000..38b0d3e34e91 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallRequest.java @@ -0,0 +1,57 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import org.springframework.ai.tool.definition.ToolDefinition; + +public final class ToolCallRequest { + + private final String toolInput; + private final String toolCallId; + private final ToolDefinition toolDefinition; + + private ToolCallRequest(String toolInput, String toolCallId, ToolDefinition toolDefinition) { + this.toolInput = toolInput; + this.toolCallId = toolCallId; + this.toolDefinition = toolDefinition; + } + + public static ToolCallRequest create( + String toolInput, String toolCallId, ToolDefinition toolDefinition) { + return new ToolCallRequest(toolInput, toolCallId, toolDefinition); + } + + public String getOperationName() { + return "execute_tool"; + } + + public String getType() { + // spring ai support function only + return "function"; + } + + public String getName() { + if (toolDefinition == null) { + return null; + } + return toolDefinition.name(); + } + + public String getDescription() { + if (toolDefinition == null) { + return null; + } + return toolDefinition.description(); + } + + public String getToolInput() { + return toolInput; + } + + public String getToolCallId() { + return toolCallId; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java new file mode 100644 index 000000000000..f6b9cf04e0da --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/tool/ToolCallbackInstrumentation.java @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.tool; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0.SpringAiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import 
io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.tool.ToolCallback; + +@AutoService(TypeInstrumentation.class) +public class ToolCallbackInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.tool.ToolCallback"); + } + + @Override + public ElementMatcher typeMatcher() { + return implementsInterface(named("org.springframework.ai.tool.ToolCallback")); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod() + .and(named("call")) + .and(takesArguments(2)) + .and(takesArgument(0, named("java.lang.String"))) + .and(returns(named("java.lang.String"))), + this.getClass().getName() + "$CallAdvice"); + } + + @SuppressWarnings("unused") + public static class CallAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void callEnter( + @Advice.This ToolCallback toolCallback, + @Advice.Argument(0) String toolInput, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope, + @Advice.Local("toolCallRequest") ToolCallRequest request) { + context = Context.current(); + + // get tool call id from context + String toolCallId = + ToolCallContext.getToolCallId(context, toolCallback.getToolDefinition().name()); + request = ToolCallRequest.create(toolInput, toolCallId, toolCallback.getToolDefinition()); + + if (TELEMETRY.toolCallInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.toolCallInstrumenter().start(context, request); + } + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void callExit( + @Advice.Return String result, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope, + @Advice.Local("toolCallRequest") ToolCallRequest request) { + if (scope == null) { + return; + } + scope.close(); + TELEMETRY.toolCallInstrumenter().end(context, request, result, throwable); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java new file mode 100644 index 000000000000..670bb2f7d717 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/ChatClientTest.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.v1_0; + +import io.opentelemetry.instrumentation.spring.ai.v1_0.AbstractChatClientTest; +import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import org.junit.jupiter.api.extension.RegisterExtension; + +public class ChatClientTest extends AbstractChatClientTest { + + @RegisterExtension + private static final AgentInstrumentationExtension testing = + AgentInstrumentationExtension.create(); + + @Override + protected InstrumentationExtension 
getTesting() { + return testing; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/metadata.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/metadata.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts new file mode 100644 index 000000000000..3921db5aa237 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/build.gradle.kts @@ -0,0 +1,15 @@ +plugins { + id("otel.java-conventions") +} + +otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +dependencies { + implementation(project(":testing-common")) + + api("org.springframework.ai:spring-ai-openai:1.0.0") + api("org.springframework.ai:spring-ai-client-chat:1.0.0") + api(project(":instrumentation-api-incubator")) +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java new file mode 100644 index 000000000000..15b3a92546d7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractChatClient.java @@ -0,0 +1,432 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.spring.ai.v1_0; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.AgentIncubatingAttributes.GEN_AI_AGENT_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.EXECUTE_TOOL; +import static 
io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.INVOKE_AGENT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_DESCRIPTION; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiToolIncubatingAttributes.GEN_AI_TOOL_TYPE; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies; +import static java.util.Arrays.asList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.opentelemetry.sdk.trace.data.StatusData; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; +import org.springframework.ai.chat.client.ChatClient; +import org.springframework.ai.chat.messages.SystemMessage; +import org.springframework.ai.chat.messages.UserMessage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.prompt.ChatOptions; +import org.springframework.ai.chat.prompt.Prompt; +import org.springframework.ai.openai.OpenAiChatOptions; + +public abstract class AbstractChatClientTest extends AbstractSpringAiTest { + + protected static final String TEST_CHAT_MODEL = "qwen3-coder-flash"; + protected static final String TEST_CHAT_INPUT = + "Answer in up to 3 words: Which ocean contains Bouvet Island?"; + protected static final String TEST_AGENT_NAME = "spring_ai chat_client"; + protected static final String TEST_TOOL_NAME = "get_weather"; + + @Test + void basic() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + ChatClient chatClient = getChatClient(); + + ChatResponse response = chatClient.prompt(prompt).call().chatResponse(); + String content = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + span -> + span.hasName(CHAT + 
" " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } + + @Test + void stream() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + ChatClient chatClient = getChatClient(); + + List chunks = + chatClient.prompt(prompt).stream().chatResponse().toStream().collect(Collectors.toList()); + + String fullMessage = + chunks.stream() + .map( + cc -> { + if (cc.getResults().isEmpty()) { + return Optional.empty(); + } + return Optional.of(cc.getResults().get(0).getOutput().getText()); + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.joining()); + + String content = "South Atlantic"; + assertEquals(fullMessage, content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))), + span -> + span.hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } + + @Test + void with400Error() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); + ChatClient chatClient = getChatClient(); + + Throwable thrown = catchThrowable(() -> chatClient.prompt(prompt).call().chatResponse()); + assertThat(thrown).isInstanceOf(Exception.class); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasStatus(StatusData.error()) + .hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, "gpt-4o"), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + } + + @Test + void toolCalls() { + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + 
.model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); + + ChatClient chatClient = getChatClient(); + + ChatResponse response = chatClient.prompt(prompt).call().chatResponse(); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 739L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 76L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 815L), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> + messages.contains( + "The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> + messages.contains( + "The location to get the current temperature for"))), + span -> + span.hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 331L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 45L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 376L)), + // 2 spans are compressed into 1 span + span -> + span.hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), + equalTo(GEN_AI_SPAN_KIND, "TOOL"), + equalTo( + GEN_AI_TOOL_DESCRIPTION, + "The location to get the current temperature for"), + equalTo(GEN_AI_TOOL_TYPE, "function"), + equalTo(GEN_AI_TOOL_NAME, TEST_TOOL_NAME)), + span -> + span.hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 408L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 31L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 439L)))); + } + + @Test + void streamToolCalls() { + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + 
.text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(getToolCallbacks()) + .build()) + .build(); + + ChatClient chatClient = getChatClient(); + + List chunks = + chatClient.prompt(prompt).stream().chatResponse().toStream().collect(Collectors.toList()); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasName(INVOKE_AGENT + " " + TEST_AGENT_NAME) + .hasAttributesSatisfying( + equalTo(GEN_AI_AGENT_NAME, TEST_AGENT_NAME), + equalTo(GEN_AI_PROVIDER_NAME, "spring-ai"), + equalTo(GEN_AI_OPERATION_NAME, INVOKE_AGENT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "AGENT"), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> + messages.contains( + "The current weather is as follows:\\n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> + messages.contains( + "The location to get the current temperature for"))), + span -> + span.hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM")), + // 2 spans are compressed into 1 span + span -> + span.hasName(EXECUTE_TOOL + " " + TEST_TOOL_NAME) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, EXECUTE_TOOL), + equalTo(GEN_AI_SPAN_KIND, "TOOL"), + equalTo( + GEN_AI_TOOL_DESCRIPTION, + "The location to get the current temperature for"), + equalTo(GEN_AI_TOOL_TYPE, "function"), + equalTo(GEN_AI_TOOL_NAME, TEST_TOOL_NAME)), + span -> + span.hasName(CHAT + " " + TEST_CHAT_MODEL) + .hasParent(trace.getSpan(0)) + .hasAttributesSatisfying( + equalTo(GEN_AI_OPERATION_NAME, CHAT), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM")))); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java new file mode 100644 index 000000000000..d75df9e89b1e --- /dev/null +++ 
b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/v1_0/AbstractSpringAiTest.java @@ -0,0 +1,125 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.spring.ai.v1_0; + +import static java.util.Collections.singletonList; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import io.opentelemetry.instrumentation.testing.recording.RecordingExtension; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Version; +import java.util.List; +import java.util.function.Function; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.springframework.ai.chat.client.ChatClient; +import org.springframework.ai.model.tool.ToolCallingManager; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.api.OpenAiApi; +import org.springframework.ai.tool.ToolCallback; +import org.springframework.ai.tool.function.FunctionToolCallback; +import org.springframework.ai.tool.resolution.StaticToolCallbackResolver; +import org.springframework.http.client.JdkClientHttpRequestFactory; +import org.springframework.http.client.reactive.JdkClientHttpConnector; +import org.springframework.web.client.RestClient; +import org.springframework.web.reactive.function.client.WebClient; + +public abstract class AbstractSpringAiTest { + + protected static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-1.0"; + + private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; + + @RegisterExtension static final RecordingExtension recording = new RecordingExtension(API_URL); + + protected abstract InstrumentationExtension getTesting(); + + private OpenAiApi openAiApi; + + private OpenAiChatModel chatModel; + + protected final OpenAiApi getOpenAiApi() { + if (openAiApi == null) { + HttpClient httpClient = HttpClient.newBuilder().version(Version.HTTP_1_1).build(); + + OpenAiApi.Builder builder = + OpenAiApi.builder() + .restClientBuilder( + RestClient.builder().requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder( + WebClient.builder().clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); + if (recording.isRecording()) { + builder.apiKey(System.getenv("OPENAI_API_KEY")); + } else { + builder.apiKey("unused"); + } + openAiApi = builder.build(); + } + return openAiApi; + } + + protected final ToolCallingManager getToolCallingManager() { + return ToolCallingManager.builder() + .toolCallbackResolver(new StaticToolCallbackResolver(getToolCallbacks())) + .build(); + } + + protected final OpenAiChatModel getChatModel() { + if (chatModel == null) { + chatModel = + OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolCallingManager(getToolCallingManager()) + .build(); + } + return chatModel; + } + + protected final ChatClient getChatClient() { + return ChatClient.builder(getChatModel()).build(); + } + + protected final List getToolCallbacks() { + return singletonList( + FunctionToolCallback.builder("get_weather", new GetWeatherFunction()) + .description("The location to get the current temperature for") + .inputType(ToolInput.class) + 
.build()); + } + + @JsonClassDescription("The location to get the current temperature for") + public static class ToolInput { + @JsonPropertyDescription("location") + private String location; + + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } + + @JsonCreator + public ToolInput(@JsonProperty("location") String location) { + this.location = location; + } + } + + private static class GetWeatherFunction implements Function { + @Override + public String apply(ToolInput location) { + if (location.getLocation().contains("London")) { + return "15 degrees and raining"; + } + return "25 degrees and sunny"; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml new file mode 100644 index 000000000000..7326c6de9b3e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.basic.yaml @@ -0,0 +1,41 @@ +--- +id: 8fafdbfc-2cc0-4198-85a0-3cdbe8eebaf4 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758182305,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-443cc847-7f2c-486f-ba86-6383748b8842\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 443cc847-7f2c-486f-ba86-6383748b8842 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "261" + req-arrive-time: "1758182305225" + resp-start-time: "1758182305486" + x-envoy-upstream-service-time: "260" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:25 GMT" + server: istio-envoy +uuid: 8fafdbfc-2cc0-4198-85a0-3cdbe8eebaf4 +persistent: true +insertionIndex: 32 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml new file mode 100644 index 000000000000..ef07d4b31ec7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.stream.yaml @@ -0,0 +1,47 @@ +--- +id: 73a6455e-643f-4321-b633-1a1b98e70f42 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains 
Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"delta":{"content":" Atlantic"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182302,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-a9fd1b06-0202-4e61-8105-ba5e150d6718"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: a9fd1b06-0202-4e61-8105-ba5e150d6718 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "243" + req-arrive-time: "1758182301783" + resp-start-time: "1758182302027" + x-envoy-upstream-service-time: "242" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:21 GMT" + server: istio-envoy +uuid: 73a6455e-643f-4321-b633-1a1b98e70f42 +persistent: true +insertionIndex: 8 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml new file mode 100644 index 000000000000..56c1af245723 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.streamtoolcalls.yaml @@ -0,0 +1,198 @@ +--- +id: 404f68b7-6b67-4297-851b-e79c6e4962da +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: 
{"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"call_5ae3e6e00f414bc08b14c713","type":"function","function":{"name":"get_weather","arguments":""}}],"role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"New York City"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"\""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"call_769dac36ee3a449984c540e7","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"London"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"\"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: 
{"choices":[{"delta":{"tool_calls":[{"function":{"arguments":""},"index":1,"id":"","type":"function"}]},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: {"choices":[{"finish_reason":"tool_calls","delta":{},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182300,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-155ae8e9-0fbb-4a0e-997d-96e4c8e79c46"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 155ae8e9-0fbb-4a0e-997d-96e4c8e79c46 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "266" + req-arrive-time: "1758182299922" + resp-start-time: "1758182300189" + x-envoy-upstream-service-time: "265" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:20 GMT" + server: istio-envoy +uuid: 404f68b7-6b67-4297-851b-e79c6e4962da +persistent: true +insertionIndex: 2 +--- +id: 5fc59074-53b1-46dc-a21e-76e5545e985d +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_5ae3e6e00f414bc08b14c713", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_769dac36ee3a449984c540e7", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" + } + } ] + }, { + "content" : "\"25 degrees and sunny\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_5ae3e6e00f414bc08b14c713" + }, { + "content" : "\"15 degrees and raining\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_769dac36ee3a449984c540e7" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"The","role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" current"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" 
weather"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" is"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" as follows:\n-"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" **New York City"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":"**: 25"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" degrees and sunny.\n"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":"- **London**:"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" 15 degrees"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"delta":{"content":" and raining."},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758182301,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-cf97005f-1370-480f-8702-edd4f33f3dcf"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: cf97005f-1370-480f-8702-edd4f33f3dcf + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "215" + req-arrive-time: "1758182300832" + resp-start-time: "1758182301048" + x-envoy-upstream-service-time: "215" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:21 GMT" + server: istio-envoy +uuid: 5fc59074-53b1-46dc-a21e-76e5545e985d +persistent: true +insertionIndex: 3 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml 
b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml new file mode 100644 index 000000000000..f51a7b4d3b8e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.toolcalls.yaml @@ -0,0 +1,163 @@ +--- +id: a4b86c3d-75a1-40bd-ac90-93a2d1d062a8 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"\",\"role\":\"assistant\",\"tool_calls\"\ + :[{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"New York City\\\"}\",\"\ + name\":\"get_weather\"},\"id\":\"call_f964ea7704d446a8867e951a\",\"index\":0,\"\ + type\":\"function\"},{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"London\\\ + \"}\",\"name\":\"get_weather\"},\"id\":\"call_b308af719e54417396b302e9\",\"index\"\ + :1,\"type\":\"function\"}]},\"finish_reason\":\"tool_calls\",\"index\":0,\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":331,\"completion_tokens\"\ + :45,\"total_tokens\":376},\"created\":1758182304,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-44c186a0-8a78-4a38-8d82-4acd82eb6a54\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 44c186a0-8a78-4a38-8d82-4acd82eb6a54 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "609" + req-arrive-time: "1758182303354" + resp-start-time: "1758182303963" + x-envoy-upstream-service-time: "608" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:23 GMT" + server: istio-envoy +uuid: a4b86c3d-75a1-40bd-ac90-93a2d1d062a8 +persistent: true +insertionIndex: 14 +--- +id: 55b829f9-d19a-431a-b61f-172e0db88979 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_f964ea7704d446a8867e951a", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_b308af719e54417396b302e9", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" 
+ } + } ] + }, { + "content" : "\"25 degrees and sunny\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_f964ea7704d446a8867e951a" + }, { + "content" : "\"15 degrees and raining\"", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_b308af719e54417396b302e9" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "description" : "The location to get the current temperature for", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string", + "description" : "location" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"The current weather is as follows:\\\ + n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and raining.\"\ + ,\"role\":\"assistant\"},\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"\ + object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":408,\"completion_tokens\"\ + :31,\"total_tokens\":439},\"created\":1758182305,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-fcaf6b84-ec12-420a-bcd3-ffbe1411d7bd\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: fcaf6b84-ec12-420a-bcd3-ffbe1411d7bd + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "596" + req-arrive-time: "1758182304162" + resp-start-time: "1758182304759" + x-envoy-upstream-service-time: "595" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 07:58:24 GMT" + server: istio-envoy +uuid: 55b829f9-d19a-431a-b61f-172e0db88979 +persistent: true +insertionIndex: 15 diff --git a/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml new file mode 100644 index 000000000000..1f8598d64aaa --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.v1_0.abstractchatclienttest.with400error.yaml @@ -0,0 +1,38 @@ +--- +id: c54797f6-72e9-482e-ac4c-c46624a7e78c +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "gpt-4o", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 404 + body: "{\"error\":{\"message\":\"The model `gpt-4o` does not exist or you do not\ + \ have access to it.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\"\ + :\"model_not_found\"},\"request_id\":\"6739e7e5-b528-4989-ac08-e9e293c378f3\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 6739e7e5-b528-4989-ac08-e9e293c378f3 + content-type: application/json + req-cost-time: "9" + 
req-arrive-time: "1758186777371" + resp-start-time: "1758186777380" + x-envoy-upstream-service-time: "8" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 09:12:57 GMT" + server: istio-envoy +uuid: c54797f6-72e9-482e-ac4c-c46624a7e78c +persistent: true +insertionIndex: 14 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts new file mode 100644 index 000000000000..6ea07daf793f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/build.gradle.kts @@ -0,0 +1,69 @@ +plugins { + id("otel.javaagent-instrumentation") +} + +otelJava { + // Spring AI OpenAI requires java 17 (same as Spring AI) + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +muzzle { + pass { + group.set("org.springframework.ai") + module.set("spring-ai-openai") + versions.set("(,)") + } +} + +repositories { + mavenLocal() + maven { + url = uri("https://repo.spring.io/milestone") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + } + maven { + url = uri("https://repo.spring.io/snapshot") + content { + includeGroup("org.springframework.ai") + includeGroup("org.springframework.boot") + includeGroup("org.springframework") + } + mavenContent { + snapshotsOnly() + } + } + mavenCentral() +} + +dependencies { + library("io.projectreactor:reactor-core:3.7.0") + library("org.springframework.ai:spring-ai-openai:1.0.0") + library("org.springframework.ai:spring-ai-model:1.0.0") + + implementation(project(":instrumentation:reactor:reactor-3.1:library")) + + bootstrap(project(":instrumentation:reactor:reactor-3.1:bootstrap")) + + testImplementation(project(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing")) +} + +tasks { + withType().configureEach { + val latestDepTest = findProperty("testLatestDeps") as Boolean + systemProperty("testLatestDeps", latestDepTest) + // spring ai requires java 17 + if (latestDepTest) { + otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) + } + } + + // TODO run tests both with and without genai message capture + systemProperty("otel.instrumentation.genai.capture-message-content", "true") + systemProperty("collectMetadata", findProperty("collectMetadata")?.toString() ?: "false") + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java new file mode 100644 index 000000000000..9306a3a10135 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelAttributesGetter.java @@ -0,0 +1,172 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static java.util.Collections.emptyList; + +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesGetter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import 
org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; + +enum ChatModelAttributesGetter + implements GenAiAttributesGetter { + INSTANCE; + + @Override + public String getOperationName(ChatCompletionRequest request) { + return GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; + } + + @Override + public String getSystem(ChatCompletionRequest request) { + return GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; + } + + @Nullable + @Override + public String getRequestModel(ChatCompletionRequest request) { + return request.model(); + } + + @Nullable + @Override + public String getOperationTarget(ChatCompletionRequest request) { + return getRequestModel(request); + } + + @Nullable + @Override + public Long getRequestSeed(ChatCompletionRequest request) { + if (request.seed() == null) { + return null; + } + return Long.valueOf(request.seed()); + } + + @Nullable + @Override + public List getRequestEncodingFormats(ChatCompletionRequest request) { + return null; + } + + @Nullable + @Override + public Double getRequestFrequencyPenalty(ChatCompletionRequest request) { + return request.frequencyPenalty(); + } + + @Nullable + @Override + public Long getRequestMaxTokens(ChatCompletionRequest request) { + if (request.maxTokens() == null && request.maxCompletionTokens() == null) { + return null; + } + // Use maxCompletionTokens if available, otherwise fall back to maxTokens + Integer maxTokens = + request.maxCompletionTokens() != null ? request.maxCompletionTokens() : request.maxTokens(); + return maxTokens != null ? Long.valueOf(maxTokens) : null; + } + + @Nullable + @Override + public Double getRequestPresencePenalty(ChatCompletionRequest request) { + return request.presencePenalty(); + } + + @Nullable + @Override + public List getRequestStopSequences(ChatCompletionRequest request) { + if (request.stop() == null) { + return null; + } + return request.stop(); + } + + @Nullable + @Override + public Double getRequestTemperature(ChatCompletionRequest request) { + return request.temperature(); + } + + @Nullable + @Override + public Double getRequestTopK(ChatCompletionRequest request) { + // OpenAI doesn't support top_k parameter + return null; + } + + @Nullable + @Override + public Double getRequestTopP(ChatCompletionRequest request) { + return request.topP(); + } + + @Nullable + @Override + public Long getChoiceCount(ChatCompletionRequest request) { + if (request.n() == null) { + return null; + } + return Long.valueOf(request.n()); + } + + @Override + public List getResponseFinishReasons( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null || response.choices() == null) { + return emptyList(); + } + return response.choices().stream() + .map( + choice -> + choice.finishReason() != null ? 
choice.finishReason().name().toLowerCase() : "") + .collect(Collectors.toList()); + } + + @Override + @Nullable + public String getResponseId(ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null) { + return null; + } + return response.id(); + } + + @Override + @Nullable + public String getResponseModel(ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null) { + return null; + } + return response.model(); + } + + @Override + @Nullable + public Long getUsageInputTokens( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null || response.usage() == null || response.usage().promptTokens() == null) { + return null; + } + return Long.valueOf(response.usage().promptTokens()); + } + + @Override + @Nullable + public Long getUsageOutputTokens( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (response == null + || response.usage() == null + || response.usage().completionTokens() == null) { + return null; + } + return Long.valueOf(response.usage().completionTokens()); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java new file mode 100644 index 000000000000..faea7519eab0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessageBuffer.java @@ -0,0 +1,159 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionFinishReason; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ChatCompletionFunction; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.Role; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; + +final class ChatModelMessageBuffer { + private static final String TRUNCATE_FLAG = "...[truncated]"; + private final int index; + private final MessageCaptureOptions messageCaptureOptions; + + @Nullable private ChatCompletionFinishReason finishReason; + + @Nullable private StringBuilder rawContent; + + @Nullable private Role role; + + @Nullable private String name; + + @Nullable private String toolCallId; + + @Nullable private Map toolCalls; + + ChatModelMessageBuffer(int index, MessageCaptureOptions messageCaptureOptions) { + this.index = index; + this.messageCaptureOptions = messageCaptureOptions; + } + + Choice toChoice() { + List toolCalls = null; + if (this.toolCalls != null) { + toolCalls = new ArrayList<>(this.toolCalls.size()); + for (Map.Entry entry : this.toolCalls.entrySet()) { + if (entry.getValue() != null) { + String arguments = null; + if (entry.getValue().function.arguments != null) { + arguments = 
entry.getValue().function.arguments.toString(); + } + toolCalls.add( + new ToolCall( + entry.getValue().id, + entry.getValue().type, + new ChatCompletionFunction(entry.getValue().function.name, arguments))); + } + } + } + + String content = ""; + // Type of content is String for OpenAI + if (rawContent != null) { + content = rawContent.toString(); + } + + return new Choice( + finishReason, + index, + new ChatCompletionMessage(content, role, name, toolCallId, toolCalls, null, null, null), + null); + } + + void append(Choice choice) { + if (choice.message() != null) { + if (this.messageCaptureOptions.captureMessageContent()) { + // Type of content is String for OpenAI + if (choice.message().rawContent() instanceof String) { + if (this.rawContent == null) { + this.rawContent = new StringBuilder(); + } + + String deltaContent = (String) choice.message().rawContent(); + if (this.rawContent.length() < this.messageCaptureOptions.maxMessageContentLength()) { + if (this.rawContent.length() + deltaContent.length() + >= this.messageCaptureOptions.maxMessageContentLength()) { + deltaContent = + deltaContent.substring( + 0, + this.messageCaptureOptions.maxMessageContentLength() + - this.rawContent.length()); + this.rawContent.append(deltaContent).append(TRUNCATE_FLAG); + } else { + this.rawContent.append(deltaContent); + } + } + } + } + + if (choice.message().toolCalls() != null) { + if (this.toolCalls == null) { + this.toolCalls = new HashMap<>(); + } + + for (int i = 0; i < choice.message().toolCalls().size(); i++) { + ToolCall toolCall = choice.message().toolCalls().get(i); + ToolCallBuffer buffer = + this.toolCalls.computeIfAbsent(i, unused -> new ToolCallBuffer(toolCall.id())); + if (toolCall.type() != null) { + buffer.type = toolCall.type(); + } + + if (toolCall.function() != null) { + if (toolCall.function().name() != null) { + buffer.function.name = toolCall.function().name(); + } + if (this.messageCaptureOptions.captureMessageContent() + && toolCall.function().arguments() != null) { + if (buffer.function.arguments == null) { + buffer.function.arguments = new StringBuilder(); + } + buffer.function.arguments.append(toolCall.function().arguments()); + } + } + } + } + + if (choice.message().role() != null) { + this.role = choice.message().role(); + } + if (choice.message().name() != null) { + this.name = choice.message().name(); + } + if (choice.message().toolCallId() != null) { + this.toolCallId = choice.message().toolCallId(); + } + } + + if (choice.finishReason() != null) { + this.finishReason = choice.finishReason(); + } + } + + private static class FunctionBuffer { + @Nullable String name; + @Nullable StringBuilder arguments; + } + + private static class ToolCallBuffer { + final String id; + final FunctionBuffer function = new FunctionBuffer(); + @Nullable String type; + + ToolCallBuffer(String id) { + this.id = id; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java new file mode 100644 index 000000000000..e1ee8734eb00 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelMessagesProvider.java @@ -0,0 +1,236 @@ +/* + * Copyright The OpenTelemetry Authors + * 
SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.InputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.MessagePart; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessage; +import io.opentelemetry.instrumentation.api.genai.messages.OutputMessages; +import io.opentelemetry.instrumentation.api.genai.messages.Role; +import io.opentelemetry.instrumentation.api.genai.messages.SystemInstructions; +import io.opentelemetry.instrumentation.api.genai.messages.TextPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallRequestPart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolCallResponsePart; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinition; +import io.opentelemetry.instrumentation.api.genai.messages.ToolDefinitions; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesProvider; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionMessage.ToolCall; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; + +public final class ChatModelMessagesProvider + implements GenAiMessagesProvider { + + private static final String TRUNCATE_FLAG = "...[truncated]"; + + private final MessageCaptureOptions messageCaptureOptions; + + ChatModelMessagesProvider(MessageCaptureOptions messageCaptureOptions) { + this.messageCaptureOptions = messageCaptureOptions; + } + + public static ChatModelMessagesProvider create(MessageCaptureOptions messageCaptureOptions) { + return new ChatModelMessagesProvider(messageCaptureOptions); + } + + @Nullable + @Override + public InputMessages inputMessages( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (!messageCaptureOptions.captureMessageContent() || request.messages() == null) { + return null; + } + + InputMessages inputMessages = InputMessages.create(); + for (ChatCompletionMessage msg : request.messages()) { + + if (msg.role() == ChatCompletionMessage.Role.SYSTEM) { + inputMessages.append( + InputMessage.create(Role.SYSTEM, contentToMessageParts(msg.rawContent()))); + } else if (msg.role() == ChatCompletionMessage.Role.USER) { + inputMessages.append( + InputMessage.create(Role.USER, contentToMessageParts(msg.rawContent()))); + } else if (msg.role() == ChatCompletionMessage.Role.ASSISTANT) { + List messageParts = new ArrayList<>(); + + List contentParts = contentToMessagePartsOrNull(msg.rawContent()); + if (contentParts != null) { + messageParts.addAll(contentParts); + } + + List toolCalls = msg.toolCalls(); + if (toolCalls != null) { + messageParts.addAll( + toolCalls.stream().map(this::toolCallToMessagePart).collect(Collectors.toList())); + } + inputMessages.append(InputMessage.create(Role.ASSISTANT, messageParts)); + } else if (msg.role() == ChatCompletionMessage.Role.TOOL) { + inputMessages.append( + InputMessage.create( + 
Role.TOOL, contentToToolMessageParts(msg.toolCallId(), msg.rawContent()))); + } + } + return inputMessages; + } + + @Nullable + @Override + public OutputMessages outputMessages( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (!messageCaptureOptions.captureMessageContent() + || response == null + || response.choices() == null) { + return null; + } + + OutputMessages outputMessages = OutputMessages.create(); + for (Choice choice : response.choices()) { + ChatCompletionMessage choiceMsg = choice.message(); + List messageParts = new ArrayList<>(); + + if (choiceMsg != null) { + List contentParts = contentToMessagePartsOrNull(choiceMsg.rawContent()); + if (contentParts != null) { + messageParts.addAll(contentParts); + } + List toolCalls = choiceMsg.toolCalls(); + if (toolCalls != null) { + messageParts.addAll( + toolCalls.stream().map(this::toolCallToMessagePart).collect(Collectors.toList())); + } + } + + outputMessages.append( + OutputMessage.create( + Role.ASSISTANT, + messageParts, + choice.finishReason() != null ? choice.finishReason().name().toLowerCase() : "")); + } + return outputMessages; + } + + @Nullable + @Override + public SystemInstructions systemInstructions( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + return null; + } + + @Nullable + @Override + public ToolDefinitions toolDefinitions( + ChatCompletionRequest request, @Nullable ChatCompletion response) { + if (request.tools() == null) { + return null; + } + + ToolDefinitions toolDefinitions = ToolDefinitions.create(); + request.tools().stream() + .filter(Objects::nonNull) + .map( + tool -> { + if (tool.getFunction() != null) { + String name = tool.getFunction().getName(); + String type = tool.getType().name().toLowerCase(); + if (messageCaptureOptions.captureMessageContent() + && tool.getFunction().getDescription() != null) { + return ToolDefinition.create( + type, name, tool.getFunction().getDescription(), null); + } else { + return ToolDefinition.create(type, name, null, null); + } + } + return null; + }) + .filter(Objects::nonNull) + .forEach(toolDefinitions::append); + + return toolDefinitions; + } + + /** + * Support content: + * + *
<ul> + *   <li>{@code String} + *   <li>{@code List} + * </ul>
+ */ + private List contentToMessageParts(Object rawContent) { + List messageParts = contentToMessagePartsOrNull(rawContent); + return messageParts == null ? Collections.singletonList(TextPart.create("")) : messageParts; + } + + /** + * Support content: + * + * + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + private List contentToMessagePartsOrNull(Object rawContent) { + if (rawContent instanceof String && !((String) rawContent).isEmpty()) { + return Collections.singletonList(TextPart.create(truncateTextContent((String) rawContent))); + } else if (rawContent instanceof List) { + return joinContentParts((List) rawContent); + } else { + return null; + } + } + + private MessagePart toolCallToMessagePart(ToolCall call) { + if (call != null && call.function() != null) { + return ToolCallRequestPart.create( + call.id(), call.function().name(), call.function().arguments()); + } + return ToolCallRequestPart.create("unknown_function"); + } + + /** + * Support content: + * + *
<ul> + *   <li>{@code String} + *   <li>{@code List} + * </ul>
+ */ + private List contentToToolMessageParts(String toolCallId, Object rawContent) { + if (rawContent instanceof String && !((String) rawContent).isEmpty()) { + return Collections.singletonList( + ToolCallResponsePart.create(toolCallId, truncateTextContent((String) rawContent))); + } + return Collections.singletonList(ToolCallResponsePart.create(toolCallId)); + } + + private List joinContentParts(List contentParts) { + return contentParts.stream() + .filter(part -> part instanceof String) + .map(part -> this.truncateTextContent((String) part)) + .map(TextPart::create) + .collect(Collectors.toList()); + } + + private String truncateTextContent(String content) { + if (!content.endsWith(TRUNCATE_FLAG) + && content.length() > messageCaptureOptions.maxMessageContentLength()) { + content = + content.substring(0, messageCaptureOptions.maxMessageContentLength()) + TRUNCATE_FLAG; + } + return content; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java new file mode 100644 index 000000000000..eaaa93fb2b8f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamListener.java @@ -0,0 +1,141 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk.ChunkChoice; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import org.springframework.ai.openai.api.OpenAiApi.Usage; + +public final class ChatModelStreamListener { + + private final Context context; + private final ChatCompletionRequest request; + private final Instrumenter instrumenter; + private final MessageCaptureOptions messageCaptureOptions; + private final boolean newSpan; + private final AtomicBoolean hasEnded; + private final List chatModelMessageBuffers; + + // Aggregated metadata + private final AtomicLong inputTokens = new AtomicLong(0); + private final AtomicLong outputTokens = new AtomicLong(0); + private final AtomicReference requestId = new AtomicReference<>(); + + public ChatModelStreamListener( + Context context, + ChatCompletionRequest request, + Instrumenter instrumenter, + MessageCaptureOptions messageCaptureOptions, + boolean newSpan) { + this.context = context; + this.request = request; + this.instrumenter = instrumenter; + this.messageCaptureOptions = messageCaptureOptions; + this.newSpan = newSpan; + this.hasEnded = new AtomicBoolean(); + this.chatModelMessageBuffers = new ArrayList<>(); + } + + 
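      // Editorial note: a minimal usage sketch (not part of this diff), assuming a
      // Flux<ChatCompletionChunk> named chunkFlux; it mirrors ChatModelStreamWrapper.wrap(...)
      // later in this PR: each streamed chunk is aggregated via onChunk, and the span is ended
      // exactly once on completion or error.
      //
      //   ChatModelStreamListener listener =
      //       new ChatModelStreamListener(
      //           context, request, instrumenter, messageCaptureOptions, /* newSpan= */ true);
      //   Flux<ChatCompletionChunk> wrapped = chunkFlux
      //       .doOnNext(listener::onChunk)                  // merge deltas into message buffers
      //       .doOnComplete(() -> listener.endSpan(null))   // normal completion ends the span
      //       .doOnError(listener::endSpan);                // errors end the span with the throwable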
public void onChunk(ChatCompletionChunk chunk) { + if (chunk == null) { + return; + } + + if (chunk.id() != null) { + requestId.set(chunk.id()); + } + if (chunk.usage() != null) { + if (chunk.usage().promptTokens() != null) { + inputTokens.set(chunk.usage().promptTokens().longValue()); + } + if (chunk.usage().completionTokens() != null) { + outputTokens.set(chunk.usage().completionTokens().longValue()); + } + } + + if (chunk.choices() != null) { + List choices = chunk.choices(); + for (ChunkChoice choice : choices) { + while (chatModelMessageBuffers.size() <= choice.index()) { + chatModelMessageBuffers.add(null); + } + ChatModelMessageBuffer buffer = chatModelMessageBuffers.get(choice.index()); + if (buffer == null) { + buffer = new ChatModelMessageBuffer(choice.index(), messageCaptureOptions); + chatModelMessageBuffers.set(choice.index(), buffer); + } + + // Convert ChunkChoice to Choice for compatibility with buffer + buffer.append( + new org.springframework.ai.openai.api.OpenAiApi.ChatCompletion.Choice( + choice.finishReason(), choice.index(), choice.delta(), choice.logprobs())); + } + } + } + + public void endSpan(@Nullable Throwable error) { + // Use an atomic operation since close() type of methods are exposed to the user + // and can come from any thread. + if (!this.hasEnded.compareAndSet(false, true)) { + return; + } + + if (this.chatModelMessageBuffers.isEmpty()) { + // Only happens if we got no chunks, so we have no response. + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, null, error); + } + return; + } + + Integer inputTokens = null; + if (this.inputTokens.get() > 0) { + inputTokens = (int) this.inputTokens.get(); + } + + Integer outputTokens = null; + if (this.outputTokens.get() > 0) { + outputTokens = (int) this.outputTokens.get(); + } + + List choices = + this.chatModelMessageBuffers.stream() + .map(ChatModelMessageBuffer::toChoice) + .collect(Collectors.toList()); + + ChatCompletion result = + new ChatCompletion( + this.requestId.get(), + choices, + null, // created + null, // model + null, // serviceTier + null, // systemFingerprint + "chat.completion", + new Usage( + outputTokens, + inputTokens, + inputTokens != null && outputTokens != null ? 
inputTokens + outputTokens : null, + null, + null)); + + if (this.newSpan) { + this.instrumenter.end(this.context, this.request, result, error); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java new file mode 100644 index 000000000000..db376065a638 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatModelStreamWrapper.java @@ -0,0 +1,36 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.context.Context; +import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator; +import io.opentelemetry.javaagent.bootstrap.reactor.ReactorSubscribeOnProcessTracing; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import reactor.core.publisher.Flux; + +public final class ChatModelStreamWrapper { + + public static Flux wrap( + Flux originFlux, + ChatModelStreamListener streamListener, + Context context) { + + Flux chatCompletionChunkFlux = + originFlux + .doOnNext(chunk -> streamListener.onChunk(chunk)) + .doOnComplete(() -> streamListener.endSpan(null)) + .doOnError(streamListener::endSpan); + return ContextPropagationOperator.runWithContext(chatCompletionChunkFlux, context); + } + + public static Flux enableContextPropagation(Flux originFlux) { + return originFlux.contextWrite( + ctx -> ctx.put(ReactorSubscribeOnProcessTracing.CONTEXT_PROPAGATION_KEY, true)); + } + + private ChatModelStreamWrapper() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java new file mode 100644 index 000000000000..e26a0e527bc4 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiApiInstrumentation.java @@ -0,0 +1,143 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0.SpringAiOpenaiSingletons.TELEMETRY; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArgument; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import 
io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionChunk; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; +import org.springframework.http.ResponseEntity; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class OpenAiApiInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.openai.api.OpenAiApi"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.openai.api.OpenAiApi"); + } + + @Override + public void transform(TypeTransformer transformer) { + transformer.applyAdviceToMethod( + isMethod() + .and(named("chatCompletionEntity")) + .and(takesArguments(2)) + .and( + takesArgument( + 0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + .and(returns(named("org.springframework.http.ResponseEntity"))), + this.getClass().getName() + "$CallAdvice"); + + transformer.applyAdviceToMethod( + isMethod() + .and(named("chatCompletionStream")) + .and(takesArguments(2)) + .and( + takesArgument( + 0, named("org.springframework.ai.openai.api.OpenAiApi$ChatCompletionRequest"))) + .and(returns(named("reactor.core.publisher.Flux"))), + this.getClass().getName() + "$StreamAdvice"); + } + + @SuppressWarnings("unused") + public static class CallAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void callEnter( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + Context parentContext = Context.current(); + if (!TELEMETRY.chatCompletionInstrumenter().shouldStart(parentContext, request)) { + return; + } + + context = TELEMETRY.chatCompletionInstrumenter().start(parentContext, request); + scope = context.makeCurrent(); + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void callExit( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Return ResponseEntity response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelScope") Scope scope) { + if (scope == null) { + return; + } + scope.close(); + + TELEMETRY + .chatCompletionInstrumenter() + .end(context, request, response.hasBody() ? 
response.getBody() : null, throwable); + } + } + + @SuppressWarnings("unused") + public static class StreamAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void streamEnter( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatModelStreamListener streamListener) { + context = Context.current(); + + if (TELEMETRY.chatCompletionInstrumenter().shouldStart(context, request)) { + context = TELEMETRY.chatCompletionInstrumenter().start(context, request); + streamListener = + new ChatModelStreamListener( + context, + request, + TELEMETRY.chatCompletionInstrumenter(), + TELEMETRY.messageCaptureOptions(), + true); + } + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void streamExit( + @Advice.Argument(0) ChatCompletionRequest request, + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable, + @Advice.Local("otelContext") Context context, + @Advice.Local("otelStreamListener") ChatModelStreamListener streamListener) { + + if (throwable != null) { + // In case of exception, directly call end + TELEMETRY.chatCompletionInstrumenter().end(context, request, null, throwable); + return; + } + + if (streamListener != null) { + // Wrap the response to integrate the stream listener + response = ChatModelStreamWrapper.wrap(response, streamListener, context); + } + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java new file mode 100644 index 000000000000..60a40dcb0935 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/OpenAiChatModelInstrumentation.java @@ -0,0 +1,65 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; +import static net.bytebuddy.matcher.ElementMatchers.isMethod; +import static net.bytebuddy.matcher.ElementMatchers.named; +import static net.bytebuddy.matcher.ElementMatchers.returns; +import static net.bytebuddy.matcher.ElementMatchers.takesArguments; + +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; +import net.bytebuddy.asm.Advice; +import net.bytebuddy.description.type.TypeDescription; +import net.bytebuddy.matcher.ElementMatcher; +import org.springframework.ai.chat.model.ChatResponse; +import reactor.core.publisher.Flux; + +@AutoService(TypeInstrumentation.class) +public class OpenAiChatModelInstrumentation implements TypeInstrumentation { + + @Override + public ElementMatcher classLoaderOptimization() { + return hasClassesNamed("org.springframework.ai.openai.OpenAiChatModel"); + } + + @Override + public ElementMatcher typeMatcher() { + return named("org.springframework.ai.openai.OpenAiChatModel"); + } + + @Override + public void transform(TypeTransformer transformer) { + 
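    // Editorial note: a sketch of what the StreamAdvice below effectively does, grounded in
    // ChatModelStreamWrapper.enableContextPropagation earlier in this PR. No span is started
    // here; the Flux returned by internalStream is only flagged so that reactor context
    // propagation is enabled for downstream operators:
    //
    //   response = response.contextWrite(
    //       ctx -> ctx.put(ReactorSubscribeOnProcessTracing.CONTEXT_PROPAGATION_KEY, true));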
transformer.applyAdviceToMethod( + isMethod() + .and(named("internalStream")) + .and(takesArguments(2)) + .and(returns(named("reactor.core.publisher.Flux"))), + this.getClass().getName() + "$StreamAdvice"); + } + + @SuppressWarnings("unused") + public static class StreamAdvice { + + @Advice.OnMethodEnter(suppress = Throwable.class) + public static void streamEnter() { + // do nothing + } + + @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) + public static void streamExit( + @Advice.Return(readOnly = false) Flux response, + @Advice.Thrown Throwable throwable) { + if (throwable != null) { + return; + } + + response = ChatModelStreamWrapper.enableContextPropagation(response); + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java new file mode 100644 index 000000000000..76827a57db37 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiInstrumentationModule.java @@ -0,0 +1,25 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import static java.util.Arrays.asList; + +import com.google.auto.service.AutoService; +import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; +import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; +import java.util.List; + +@AutoService(InstrumentationModule.class) +public class SpringAiOpenaiInstrumentationModule extends InstrumentationModule { + public SpringAiOpenaiInstrumentationModule() { + super("spring-ai-openai", "spring-ai-openai-1.0"); + } + + @Override + public List typeInstrumentations() { + return asList(new OpenAiChatModelInstrumentation(), new OpenAiApiInstrumentation()); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java new file mode 100644 index 000000000000..7bfa0f1733d0 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiSingletons.java @@ -0,0 +1,28 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig; + +public final class SpringAiOpenaiSingletons { + public static final SpringAiOpenaiTelemetry TELEMETRY = + SpringAiOpenaiTelemetry.builder(GlobalOpenTelemetry.get()) + .setCaptureMessageContent( + InstrumentationConfig.get() + .getBoolean("otel.instrumentation.genai.capture-message-content", true)) + .setContentMaxLength( + InstrumentationConfig.get() + .getInt("otel.instrumentation.genai.message-content.max-length", 
8192)) + .setCaptureMessageStrategy( + InstrumentationConfig.get() + .getString( + "otel.instrumentation.genai.message-content.capture-strategy", + "span-attributes")) + .build(); + + private SpringAiOpenaiSingletons() {} +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java new file mode 100644 index 000000000000..ef54d2898971 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetry.java @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; + +/** Entrypoint for instrumenting Spring AI OpenAI clients. */ +public final class SpringAiOpenaiTelemetry { + + /** + * Returns a new {@link SpringAiOpenaiTelemetryBuilder} configured with the given {@link + * OpenTelemetry}. + */ + public static SpringAiOpenaiTelemetryBuilder builder(OpenTelemetry openTelemetry) { + return new SpringAiOpenaiTelemetryBuilder(openTelemetry); + } + + private final Instrumenter chatCompletionInstrumenter; + private final MessageCaptureOptions messageCaptureOptions; + + SpringAiOpenaiTelemetry( + Instrumenter chatCompletionInstrumenter, + MessageCaptureOptions messageCaptureOptions) { + this.chatCompletionInstrumenter = chatCompletionInstrumenter; + this.messageCaptureOptions = messageCaptureOptions; + } + + public Instrumenter chatCompletionInstrumenter() { + return chatCompletionInstrumenter; + } + + public MessageCaptureOptions messageCaptureOptions() { + return messageCaptureOptions; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java new file mode 100644 index 000000000000..eec2caee672a --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/SpringAiOpenaiTelemetryBuilder.java @@ -0,0 +1,82 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.api.genai.MessageCaptureOptions; +import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiAttributesExtractor; +import io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiMessagesExtractor; +import 
io.opentelemetry.instrumentation.api.instrumenter.genai.GenAiSpanNameExtractor; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletion; +import org.springframework.ai.openai.api.OpenAiApi.ChatCompletionRequest; + +/** Builder for {@link SpringAiOpenaiTelemetry}. */ +public final class SpringAiOpenaiTelemetryBuilder { + + private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-ai-openai-1.0"; + + private final OpenTelemetry openTelemetry; + + private boolean captureMessageContent; + + private int contentMaxLength; + + private String captureMessageStrategy; + + SpringAiOpenaiTelemetryBuilder(OpenTelemetry openTelemetry) { + this.openTelemetry = openTelemetry; + } + + /** Sets whether to capture message content in spans. Defaults to false. */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setCaptureMessageContent(boolean captureMessageContent) { + this.captureMessageContent = captureMessageContent; + return this; + } + + /** Sets the maximum length of message content to capture. Defaults to 8192. */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setContentMaxLength(int contentMaxLength) { + this.contentMaxLength = contentMaxLength; + return this; + } + + /** Sets the strategy to capture message content. Defaults to "span-attributes". */ + @CanIgnoreReturnValue + public SpringAiOpenaiTelemetryBuilder setCaptureMessageStrategy(String captureMessageStrategy) { + this.captureMessageStrategy = captureMessageStrategy; + return this; + } + + /** + * Returns a new {@link SpringAiOpenaiTelemetry} with the settings of this {@link + * SpringAiOpenaiTelemetryBuilder}. + */ + public SpringAiOpenaiTelemetry build() { + MessageCaptureOptions messageCaptureOptions = + MessageCaptureOptions.create( + captureMessageContent, contentMaxLength, captureMessageStrategy); + + Instrumenter chatCompletionInstrumenter = + Instrumenter.builder( + openTelemetry, + INSTRUMENTATION_NAME, + GenAiSpanNameExtractor.create(ChatModelAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiAttributesExtractor.create(ChatModelAttributesGetter.INSTANCE)) + .addAttributesExtractor( + GenAiMessagesExtractor.create( + ChatModelAttributesGetter.INSTANCE, + ChatModelMessagesProvider.create(messageCaptureOptions), + messageCaptureOptions, + INSTRUMENTATION_NAME)) + .buildInstrumenter(); + + return new SpringAiOpenaiTelemetry(chatCompletionInstrumenter, messageCaptureOptions); + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java new file mode 100644 index 000000000000..9f3fece1a3e4 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/ChatCompletionTest.java @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.javaagent.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.instrumentation.spring.ai.openai.v1_0.AbstractChatCompletionTest; +import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import org.junit.jupiter.api.extension.RegisterExtension; + +public 
class ChatCompletionTest extends AbstractChatCompletionTest { + + @RegisterExtension + private static final AgentInstrumentationExtension testing = + AgentInstrumentationExtension.create(); + + @Override + protected InstrumentationExtension getTesting() { + return testing; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/metadata.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/metadata.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts new file mode 100644 index 000000000000..56ff1781fcf7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/build.gradle.kts @@ -0,0 +1,14 @@ +plugins { + id("otel.java-conventions") +} + +otelJava { + minJavaVersionSupported.set(JavaVersion.VERSION_17) +} + +dependencies { + api(project(":testing-common")) + + api("org.springframework.ai:spring-ai-openai:1.0.0") + api(project(":instrumentation-api-incubator")) +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java new file mode 100644 index 000000000000..8affac4605ff --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractChatCompletionTest.java @@ -0,0 +1,597 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; + +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_INPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OPERATION_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_OUTPUT_MESSAGES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_PROVIDER_NAME; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_CHOICE_COUNT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_FREQUENCY_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MAX_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_PRESENCE_PENALTY; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_SEED; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_STOP_SEQUENCES; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TEMPERATURE; +import static 
io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_REQUEST_TOP_P; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_FINISH_REASONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_ID; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_RESPONSE_MODEL; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_TOOL_DEFINITIONS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_INPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiOperationNameIncubatingValues.CHAT; +import static io.opentelemetry.instrumentation.api.instrumenter.genai.incubating.GenAiIncubatingAttributes.GenAiProviderNameIncubatingValues.OPENAI; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies; +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; +import org.springframework.ai.chat.messages.AssistantMessage.ToolCall; +import org.springframework.ai.chat.messages.SystemMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage; +import org.springframework.ai.chat.messages.ToolResponseMessage.ToolResponse; +import org.springframework.ai.chat.messages.UserMessage; +import org.springframework.ai.chat.model.ChatResponse; +import org.springframework.ai.chat.prompt.ChatOptions; +import org.springframework.ai.chat.prompt.Prompt; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.OpenAiChatOptions; +import org.springframework.ai.tool.ToolCallback; +import org.springframework.ai.tool.function.FunctionToolCallback; + +public abstract class AbstractChatCompletionTest extends AbstractSpringAiOpenaiTest { + + protected static final String TEST_CHAT_MODEL = "qwen3-coder-flash"; + protected static final String TEST_CHAT_INPUT = + "Answer in up to 3 words: Which ocean contains Bouvet Island?"; + + @Test + void basic() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content = "South Atlantic"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + 
equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void stream() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model(TEST_CHAT_MODEL).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + List chunks = chatModel.stream(prompt).collectList().block(); + + String fullMessage = + chunks.stream() + .map( + cc -> { + if (cc.getResults().isEmpty()) { + return Optional.empty(); + } + return Optional.of(cc.getResults().get(0).getOutput().getText()); + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.joining()); + + String content = "South Atlantic"; + assertEquals(fullMessage, content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("South Atlantic")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void allTheClientOptions() { + OpenAiChatOptions options = + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .maxTokens(1000) + .seed(100) + .stop(singletonList("foo")) + .topP(1.0) + .temperature(0.8) + .frequencyPenalty(0.5) + .presencePenalty(0.3) + .build(); + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(options) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.8d), + equalTo( + GEN_AI_REQUEST_MAX_TOKENS, 
Long.valueOf(options.getMaxTokens())), + equalTo(GEN_AI_REQUEST_SEED, Long.valueOf(options.getSeed())), + satisfies( + GEN_AI_REQUEST_STOP_SEQUENCES, + seq -> seq.hasSize(options.getStop().size())), + equalTo(GEN_AI_REQUEST_TOP_P, options.getTopP()), + equalTo( + GEN_AI_REQUEST_FREQUENCY_PENALTY, options.getFrequencyPenalty()), + equalTo(GEN_AI_REQUEST_PRESENCE_PENALTY, options.getPresencePenalty()), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 2L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 25L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void with400Error() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(ChatOptions.builder().model("gpt-4o").build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + Throwable thrown = catchThrowable(() -> chatModel.stream(prompt).collectList().block()); + assertThat(thrown).isInstanceOf(Exception.class); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, "gpt-4o"), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?"))))); + } + + @Test + void multipleChoices() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions.builder().model(TEST_CHAT_MODEL).N(2).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + String content1 = "Southern Ocean"; + assertThat(response.getResults().get(0).getOutput().getText()).isEqualTo(content1); + String content2 = "South"; + assertThat(response.getResults().get(1).getOutput().getText()).isEqualTo(content2); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, response.getMetadata().getId()), + equalTo(GEN_AI_RESPONSE_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_CHOICE_COUNT, 2), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop", "stop")), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 23L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 3L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 26L), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages 
-> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void streamMultipleChoices() { + Prompt prompt = + Prompt.builder() + .messages(UserMessage.builder().text(TEST_CHAT_INPUT).build()) + .chatOptions(OpenAiChatOptions.builder().model(TEST_CHAT_MODEL).N(2).build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + // there's a bug in open-ai chat model, thus we couldn't agg multi choice + List chunks = chatModel.stream(prompt).collectList().block(); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + equalTo(GEN_AI_REQUEST_CHOICE_COUNT, 2), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop", "stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "Answer in up to 3 words: Which ocean contains Bouvet Island?")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("Southern Ocean")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop"))))); + } + + @Test + void toolCalls() { + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + ChatResponse response = chatModel.call(prompt); + + List toolCalls = response.getResult().getOutput().getToolCalls(); + + assertThat(toolCalls.get(0).id()).startsWith("call_"); + assertThat(toolCalls.get(1).id()).startsWith("call_"); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_MODEL, "qwen3-coder-flash"), + satisfies(GEN_AI_RESPONSE_ID, id -> id.startsWith("chatcmpl-")), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 311L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 45L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 356L), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_INPUT_MESSAGES, 
messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("New York City")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("tool_calls")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); + + getTesting().clearData(); + + prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build(), + response.getResult().getOutput(), + new ToolResponseMessage( + asList( + new ToolResponse( + toolCalls.get(0).id(), "get_weather", "25 degrees and sunny"), + new ToolResponse( + toolCalls.get(1).id(), "get_weather", "15 degrees and sunny"))))) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + + response = chatModel.call(prompt); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + satisfies(GEN_AI_RESPONSE_ID, id -> id.startsWith("chatcmpl-")), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_MODEL, "qwen3-coder-flash"), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("stop")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + equalTo(GEN_AI_USAGE_INPUT_TOKENS, 386L), + equalTo(GEN_AI_USAGE_OUTPUT_TOKENS, 31L), + equalTo(GEN_AI_USAGE_TOTAL_TOKENS, 417L), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("tool_call_response")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("25 degrees and sunny")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> messages.contains("15 degrees and sunny")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("assistant")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("stop")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); + } + + @Test + void streamToolCalls() { + Prompt prompt = + Prompt.builder() + .messages( + asList( + SystemMessage.builder() + .text("You are a helpful assistant providing weather updates.") + .build(), + UserMessage.builder() + .text("What is the weather in New York City and London?") + .build())) + .chatOptions( + OpenAiChatOptions.builder() + .model(TEST_CHAT_MODEL) + .toolCallbacks(buildGetWeatherToolDefinition()) + .build()) + .build(); + OpenAiChatModel chatModel = getChatModel(); + + List chunks = chatModel.stream(prompt).toStream().collect(Collectors.toList()); + + getTesting() + .waitAndAssertTraces( + trace -> + trace.hasSpansSatisfyingExactly( + span -> + span.hasAttributesSatisfying( + equalTo(GEN_AI_PROVIDER_NAME, 
OPENAI), + equalTo(GEN_AI_OPERATION_NAME, CHAT), + equalTo(GEN_AI_REQUEST_MODEL, TEST_CHAT_MODEL), + equalTo(GEN_AI_REQUEST_TEMPERATURE, 0.7d), + equalTo(GEN_AI_RESPONSE_ID, chunks.get(0).getMetadata().getId()), + satisfies( + GEN_AI_RESPONSE_FINISH_REASONS, + reasons -> reasons.containsExactly("tool_calls")), + equalTo(GEN_AI_SPAN_KIND, "LLM"), + satisfies(GEN_AI_INPUT_MESSAGES, messages -> messages.contains("user")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "What is the weather in New York City and London?")), + satisfies( + GEN_AI_INPUT_MESSAGES, messages -> messages.contains("system")), + satisfies( + GEN_AI_INPUT_MESSAGES, + messages -> + messages.contains( + "You are a helpful assistant providing weather updates.")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("tool_call")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("get_weather")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("New York City")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, messages -> messages.contains("London")), + satisfies( + GEN_AI_OUTPUT_MESSAGES, + messages -> messages.contains("tool_calls")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, messages -> messages.contains("function")), + satisfies( + GEN_AI_TOOL_DEFINITIONS, + messages -> messages.contains("get_weather"))))); + } + + private ToolCallback buildGetWeatherToolDefinition() { + return FunctionToolCallback.builder("get_weather", new GetWeatherFunction()) + .description("The location to get the current temperature for") + .inputType(ToolInput.class) + .build(); + } + + public static class ToolInput { + private String location; + + public String getLocation() { + return location; + } + + public ToolInput(String location) { + this.location = location; + } + } + + private static class GetWeatherFunction implements Function { + @Override + public String apply(ToolInput location) { + return "test function"; + } + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java new file mode 100644 index 000000000000..938fcfd970c9 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/java/io/opentelemetry/javaagent/instrumentation/spring/ai/openai/v1_0/AbstractSpringAiOpenAiTest.java @@ -0,0 +1,65 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +package io.opentelemetry.instrumentation.spring.ai.openai.v1_0; + +import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; +import io.opentelemetry.instrumentation.testing.recording.RecordingExtension; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Version; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.springframework.ai.openai.OpenAiChatModel; +import org.springframework.ai.openai.api.OpenAiApi; +import org.springframework.http.client.JdkClientHttpRequestFactory; +import org.springframework.http.client.reactive.JdkClientHttpConnector; +import org.springframework.web.client.RestClient; +import org.springframework.web.reactive.function.client.WebClient; + +public abstract class AbstractSpringAiOpenaiTest { + + protected static final String INSTRUMENTATION_NAME = 
"io.opentelemetry.spring-ai-openai-1.0"; + + private static final String API_URL = "https://dashscope.aliyuncs.com/compatible-mode"; + + @RegisterExtension static final RecordingExtension recording = new RecordingExtension(API_URL); + + protected abstract InstrumentationExtension getTesting(); + + private OpenAiApi openAiApi; + + private OpenAiChatModel chatModel; + + protected final OpenAiApi getOpenAiApi() { + if (openAiApi == null) { + HttpClient httpClient = HttpClient.newBuilder().version(Version.HTTP_1_1).build(); + + OpenAiApi.Builder builder = + OpenAiApi.builder() + .restClientBuilder( + RestClient.builder().requestFactory(new JdkClientHttpRequestFactory(httpClient))) + .webClientBuilder( + WebClient.builder().clientConnector(new JdkClientHttpConnector(httpClient))) + .baseUrl("http://localhost:" + recording.getPort()); + if (recording.isRecording()) { + builder.apiKey(System.getenv("OPENAI_API_KEY")); + } else { + builder.apiKey("unused"); + } + openAiApi = builder.build(); + } + return openAiApi; + } + + protected final OpenAiChatModel getChatModel() { + if (chatModel == null) { + chatModel = + OpenAiChatModel.builder() + .openAiApi(getOpenAiApi()) + .toolExecutionEligibilityPredicate((o1, o2) -> false) + .build(); + } + return chatModel; + } +} diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml new file mode 100644 index 000000000000..7f9762709e16 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.alltheclientoptions.yaml @@ -0,0 +1,47 @@ +--- +id: 177fac5c-52bf-4ce1-a4cc-20b97fe949fd +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "frequency_penalty" : 0.5, + "max_tokens" : 1000, + "presence_penalty" : 0.3, + "seed" : 100, + "stop" : [ "foo" ], + "stream" : false, + "temperature" : 0.8, + "top_p" : 1 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758118388,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-f8d57a86-8c10-4f2f-8f0f-149a19e74d6a\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: f8d57a86-8c10-4f2f-8f0f-149a19e74d6a + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "400" + req-arrive-time: "1758118387328" + resp-start-time: "1758118387728" + x-envoy-upstream-service-time: "399" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:07 GMT" + server: istio-envoy +uuid: 177fac5c-52bf-4ce1-a4cc-20b97fe949fd +persistent: true +insertionIndex: 2 diff --git 
a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml new file mode 100644 index 000000000000..7f2bdc6fed3c --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.basic.yaml @@ -0,0 +1,41 @@ +--- +id: ef519d87-3023-46d4-bdb2-d272ce8dba4b +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"South Atlantic\",\"role\":\"assistant\"\ + },\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"object\":\"chat.completion\"\ + ,\"usage\":{\"prompt_tokens\":23,\"completion_tokens\":2,\"total_tokens\":25},\"\ + created\":1758118390,\"system_fingerprint\":null,\"model\":\"qwen3-coder-flash\"\ + ,\"id\":\"chatcmpl-73cce568-68f8-4c6a-b4ce-3ac371989aa5\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 73cce568-68f8-4c6a-b4ce-3ac371989aa5 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "304" + req-arrive-time: "1758118389297" + resp-start-time: "1758118389602" + x-envoy-upstream-service-time: "303" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:09 GMT" + server: istio-envoy +uuid: ef519d87-3023-46d4-bdb2-d272ce8dba4b +persistent: true +insertionIndex: 17 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml new file mode 100644 index 000000000000..157b0583b646 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.multiplechoices.yaml @@ -0,0 +1,44 @@ +--- +id: 5cb55360-eef4-4668-b687-b3f60fc7e201 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "n" : 2, + "stream" : false, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"Southern Ocean\",\"role\":\"assistant\"\ + },\"index\":0,\"finish_reason\":\"stop\",\"logprobs\":null},{\"message\":{\"content\"\ + :\"South\",\"role\":\"assistant\"},\"index\":1,\"finish_reason\":\"stop\",\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":23,\"completion_tokens\"\ + 
:3,\"total_tokens\":26},\"created\":1758119593,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-25a1be03-506a-465a-bdfe-6c9b388ce006\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 25a1be03-506a-465a-bdfe-6c9b388ce006 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "323" + req-arrive-time: "1758119592942" + resp-start-time: "1758119593265" + x-envoy-upstream-service-time: "322" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:33:12 GMT" + server: istio-envoy +uuid: 5cb55360-eef4-4668-b687-b3f60fc7e201 +persistent: true +insertionIndex: 10 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml new file mode 100644 index 000000000000..0ac27586c73f --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.stream.yaml @@ -0,0 +1,47 @@ +--- +id: 62a7985a-5ede-4b53-949a-c6cf100938e4 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"delta":{"content":" Atlantic"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: {"choices":[{"finish_reason":"stop","delta":{"content":""},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758118389,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-c444b53a-0e32-4059-b033-d4e157f9c3ce"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: c444b53a-0e32-4059-b033-d4e157f9c3ce + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "351" + req-arrive-time: "1758118388403" + resp-start-time: "1758118388755" + x-envoy-upstream-service-time: "350" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:08 GMT" + server: istio-envoy +uuid: 62a7985a-5ede-4b53-949a-c6cf100938e4 +persistent: true +insertionIndex: 6 diff --git 
a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml new file mode 100644 index 000000000000..b351cd1a72ee --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streammultiplechoices.yaml @@ -0,0 +1,62 @@ +--- +id: 31332ee9-3ec2-408c-9acd-73e8e68446dd +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "n" : 2, + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":0,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"Southern"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":" Ocean"},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":"","role":"assistant"},"index":1,"logprobs":null,"finish_reason":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"finish_reason":null,"logprobs":null,"delta":{"content":"South"},"index":1}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":""},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: 
{"choices":[{"delta":{"content":""},"finish_reason":null,"index":1,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":null},"finish_reason":"stop","index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: {"choices":[{"delta":{"content":null},"finish_reason":"stop","index":1,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758119594,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-2909fece-ee65-4a06-836c-369f008065a9"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 2909fece-ee65-4a06-836c-369f008065a9 + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + req-cost-time: "362" + req-arrive-time: "1758119593605" + resp-start-time: "1758119593968" + x-envoy-upstream-service-time: "361" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:33:13 GMT" + server: istio-envoy +uuid: 31332ee9-3ec2-408c-9acd-73e8e68446dd +persistent: true +insertionIndex: 18 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml new file mode 100644 index 000000000000..2e2c781aa1c7 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.streamtoolcalls.yaml @@ -0,0 +1,82 @@ +--- +id: eaa62748-608a-4627-bda2-93545f27c16d +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : true, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: |+ + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"call_b2455b7da6524dc2b90f11ff","type":"function","function":{"name":"get_weather","arguments":""}}],"role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"{\"location\": 
\""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"New York City"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"\""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":0,"id":"","type":"function","function":{"arguments":"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"call_587ea3e2c2184dcfb35d3c7e","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"{\"location\": \""}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"London"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"content":null,"tool_calls":[{"index":1,"id":"","type":"function","function":{"arguments":"\"}"}}]},"finish_reason":null,"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"delta":{"tool_calls":[{"function":{"arguments":""},"index":1,"id":"","type":"function"}]},"index":0}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: {"choices":[{"finish_reason":"tool_calls","delta":{},"index":0,"logprobs":null}],"object":"chat.completion.chunk","usage":null,"created":1758165571,"system_fingerprint":null,"model":"qwen3-coder-flash","id":"chatcmpl-604a5ad5-5d76-42ad-a689-6f6f60c61e5f"} + + data: [DONE] + + headers: + vary: Origin + x-request-id: 604a5ad5-5d76-42ad-a689-6f6f60c61e5f + content-type: text/event-stream;charset=UTF-8 + x-dashscope-call-gateway: "true" + 
req-cost-time: "252" + req-arrive-time: "1758165570636" + resp-start-time: "1758165570888" + x-envoy-upstream-service-time: "250" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:30 GMT" + server: istio-envoy +uuid: eaa62748-608a-4627-bda2-93545f27c16d +persistent: true +insertionIndex: 14 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml new file mode 100644 index 000000000000..342c90f90e3e --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.toolcalls.yaml @@ -0,0 +1,159 @@ +--- +id: 9738f87e-fdf1-435a-a1bd-9b5565c0efdf +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather in New York City and London?", + "role" : "user" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"\",\"role\":\"assistant\",\"tool_calls\"\ + :[{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"New York City\\\"}\",\"\ + name\":\"get_weather\"},\"id\":\"call_69db468ee59a4613a15e7ae4\",\"index\":0,\"\ + type\":\"function\"},{\"function\":{\"arguments\":\"{\\\"location\\\": \\\"London\\\ + \"}\",\"name\":\"get_weather\"},\"id\":\"call_4941c4a1092340ceb42d6804\",\"index\"\ + :1,\"type\":\"function\"}]},\"finish_reason\":\"tool_calls\",\"index\":0,\"logprobs\"\ + :null}],\"object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":311,\"completion_tokens\"\ + :45,\"total_tokens\":356},\"created\":1758165572,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-b861091d-874d-4a2d-a9f8-0e96dd81955f\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: b861091d-874d-4a2d-a9f8-0e96dd81955f + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "714" + req-arrive-time: "1758165571689" + resp-start-time: "1758165572403" + x-envoy-upstream-service-time: "713" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:32 GMT" + server: istio-envoy +uuid: 9738f87e-fdf1-435a-a1bd-9b5565c0efdf +persistent: true +insertionIndex: 24 +--- +id: 230c9b97-cf95-4d49-b2c1-50cf33df6458 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "You are a helpful assistant providing weather updates.", + "role" : "system" + }, { + "content" : "What is the weather 
in New York City and London?", + "role" : "user" + }, { + "content" : "", + "role" : "assistant", + "tool_calls" : [ { + "id" : "call_69db468ee59a4613a15e7ae4", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"New York City\"}" + } + }, { + "id" : "call_4941c4a1092340ceb42d6804", + "type" : "function", + "function" : { + "name" : "get_weather", + "arguments" : "{\"location\": \"London\"}" + } + } ] + }, { + "content" : "25 degrees and sunny", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_69db468ee59a4613a15e7ae4" + }, { + "content" : "15 degrees and sunny", + "role" : "tool", + "name" : "get_weather", + "tool_call_id" : "call_4941c4a1092340ceb42d6804" + } ], + "model" : "qwen3-coder-flash", + "stream" : false, + "temperature" : 0.7, + "tools" : [ { + "type" : "function", + "function" : { + "description" : "The location to get the current temperature for", + "name" : "get_weather", + "parameters" : { + "$schema" : "https://json-schema.org/draft/2020-12/schema", + "additionalProperties" : false, + "type" : "object", + "properties" : { + "location" : { + "type" : "string" + } + }, + "required" : [ "location" ] + } + } + } ] + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 200 + body: "{\"choices\":[{\"message\":{\"content\":\"The current weather is as follows:\\\ + n- **New York City**: 25 degrees and sunny.\\n- **London**: 15 degrees and sunny.\"\ + ,\"role\":\"assistant\"},\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null}],\"\ + object\":\"chat.completion\",\"usage\":{\"prompt_tokens\":386,\"completion_tokens\"\ + :31,\"total_tokens\":417},\"created\":1758165573,\"system_fingerprint\":null,\"\ + model\":\"qwen3-coder-flash\",\"id\":\"chatcmpl-9f138fc5-3b65-4888-84b1-552113825783\"\ + }" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 9f138fc5-3b65-4888-84b1-552113825783 + x-dashscope-call-gateway: "true" + content-type: application/json + req-cost-time: "630" + req-arrive-time: "1758165572714" + resp-start-time: "1758165573344" + x-envoy-upstream-service-time: "629" + set-cookie: test_set_cookie + date: "Thu, 18 Sep 2025 03:19:32 GMT" + server: istio-envoy +uuid: 230c9b97-cf95-4d49-b2c1-50cf33df6458 +persistent: true +insertionIndex: 25 diff --git a/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml new file mode 100644 index 000000000000..bea00affda81 --- /dev/null +++ b/instrumentation/spring/spring-ai/spring-ai-openai-1.0/testing/src/main/resources/mappings/io.opentelemetry.instrumentation.spring.ai.openai.v1_0.abstractchatcompletiontest.with400error.yaml @@ -0,0 +1,38 @@ +--- +id: 7efd30b4-7368-4454-b018-c2ef226a3f40 +name: v1_chat_completions +request: + url: /v1/chat/completions + method: POST + bodyPatterns: + - equalToJson: |- + { + "messages" : [ { + "content" : "Answer in up to 3 words: Which ocean contains Bouvet Island?", + "role" : "user" + } ], + "model" : "gpt-4o", + "stream" : true, + "temperature" : 0.7 + } + ignoreArrayOrder: false + ignoreExtraElements: false +response: + status: 404 + body: "{\"error\":{\"message\":\"The model `gpt-4o` does not exist or you do 
not\ + \ have access to it.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\"\ + :\"model_not_found\"},\"request_id\":\"5710f1fc-5b88-4835-9c4e-20efc1ef35af\"}" + headers: + vary: "Origin,Access-Control-Request-Method,Access-Control-Request-Headers, Accept-Encoding" + x-request-id: 5710f1fc-5b88-4835-9c4e-20efc1ef35af + content-type: application/json + req-cost-time: "8" + req-arrive-time: "1758118389053" + resp-start-time: "1758118389061" + x-envoy-upstream-service-time: "7" + set-cookie: test_set_cookie + date: "Wed, 17 Sep 2025 14:13:08 GMT" + server: istio-envoy +uuid: 7efd30b4-7368-4454-b018-c2ef226a3f40 +persistent: true +insertionIndex: 11 diff --git a/settings.gradle.kts b/settings.gradle.kts index 593700b40dda..14168dcd87f6 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -502,6 +502,7 @@ include(":instrumentation:ratpack:ratpack-1.4:javaagent") include(":instrumentation:ratpack:ratpack-1.4:testing") include(":instrumentation:ratpack:ratpack-1.7:javaagent") include(":instrumentation:ratpack:ratpack-1.7:library") +include(":instrumentation:reactor:reactor-3.1:bootstrap") include(":instrumentation:reactor:reactor-3.1:javaagent") include(":instrumentation:reactor:reactor-3.1:library") include(":instrumentation:reactor:reactor-3.1:testing") @@ -560,6 +561,10 @@ include(":instrumentation:servlet:servlet-common:bootstrap") include(":instrumentation:servlet:servlet-common:javaagent") include(":instrumentation:servlet:servlet-javax-common:javaagent") include(":instrumentation:spark-2.3:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-1.0:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-1.0:testing") +include(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:javaagent") +include(":instrumentation:spring:spring-ai:spring-ai-openai-1.0:testing") include(":instrumentation:spring:spring-batch-3.0:javaagent") include(":instrumentation:spring:spring-boot-actuator-autoconfigure-2.0:javaagent") include(":instrumentation:spring:spring-boot-autoconfigure")