From 8e289216ada40e54229438f07e7d21add36336c1 Mon Sep 17 00:00:00 2001
From: yiyuanh
Date: Mon, 16 Dec 2024 18:52:40 -0800
Subject: [PATCH] update genai contract tests

---
 .../appsignals/test/awssdk/base/AwsSdkBaseTest.java  | 8 +++++++-
 .../test/utils/SemanticConventionsConstants.java     | 1 +
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/awssdk/base/AwsSdkBaseTest.java b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/awssdk/base/AwsSdkBaseTest.java
index 278aa4011d..fd13a259d3 100644
--- a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/awssdk/base/AwsSdkBaseTest.java
+++ b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/awssdk/base/AwsSdkBaseTest.java
@@ -1997,6 +1997,7 @@ protected void doTestBedrockRuntimeAi21Jamba() {
         List.of(
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL, "ai21.jamba-1-5-mini-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.7"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.8"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_RESPONSE_FINISH_REASONS, "[stop]"),
@@ -2071,6 +2072,7 @@ protected void doTestBedrockRuntimeAmazonTitan() {
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL,
                 "amazon.titan-text-premier-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_MAX_TOKENS, "100"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.7"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.9"),
@@ -2149,6 +2151,7 @@ protected void doTestBedrockRuntimeAnthropicClaude() {
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL,
                 "anthropic.claude-3-haiku-20240307-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_MAX_TOKENS, "512"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.6"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.53"),
@@ -2226,6 +2229,7 @@ protected void doTestBedrockRuntimeCohereCommandR() {
         List.of(
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL, "cohere.command-r-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_MAX_TOKENS, "4096"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.8"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.45"),
@@ -2303,6 +2307,7 @@ protected void doTestBedrockRuntimeMetaLlama() {
         List.of(
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL, "meta.llama3-70b-instruct-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_MAX_TOKENS, "128"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.1"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.9"),
@@ -2380,11 +2385,12 @@ protected void doTestBedrockRuntimeMistral() {
             assertAttribute(
                 SemanticConventionsConstants.GEN_AI_REQUEST_MODEL,
                 "mistral.mistral-large-2402-v1:0"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_SYSTEM, "aws.bedrock"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_MAX_TOKENS, "4096"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TEMPERATURE, "0.75"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_REQUEST_TOP_P, "0.25"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_RESPONSE_FINISH_REASONS, "[stop]"),
-            assertAttribute(SemanticConventionsConstants.GEN_AI_USAGE_INPUT_TOKENS, "15"),
+            assertAttribute(SemanticConventionsConstants.GEN_AI_USAGE_INPUT_TOKENS, "16"),
             assertAttribute(SemanticConventionsConstants.GEN_AI_USAGE_OUTPUT_TOKENS, "24")));
     assertMetricClientAttributes(
         metrics,
diff --git a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/utils/SemanticConventionsConstants.java b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/utils/SemanticConventionsConstants.java
index 51077ea6a1..d74e434e82 100644
--- a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/utils/SemanticConventionsConstants.java
+++ b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/utils/SemanticConventionsConstants.java
@@ -64,6 +64,7 @@ public class SemanticConventionsConstants {
   public static final String AWS_GUARDRAIL_ID = "aws.bedrock.guardrail.id";
   public static final String AWS_GUARDRAIL_ARN = "aws.bedrock.guardrail.arn";
   public static final String GEN_AI_REQUEST_MODEL = "gen_ai.request.model";
+  public static final String GEN_AI_SYSTEM = "gen_ai.system";
   public static final String GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens";
   public static final String GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature";
   public static final String GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p";