
Commit d1285bd

update claude support to replace legacy model
1 parent 535eebf commit d1285bd

2 files changed: +14, -15 lines


src/OpenTelemetry.Instrumentation.AWS/Implementation/AWSLlmModelProcessor.cs

Lines changed: 12 additions & 13 deletions
@@ -376,28 +376,27 @@ private static void ProcessClaudeModelAttributes(Activity activity, Dictionary<s
                 activity.SetTag(AWSSemanticConventions.AttributeGenAiTemperature, temperature.GetDouble());
             }
 
-            if (jsonBody.TryGetValue("max_tokens_to_sample", out var maxTokens))
+            if (jsonBody.TryGetValue("max_tokens", out var maxTokens))
             {
                 activity.SetTag(AWSSemanticConventions.AttributeGenAiMaxTokens, maxTokens.GetInt32());
             }
-
-            // input tokens not provided in Claude response body, so we estimate the value based on input length
-            if (jsonBody.TryGetValue("prompt", out var input))
-            {
-                activity.SetTag(AWSSemanticConventions.AttributeGenAiInputTokens, Convert.ToInt32(Math.Ceiling((double) input.GetString().Length / 6)));
-            }
         }
         else
         {
-            if (jsonBody.TryGetValue("stop_reason", out var finishReasons))
+            if (jsonBody.TryGetValue("usage", out var usage))
             {
-                activity.SetTag(AWSSemanticConventions.AttributeGenAiFinishReasons, new string[] { finishReasons.GetString() });
+                if (usage.TryGetProperty("input_tokens", out var inputTokens))
+                {
+                    activity.SetTag(AWSSemanticConventions.AttributeGenAiInputTokens, inputTokens.GetInt32());
+                }
+                if (usage.TryGetProperty("output_tokens", out var outputTokens))
+                {
+                    activity.SetTag(AWSSemanticConventions.AttributeGenAiOutputTokens, outputTokens.GetInt32());
+                }
             }
-
-            // output tokens not provided in Claude response body, so we estimate the value based on output length
-            if (jsonBody.TryGetValue("completion", out var output))
+            if (jsonBody.TryGetValue("stop_reason", out var finishReasons))
             {
-                activity.SetTag(AWSSemanticConventions.AttributeGenAiOutputTokens, Convert.ToInt32(Math.Ceiling((double) output.GetString().Length / 6)));
+                activity.SetTag(AWSSemanticConventions.AttributeGenAiFinishReasons, new string[] { finishReasons.GetString() });
             }
         }
     }
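
The removed branch targeted the legacy Claude text-completions body, which takes max_tokens_to_sample and reports no token counts, so input and output tokens were estimated from prompt and completion length. The newer Claude body uses max_tokens and returns exact counts in a usage object, which the updated code reads directly. Below is a minimal standalone sketch, not code from the instrumentation itself, that parses a hypothetical response body with System.Text.Json the same way the new branch of ProcessClaudeModelAttributes does; the JSON values and the ClaudeUsageSketch class name are invented for illustration.

// Minimal standalone sketch. Assumes a response body in the newer Claude format;
// the values below are made up. It mirrors how the updated processor reads token
// usage directly instead of estimating it from text length.
using System;
using System.Collections.Generic;
using System.Text.Json;

class ClaudeUsageSketch
{
    static void Main()
    {
        const string responseJson =
            "{ \"usage\": { \"input_tokens\": 456, \"output_tokens\": 789 }, \"stop_reason\": \"end_turn\" }";

        // Same shape the processor works with: top-level keys mapped to JsonElement.
        var jsonBody = JsonSerializer.Deserialize<Dictionary<string, JsonElement>>(responseJson)!;

        if (jsonBody.TryGetValue("usage", out var usage))
        {
            // Token counts come straight from the response's usage object.
            if (usage.TryGetProperty("input_tokens", out var inputTokens))
            {
                Console.WriteLine($"input tokens:  {inputTokens.GetInt32()}");
            }

            if (usage.TryGetProperty("output_tokens", out var outputTokens))
            {
                Console.WriteLine($"output tokens: {outputTokens.GetInt32()}");
            }
        }

        if (jsonBody.TryGetValue("stop_reason", out var finishReasons))
        {
            // The finish reason is still a single string on the response body.
            Console.WriteLine($"finish reason: {finishReasons.GetString()}");
        }
    }
}

Reading the reported counts drops the length-divided-by-six heuristic, so the emitted token attributes reflect what the service reports rather than an estimate.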

test/contract-tests/tests/test/amazon/awssdk/awssdk_test.py

Lines changed: 2 additions & 2 deletions
@@ -487,8 +487,8 @@ def test_bedrock_runtime_invoke_model_mistral(self):
                 _GEN_AI_REQUEST_TEMPERATURE: 0.123,
                 _GEN_AI_REQUEST_TOP_P: 0.456,
                 _GEN_AI_REQUEST_MAX_TOKENS: 123,
-                _GEN_AI_USAGE_INPUT_TOKENS: 12,
-                _GEN_AI_USAGE_OUTPUT_TOKENS: 10,
+                _GEN_AI_USAGE_INPUT_TOKENS: 456,
+                _GEN_AI_USAGE_OUTPUT_TOKENS: 789,
                 _GEN_AI_RESPONSE_FINISH_REASONS: ["finish_reason"],
             },
             span_name="Bedrock Runtime.InvokeModel",
