
Commit 8f39f7d

add support for choice events and messages
1 parent 5a1ff07 commit 8f39f7d

File tree

7 files changed: +378 −8 lines changed

instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py

Lines changed: 43 additions & 5 deletions
@@ -380,11 +380,25 @@ def _get_request_messages(self):
 
         messages = decoded_body.get("messages", [])
         if not messages:
-            # transform old school amazon titan invokeModel api to messages
-            if input_text := decoded_body.get("inputText"):
-                messages = [
-                    {"role": "user", "content": [{"text": input_text}]}
-                ]
+            model_id = self._call_context.params.get(_MODEL_ID_KEY)
+            if "amazon.titan" in model_id:
+                # transform old school amazon titan invokeModel api to messages
+                if input_text := decoded_body.get("inputText"):
+                    messages = [
+                        {"role": "user", "content": [{"text": input_text}]}
+                    ]
+            elif "cohere.command-r" in model_id:
+                # chat_history can be converted to messages; for now, just use message
+                if input_text := decoded_body.get("message"):
+                    messages = [
+                        {"role": "user", "content": [{"text": input_text}]}
+                    ]
+            elif "cohere.command" in model_id or "meta.llama" in model_id or "mistral.mistral" in model_id:
+                # transform old school cohere command api to messages
+                if input_text := decoded_body.get("prompt"):
+                    messages = [
+                        {"role": "user", "content": [{"text": input_text}]}
+                    ]
 
         return system_messages + messages
 
@@ -831,6 +845,12 @@ def _handle_cohere_command_r_response(
         span.set_attribute(
             GEN_AI_RESPONSE_FINISH_REASONS, [response_body["finish_reason"]]
         )
+
+        event_logger = instrumentor_context.event_logger
+        choice = _Choice.from_invoke_cohere_command_r(
+            response_body, capture_content
+        )
+        event_logger.emit(choice.to_choice_event())
 
     def _handle_cohere_command_response(
         self,
@@ -849,6 +869,12 @@ def _handle_cohere_command_response(
         span.set_attribute(
             GEN_AI_RESPONSE_FINISH_REASONS, [generations["finish_reason"]]
         )
+
+        event_logger = instrumentor_context.event_logger
+        choice = _Choice.from_invoke_cohere_command(
+            response_body, capture_content
+        )
+        event_logger.emit(choice.to_choice_event())
 
     def _handle_meta_llama_response(
         self,
@@ -870,6 +896,12 @@ def _handle_meta_llama_response(
             GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stop_reason"]]
         )
 
+        event_logger = instrumentor_context.event_logger
+        choice = _Choice.from_invoke_meta_llama(
+            response_body, capture_content
+        )
+        event_logger.emit(choice.to_choice_event())
+
     def _handle_mistral_ai_response(
         self,
         span: Span,
@@ -883,6 +915,12 @@
             span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, math.ceil(len(outputs["text"]) / 6))
         if "stop_reason" in outputs:
             span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [outputs["stop_reason"]])
+
+        event_logger = instrumentor_context.event_logger
+        choice = _Choice.from_invoke_mistral_mistral(
+            response_body, capture_content
+        )
+        event_logger.emit(choice.to_choice_event())
 
     def on_error(
         self,
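
The new _get_request_messages branches normalize the provider-specific invokeModel request bodies into the common messages shape used by the rest of the instrumentation. A minimal sketch of the inputs involved, using illustrative request bodies rather than anything from this commit:

import json

# Cohere Command R carries the user turn under "message" ...
cohere_r_body = json.dumps({"message": "Say this is a test", "max_tokens": 10})

# ... while Cohere Command, Meta Llama and Mistral carry it under "prompt".
llama_body = json.dumps({"prompt": "Say this is a test", "max_gen_len": 10})

# Either way, the extension records the prompt as:
# [{"role": "user", "content": [{"text": "Say this is a test"}]}]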

instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py

Lines changed: 42 additions & 0 deletions
@@ -519,6 +519,48 @@ def from_invoke_anthropic_claude(
             message["content"] = response["content"]
         return cls(message, response["stop_reason"], index=0)
 
+    @classmethod
+    def from_invoke_cohere_command_r(
+        cls, response: dict[str, Any], capture_content: bool
+    ) -> _Choice:
+        if capture_content:
+            message = {"content": response["text"]}
+        else:
+            message = {}
+        return cls(message, response["finish_reason"], index=0)
+
+    @classmethod
+    def from_invoke_cohere_command(
+        cls, response: dict[str, Any], capture_content: bool
+    ) -> _Choice:
+        result = response["generations"][0]
+        if capture_content:
+            message = {"content": result["text"]}
+        else:
+            message = {}
+        return cls(message, result["finish_reason"], index=0)
+
+    @classmethod
+    def from_invoke_meta_llama(
+        cls, response: dict[str, Any], capture_content: bool
+    ) -> _Choice:
+        if capture_content:
+            message = {"content": response["generation"]}
+        else:
+            message = {}
+        return cls(message, response["stop_reason"], index=0)
+
+    @classmethod
+    def from_invoke_mistral_mistral(
+        cls, response: dict[str, Any], capture_content: bool
+    ) -> _Choice:
+        result = response["outputs"][0]
+        if capture_content:
+            message = {"content": result["text"]}
+        else:
+            message = {}
+        return cls(message, result["stop_reason"], index=0)
+
     def _to_body_dict(self) -> dict[str, Any]:
         return {
             "finish_reason": self.finish_reason,
@@ -0,0 +1,71 @@
+interactions:
+- request:
+    body: |-
+      {
+        "message": "Say this is a test",
+        "max_tokens": 10,
+        "temperature": 0.8,
+        "p": 0.99,
+        "stop_sequences": [
+          "|"
+        ]
+      }
+    headers:
+      Content-Length:
+      - '107'
+      User-Agent:
+      - Boto3/1.35.56 md/Botocore#1.35.56 ua/2.0 os/macos#24.3.0 md/arch#arm64 lang/python#3.10.16
+        md/pyimpl#CPython cfg/retry-mode#legacy Botocore/1.35.56
+      X-Amz-Date:
+      - 20250410T224018Z
+      X-Amz-Security-Token:
+      - test_aws_security_token
+      X-Amzn-Trace-Id:
+      - Root=1-51673f20-3dd018601b078c785c032a50;Parent=c05b84f54e2c35ee;Sampled=1
+      amz-sdk-invocation-id:
+      - c81a224e-d8b3-4416-aaca-858b478b7db4
+      amz-sdk-request:
+      - attempt=1
+      authorization:
+      - Bearer test_aws_authorization
+    method: POST
+    uri: https://bedrock-runtime.us-west-2.amazonaws.com/model/cohere.command-r-v1%3A0/invoke
+  response:
+    body:
+      string: |-
+        {
+          "response_id": "379ed018/aa2df2bf-edc8-483f-8cd0-d22d04ba34ba",
+          "text": "This is a test. How are you doing today",
+          "generation_id": "2d74c447-266d-4286-8425-a92ad7fc0cbc",
+          "chat_history": [
+            {
+              "role": "USER",
+              "message": "Say this is a test"
+            },
+            {
+              "role": "CHATBOT",
+              "message": "This is a test. How are you doing today"
+            }
+          ],
+          "finish_reason": "MAX_TOKENS"
+        }
+    headers:
+      Connection:
+      - keep-alive
+      Content-Type:
+      - application/json
+      Date:
+      - Thu, 10 Apr 2025 22:40:18 GMT
+      Set-Cookie: test_set_cookie
+      X-Amzn-Bedrock-Input-Token-Count:
+      - '5'
+      X-Amzn-Bedrock-Invocation-Latency:
+      - '196'
+      X-Amzn-Bedrock-Output-Token-Count:
+      - '10'
+      x-amzn-RequestId:
+      - c1fd38df-b669-4464-9f4b-4cf32c32fde8
+    status:
+      code: 200
+      message: OK
+version: 1
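
The recorded interactions above follow the vcrpy cassette format used by the botocore instrumentation tests. A minimal sketch of a test that would replay a cassette like this instead of calling AWS, assuming a pytest-recording setup; the test name and assertions are illustrative, not part of this commit:

import json

import boto3
import pytest


@pytest.mark.vcr()  # replay the recorded HTTP exchange from the cassette
def test_invoke_model_cohere_command_r():
    client = boto3.client("bedrock-runtime", region_name="us-west-2")
    body = json.dumps(
        {
            "message": "Say this is a test",
            "max_tokens": 10,
            "temperature": 0.8,
            "p": 0.99,
            "stop_sequences": ["|"],
        }
    )
    response = client.invoke_model(modelId="cohere.command-r-v1:0", body=body)
    result = json.loads(response["body"].read())
    assert result["finish_reason"] == "MAX_TOKENS"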

@@ -0,0 +1,66 @@
+interactions:
+- request:
+    body: |-
+      {
+        "prompt": "Say this is a test",
+        "max_tokens": 10,
+        "temperature": 0.8,
+        "p": 1,
+        "stop_sequences": [
+          "|"
+        ]
+      }
+    headers:
+      Content-Length:
+      - '103'
+      User-Agent:
+      - Boto3/1.35.56 md/Botocore#1.35.56 ua/2.0 os/macos#24.3.0 md/arch#arm64 lang/python#3.10.16
+        md/pyimpl#CPython cfg/retry-mode#legacy Botocore/1.35.56
+      X-Amz-Date:
+      - 20250410T223753Z
+      X-Amz-Security-Token:
+      - test_aws_security_token
+      X-Amzn-Trace-Id:
+      - Root=1-461bf523-e7a0388221ce8a1d7a31fc5b;Parent=a55a9398f0f3d364;Sampled=1
+      amz-sdk-invocation-id:
+      - 17fbe175-373f-4c10-8047-8bba2fd8e22c
+      amz-sdk-request:
+      - attempt=1
+      authorization:
+      - Bearer test_aws_authorization
+    method: POST
+    uri: https://bedrock-runtime.us-west-2.amazonaws.com/model/cohere.command-light-text-v14/invoke
+  response:
+    body:
+      string: |-
+        {
+          "id": "a09c1c60-6608-482a-b98d-764e4d87fcd1",
+          "generations": [
+            {
+              "id": "8b38ce59-5f77-4b79-82c3-58eb72d4b1a2",
+              "text": " Let it be a test of knowledge, skills,",
+              "finish_reason": "MAX_TOKENS"
+            }
+          ],
+          "prompt": "Say this is a test"
+        }
+    headers:
+      Connection:
+      - keep-alive
+      Content-Type:
+      - application/json
+      Date:
+      - Thu, 10 Apr 2025 22:37:53 GMT
+      Set-Cookie: test_set_cookie
+      X-Amzn-Bedrock-Input-Token-Count:
+      - '5'
+      X-Amzn-Bedrock-Invocation-Latency:
+      - '248'
+      X-Amzn-Bedrock-Output-Token-Count:
+      - '10'
+      x-amzn-RequestId:
+      - a09c1c60-6608-482a-b98d-764e4d87fcd1
+    status:
+      code: 200
+      message: OK
+version: 1

Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
+interactions:
+- request:
+    body: |-
+      {
+        "prompt": "Say this is a test",
+        "max_gen_len": 10,
+        "temperature": 0.8,
+        "top_p": 1
+      }
+    headers:
+      Content-Length:
+      - '83'
+      User-Agent:
+      - Boto3/1.35.56 md/Botocore#1.35.56 ua/2.0 os/macos#24.3.0 md/arch#arm64 lang/python#3.10.16
+        md/pyimpl#CPython cfg/retry-mode#legacy Botocore/1.35.56
+      X-Amz-Date:
+      - 20250410T224059Z
+      X-Amz-Security-Token:
+      - test_aws_security_token
+      X-Amzn-Trace-Id:
+      - Root=1-9930be15-db431eb0bf3c21be1ed23dde;Parent=0c4e46fcf0474877;Sampled=1
+      amz-sdk-invocation-id:
+      - c3afc5f6-9912-4892-b48e-dff457911e1c
+      amz-sdk-request:
+      - attempt=1
+      authorization:
+      - Bearer test_aws_authorization
+    method: POST
+    uri: https://bedrock-runtime.us-west-2.amazonaws.com/model/meta.llama3-1-70b-instruct-v1%3A0/invoke
+  response:
+    body:
+      string: |-
+        {
+          "generation": " post. This is a test post. This is",
+          "prompt_token_count": 5,
+          "generation_token_count": 10,
+          "stop_reason": "length"
+        }
+    headers:
+      Connection:
+      - keep-alive
+      Content-Type:
+      - application/json
+      Date:
+      - Thu, 10 Apr 2025 22:40:59 GMT
+      Set-Cookie: test_set_cookie
+      X-Amzn-Bedrock-Input-Token-Count:
+      - '5'
+      X-Amzn-Bedrock-Invocation-Latency:
+      - '604'
+      X-Amzn-Bedrock-Output-Token-Count:
+      - '10'
+      x-amzn-RequestId:
+      - 4b8da626-00b1-4535-b75b-a7e985e9877e
+    status:
+      code: 200
+      message: OK
+version: 1

@@ -0,0 +1,63 @@
+interactions:
+- request:
+    body: |-
+      {
+        "prompt": "Say this is a test",
+        "max_tokens": 10,
+        "temperature": 0.8,
+        "top_p": 1,
+        "stop": [
+          "|"
+        ]
+      }
+    headers:
+      Content-Length:
+      - '97'
+      User-Agent:
+      - Boto3/1.35.56 md/Botocore#1.35.56 ua/2.0 os/macos#24.3.0 md/arch#arm64 lang/python#3.10.16
+        md/pyimpl#CPython cfg/retry-mode#legacy Botocore/1.35.56
+      X-Amz-Date:
+      - 20250410T224059Z
+      X-Amz-Security-Token:
+      - test_aws_security_token
+      X-Amzn-Trace-Id:
+      - Root=1-332feae9-215a9553da54b4789bf12414;Parent=f3899b50f888d5d7;Sampled=1
+      amz-sdk-invocation-id:
+      - 2d410cd2-ef68-48af-9cad-b7ddf3b1d844
+      amz-sdk-request:
+      - attempt=1
+      authorization:
+      - Bearer test_aws_authorization
+    method: POST
+    uri: https://bedrock-runtime.us-west-2.amazonaws.com/model/mistral.mistral-7b-instruct-v0%3A2/invoke
+  response:
+    body:
+      string: |-
+        {
+          "outputs": [
+            {
+              "text": "\n\nA man stands before a crowd of people",
+              "stop_reason": "length"
+            }
+          ]
+        }
+    headers:
+      Connection:
+      - keep-alive
+      Content-Type:
+      - application/json
+      Date:
+      - Thu, 10 Apr 2025 22:41:00 GMT
+      Set-Cookie: test_set_cookie
+      X-Amzn-Bedrock-Input-Token-Count:
+      - '6'
+      X-Amzn-Bedrock-Invocation-Latency:
+      - '174'
+      X-Amzn-Bedrock-Output-Token-Count:
+      - '10'
+      x-amzn-RequestId:
+      - 68547039-f30c-4aff-b368-31bb6d9b9d07
+    status:
+      code: 200
+      message: OK
+version: 1
