Skip to content

Commit 2ff5480

Browse files
feat(bedrock): update example code
- Add streaming functionality example - Demonstrate vendor-specific attributes - Add proper error handling - Update runner to show both examples Co-Authored-By: [email protected] <[email protected]>
1 parent a8ed8d7 commit 2ff5480

File tree

2 files changed

+84
-22
lines changed

2 files changed

+84
-22
lines changed
Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,26 @@
1-
from examples.awsbedrock_examples.converse import use_converse
1+
from examples.awsbedrock_examples.converse import use_converse, use_converse_stream
22
from langtrace_python_sdk import langtrace, with_langtrace_root_span
33

44
langtrace.init()
55

6-
76
class AWSBedrockRunner:
    """Runs the AWS Bedrock examples: one standard and one streaming completion."""

    @with_langtrace_root_span("AWS_Bedrock")
    def run(self):
        """Execute both example flows, printing model output to stdout.

        The streaming example's failures are caught and reported rather than
        propagated so the standard example's output is still visible.
        """
        # Standard completion
        print("\nRunning standard completion example...")
        response = use_converse()
        if response:
            # Converse API response shape: output.message.content is a list
            # of content blocks, each with a "text" field.
            content = response.get('output', {}).get('message', {}).get('content', [])
            if content:
                print(f"Response: {content[0].get('text', '')}")

        # Streaming completion
        print("\nRunning streaming completion example...")
        try:
            for chunk in use_converse_stream():
                # ConverseStream events carry incremental text under
                # contentBlockDelta.delta.text — NOT output.message.content,
                # which only appears in the non-streaming response.
                text = chunk.get('contentBlockDelta', {}).get('delta', {}).get('text')
                if text:
                    print(f"Chunk: {text}", end='', flush=True)
            print("\nStreaming complete!")
        except Exception as e:
            print(f"\nStreaming failed: {e}")
Lines changed: 65 additions & 19 deletions
Original file line numberDiff line numberDiff line change
import os
import boto3
from typing import Dict, Iterator

from opentelemetry import trace
# The concrete provider lives in the SDK package; `opentelemetry.trace.TracerProvider`
# is the abstract API class and raises TypeError when instantiated.
from opentelemetry.sdk.trace import TracerProvider
from langtrace_python_sdk import langtrace, with_langtrace_root_span
from langtrace_python_sdk.instrumentation.aws_bedrock import AWSBedrockInstrumentation

# Initialize tracing: register a concrete tracer provider, hook Bedrock
# client calls, then initialize Langtrace itself.
# NOTE(review): langtrace.init() may already install instrumentation —
# confirm the explicit instrument() call is not redundant.
trace.set_tracer_provider(TracerProvider())
AWSBedrockInstrumentation().instrument()
langtrace.init()
def get_bedrock_client():
    """Create an instrumented AWS Bedrock runtime client.

    Credentials come from the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
    environment variables; a KeyError propagates if either is missing.
    """
    credentials = {
        "aws_access_key_id": os.environ["AWS_ACCESS_KEY_ID"],
        "aws_secret_access_key": os.environ["AWS_SECRET_ACCESS_KEY"],
    }
    return boto3.client("bedrock-runtime", region_name="us-east-1", **credentials)
@with_langtrace_root_span()
def use_converse() -> Dict:
    """Example of a standard (non-streaming) Converse completion request.

    Sends a single user message to Claude 3 Haiku and returns the raw
    Converse API response dict. On failure, prints a diagnostic and
    re-raises the original exception.
    """
    client = get_bedrock_client()
    model_id = "anthropic.claude-3-haiku-20240307-v1:0"

    try:
        response = client.converse(
            modelId=model_id,
            messages=[{
                "role": "user",
                "content": [{"text": "Write a story about a magic backpack."}],
            }],
            inferenceConfig={
                "maxTokens": 4096,
                "temperature": 0.7,
                # Converse API field names are camelCase: "top_p" is rejected
                # by parameter validation; the correct key is "topP".
                "topP": 0.9,
                "stopSequences": ["\n\nHuman:"],
            },
            # Vendor-specific (Anthropic) fields are passed through verbatim.
            # "anthropic_version" is intentionally omitted: the Converse API
            # sets it itself, and an unexpected value causes a ValidationException.
            additionalModelRequestFields={
                "top_k": 250,
            }
        )
        return response
    except Exception as e:
        print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
        raise
@with_langtrace_root_span()
def use_converse_stream() -> Iterator[Dict]:
    """Example of a streaming Converse completion request.

    Returns the response's "stream" EventStream, which yields event dicts
    (messageStart, contentBlockDelta, messageStop, ...). On failure, prints
    a diagnostic and re-raises the original exception.
    """
    client = get_bedrock_client()
    model_id = "anthropic.claude-3-haiku-20240307-v1:0"

    try:
        response = client.converse_stream(
            modelId=model_id,
            messages=[{
                "role": "user",
                "content": [{"text": "Tell me a story about a robot learning to dance."}],
            }],
            inferenceConfig={
                "maxTokens": 4096,
                "temperature": 0.7,
                # Converse API field names are camelCase: "top_p" is rejected
                # by parameter validation; the correct key is "topP".
                "topP": 0.9,
                "stopSequences": ["\n\nHuman:"],
            },
            # Vendor-specific (Anthropic) fields are passed through verbatim;
            # "anthropic_version" is omitted (Converse sets it itself).
            additionalModelRequestFields={
                "top_k": 250,
            }
        )
        # converse_stream returns {"stream": EventStream, ...}. Returning the
        # raw dict would make callers iterate its string keys; return the
        # EventStream so iteration yields the event dicts, matching the
        # declared Iterator[Dict] return type.
        return response["stream"]
    except Exception as e:
        print(f"ERROR: Can't invoke streaming for '{model_id}'. Reason: {e}")
        raise

0 commit comments

Comments
 (0)