import os
from typing import Dict, Iterator

import boto3
from opentelemetry import trace
from opentelemetry.trace import TracerProvider

from langtrace_python_sdk import langtrace, with_langtrace_root_span
from langtrace_python_sdk.instrumentation.aws_bedrock import AWSBedrockInstrumentation
69
7- def use_converse ():
8- model_id = "anthropic.claude-3-haiku-20240307-v1:0"
9- client = boto3 .client (
10+ # Initialize tracing
11+ trace .set_tracer_provider (TracerProvider ())
12+ AWSBedrockInstrumentation ().instrument ()
13+ langtrace .init ()
14+
15+ def get_bedrock_client ():
16+ """Create an instrumented AWS Bedrock client."""
17+ return boto3 .client (
1018 "bedrock-runtime" ,
1119 region_name = "us-east-1" ,
1220 aws_access_key_id = os .environ ["AWS_ACCESS_KEY_ID" ],
1321 aws_secret_access_key = os .environ ["AWS_SECRET_ACCESS_KEY" ],
1422 )
15- conversation = [
16- {
17- "role" : "user" ,
18- "content" : [{ "text" : "Write a story about a magic backpack." }],
19- }
20- ]
23+
24+ @ with_langtrace_root_span ()
25+ def use_converse () -> Dict :
26+ """Example of standard completion request with vendor attributes."""
27+ client = get_bedrock_client ()
28+ model_id = "anthropic.claude-3-haiku-20240307-v1:0"
2129
2230 try :
2331 response = client .converse (
2432 modelId = model_id ,
25- messages = conversation ,
26- inferenceConfig = {"maxTokens" :4096 ,"temperature" :0 },
27- additionalModelRequestFields = {"top_k" :250 }
33+ messages = [{
34+ "role" : "user" ,
35+ "content" : [{"text" : "Write a story about a magic backpack." }],
36+ }],
37+ inferenceConfig = {
38+ "maxTokens" : 4096 ,
39+ "temperature" : 0.7 ,
40+ "top_p" : 0.9 ,
41+ "stopSequences" : ["\n \n Human:" ],
42+ },
43+ additionalModelRequestFields = {
44+ "top_k" : 250 ,
45+ "anthropic_version" : "bedrock-2024-02-20" ,
46+ }
2847 )
29- response_text = response ["output" ]["message" ]["content" ][0 ]["text" ]
30- print (response_text )
31-
32- except (Exception ) as e :
48+ return response
49+ except Exception as e :
3350 print (f"ERROR: Can't invoke '{ model_id } '. Reason: { e } " )
34- exit (1 )
51+ raise
52+
53+ @with_langtrace_root_span ()
54+ def use_converse_stream () -> Iterator [Dict ]:
55+ """Example of streaming completion with vendor attributes."""
56+ client = get_bedrock_client ()
57+ model_id = "anthropic.claude-3-haiku-20240307-v1:0"
58+
59+ try :
60+ response = client .converse_stream (
61+ modelId = model_id ,
62+ messages = [{
63+ "role" : "user" ,
64+ "content" : [{"text" : "Tell me a story about a robot learning to dance." }],
65+ }],
66+ inferenceConfig = {
67+ "maxTokens" : 4096 ,
68+ "temperature" : 0.7 ,
69+ "top_p" : 0.9 ,
70+ "stopSequences" : ["\n \n Human:" ],
71+ },
72+ additionalModelRequestFields = {
73+ "top_k" : 250 ,
74+ "anthropic_version" : "bedrock-2024-02-20" ,
75+ }
76+ )
77+ return response
78+ except Exception as e :
79+ print (f"ERROR: Can't invoke streaming for '{ model_id } '. Reason: { e } " )
80+ raise