2 files changed: +25 -2

example-apps/chatbot-rag-app

@@ -127,6 +127,13 @@ dotenv run -- python api/app.py
 If you set `OTEL_SDK_DISABLED=false` in your `.env` file, the app will send
 logs, metrics and traces to an OpenTelemetry compatible endpoint.
 
+This happens automatically when using Docker. If running with Python directly,
+prefix `python` with `opentelemetry-instrument` to enable OpenTelemetry.
+
+```bash
+dotenv run -- opentelemetry-instrument python api/app.py
+```
+
 [env.example](env.example) defaults to use Elastic APM server, started by
 [docker-compose-elastic.yml](../../docker). If you start your Elastic stack
 this way, you can access Kibana like this, authenticating with the username
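For context on the setting referenced in the hunk above: a hedged sketch of what the OpenTelemetry block of a `.env` file could look like, built from standard OTel environment variables. The service name, endpoint, and token below are placeholder assumptions, not the contents of env.example.

```bash
# Hypothetical values for illustration; the app's real defaults live in env.example.
OTEL_SDK_DISABLED=false
OTEL_SERVICE_NAME=chatbot-rag-app
OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:8200
OTEL_EXPORTER_OTLP_HEADERS="Authorization=Bearer <apm-secret-token>"
```

Setting `OTEL_SDK_DISABLED=true` instead turns the SDK off, so no telemetry is exported.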
 import os
 
-import boto3
 from langchain_aws import ChatBedrock
 from langchain_cohere import ChatCohere
 from langchain_google_vertexai import ChatVertexAI
@@ -41,8 +40,25 @@ def init_bedrock(temperature):
     from langtrace_python_sdk.instrumentation import AWSBedrockInstrumentation
 
     AWSBedrockInstrumentation().instrument()
+
+    # TODO: Remove after https://github.com/Scale3-Labs/langtrace-python-sdk/issues/458
+    from langtrace_python_sdk.instrumentation.aws_bedrock.patch import patch_aws_bedrock
+    from opentelemetry.trace import get_tracer
+    import importlib.metadata
+    from wrapt import wrap_function_wrapper as _W
+
+    tracer = get_tracer(
+        __name__,
+    )
+    version = importlib.metadata.version("boto3")
+
+    _W(
+        module="boto3.session",
+        name="Session.client",
+        wrapper=patch_aws_bedrock(tracer, version),
+    )
+
     return ChatBedrock(
-        client=boto3.client("bedrock-runtime"),
         model_id=os.getenv("CHAT_MODEL"),
         streaming=True,
         model_kwargs={"temperature": temperature},
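The workaround above uses wrapt's `wrap_function_wrapper` to monkey-patch `boto3.session.Session.client`, so langtrace's `patch_aws_bedrock` wrapper runs whenever a boto3 client is created, including the `bedrock-runtime` client that `ChatBedrock` builds itself now that the explicit `client=` argument is gone. Below is a minimal, hedged sketch of that wrapping mechanism; `log_client_creation` is a hypothetical stand-in that only prints, whereas langtrace's real wrapper is what adds OpenTelemetry tracing around Bedrock usage.

```python
import boto3
from wrapt import wrap_function_wrapper


def log_client_creation(wrapped, instance, args, kwargs):
    # wrapt wrapper signature: `wrapped` is the original Session.client,
    # `instance` is the boto3 Session it is bound to.
    service_name = args[0] if args else kwargs.get("service_name")
    print(f"creating boto3 client for: {service_name}")
    return wrapped(*args, **kwargs)


# Replace boto3.session.Session.client with the wrapper above.
wrap_function_wrapper(
    module="boto3.session",
    name="Session.client",
    wrapper=log_client_creation,
)

# Any client created from here on passes through the wrapper.
client = boto3.client("bedrock-runtime", region_name="us-east-1")
```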