Skip to content

Commit 12e48c1

Browse files
committed
lockfiles updated
1 parent e8759c5 commit 12e48c1

File tree

10 files changed

+2452
-470
lines changed

10 files changed

+2452
-470
lines changed
Lines changed: 18 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,32 @@
1-
This sample contains part of the LangGraph chatbot demo taken from
2-
https://python.langchain.com/docs/tutorials/chatbot, running with OTel instrumentation. It
3-
sends traces and logs to the OTel collector which sends them to GCP. Docker compose wraps
4-
everything to make it easy to run.
1+
This sample contains a Streamlit + LangGraph chatbot demo. It sends traces and logs to GCP
2+
with the OTLP exporter and opentelemetry-exporter-gcp-logging exporters.
3+
4+
The `run_streamlit.py` script allows you to easily run the sample with auto instrumentation
5+
enabled and sending telemetry to GCP. It just sets some environment variables and runs with
6+
`opentelemetry-instrument`.
57

68
## Running the example
79

8-
I recommend running in Cloud Shell, it's super simple. You will see GenAI spans in trace
9-
explorer right away. Make sure the Vertex and Trace APIs are enabled in the project.
10+
First, make sure you have `uv` installed: https://docs.astral.sh/uv/getting-started/installation/.
11+
12+
Optionally, set a project with `export GOOGLE_CLOUD_PROJECT=...`. The app respects ADC.
1013

11-
### Cloud Shell or GCE
14+
### Without cloning
1215

1316
```sh
14-
git clone --branch=vertex-langgraph https://github.com/aabmass/opentelemetry-python-contrib.git
15-
cd opentelemetry-python-contrib/instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo
16-
docker compose up --build --abort-on-container-exit
17+
uv run --upgrade https://raw.githubusercontent.com/aabmass/opentelemetry-python-contrib/refs/heads/vertex-langgraph/instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/run_streamlit.py
1718
```
1819

19-
### Locally with Application Default Credentials
20+
### With cloned repo
2021

2122
```sh
2223
git clone --branch=vertex-langgraph https://github.com/aabmass/opentelemetry-python-contrib.git
2324
cd opentelemetry-python-contrib/instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo
25+
uv run run_streamlit.py
26+
```
27+
28+
### Without auto instrumentation
2429

25-
# Export the credentials to `GOOGLE_APPLICATION_CREDENTIALS` environment variable so it is
26-
# available inside the docker containers
27-
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.config/gcloud/application_default_credentials.json
28-
# Lets collector read mounted config
29-
export USERID="$(id -u)"
30-
# Specify the project ID
31-
export GOOGLE_CLOUD_PROJECT=<your project id>
32-
docker compose up --build --abort-on-container-exit
30+
```sh
31+
uv run streamlit run src/langgraph_chatbot_demo/langchain_history.py
3332
```

instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/docker-compose.yaml

Lines changed: 0 additions & 46 deletions
This file was deleted.

instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/otel-collector-config.yaml

Lines changed: 0 additions & 42 deletions
This file was deleted.

instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/pyproject.toml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ dependencies = [
1111
"langchain-google-vertexai>=2.0.7",
1212
"langgraph>0.2.27",
1313
"opentelemetry-distro>=0.50b0",
14-
"opentelemetry-exporter-gcp-logging",
14+
"opentelemetry-exporter-gcp-logging>=1.9.0a0",
1515
"opentelemetry-exporter-gcp-trace>=1.8.0",
1616
"opentelemetry-exporter-otlp-proto-grpc>=1.29.0",
1717
"opentelemetry-instrumentation-aiohttp-client>=0.50b0",
@@ -24,8 +24,7 @@ dependencies = [
2424
]
2525

2626
[tool.uv.sources]
27-
opentelemetry-instrumentation-vertexai = { git = "https://github.com/aabmass/opentelemetry-python-contrib.git", subdirectory = "instrumentation-genai/opentelemetry-instrumentation-vertexai", branch = "vertex-langgraph" }
28-
opentelemetry-exporter-gcp-logging = { git = "https://github.com/DylanRussell/opentelemetry-operations-python.git", subdirectory = "opentelemetry-exporter-gcp-logging", branch = "logging_exporter" }
27+
opentelemetry-instrumentation-vertexai = { path = "../../" }
2928

3029
[dependency-groups]
3130
dev = ["ipython>=8.18.1", "ruff>=0.9.2"]
Lines changed: 5 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,55 +1,14 @@
11
# /// script
22
# requires-python = ">=3.13"
33
# dependencies = [
4-
# "langgraph-chatbot-demo",
4+
# "langgraph-chatbot-demo",
55
# ]
66
#
77
# [tool.uv.sources]
8-
# langgraph-chatbot-demo = { path = "." }
8+
# langgraph-chatbot-demo = { git = "https://github.com/aabmass/opentelemetry-python-contrib.git", subdirectory = "instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo", branch = "vertex-langgraph" }
9+
#
910
# ///
1011

11-
import os
12-
import importlib.util
13-
import subprocess
14-
15-
import google.auth
16-
import google.auth.transport
17-
import google.auth.transport.requests
18-
19-
creds, project_id = google.auth.default()
20-
creds.refresh(google.auth.transport.requests.Request())
21-
22-
23-
def setenv_default(k: str, v: str) -> None:
24-
if k not in os.environ:
25-
os.environ[k] = v
26-
27-
28-
setenv_default(
29-
"OTEL_EXPORTER_OTLP_ENDPOINT", "https://telemetry.googleapis.com:443"
30-
)
31-
setenv_default("OTEL_SERVICE_NAME", "langgraph-chatbot-demo")
32-
setenv_default("OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED", "true")
33-
setenv_default("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "true")
34-
setenv_default("OTEL_LOGS_EXPORTER", "gcp_logging")
35-
setenv_default("OTEL_RESOURCE_ATTRIBUTES", f"gcp.project_id={project_id}")
36-
setenv_default(
37-
"OTEL_EXPORTER_OTLP_HEADERS",
38-
f"authorization=Bearer {creds.token},x-goog-user-project={project_id}",
39-
)
40-
41-
langchain_app_spec = importlib.util.find_spec(
42-
"langgraph_chatbot_demo.langchain_history"
43-
)
44-
if not (langchain_app_spec and langchain_app_spec.origin):
45-
raise Exception("Could not find langchain_history.py")
12+
from langgraph_chatbot_demo.run_streamlit import run_streamlit
4613

47-
subprocess.run(
48-
[
49-
"opentelemetry-instrument",
50-
"streamlit",
51-
"run",
52-
langchain_app_spec.origin,
53-
],
54-
check=True,
55-
)
14+
run_streamlit()

0 commit comments

Comments
 (0)