Skip to content

Commit aee127c

Browse files
committed
With streamlit
1 parent 0366d8e commit aee127c

File tree

5 files changed

+2207
-159
lines changed

5 files changed

+2207
-159
lines changed

instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/docker-compose.yaml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,12 @@ services:
1111
ENV PATH="/app/.venv/bin:$PATH"
1212
COPY . /app
1313
ENTRYPOINT []
14-
CMD ["opentelemetry-instrument", "python", "chatbot.py"]
14+
# CMD ["opentelemetry-instrument", "python", "chatbot.py"]
15+
CMD ["opentelemetry-instrument", "streamlit", "run", "--server.port=8501", "langchain_history.py"]
1516
volumes:
1617
- ${GOOGLE_APPLICATION_CREDENTIALS:-/dev/null}:${GOOGLE_APPLICATION_CREDENTIALS:-/dev/null}:ro
18+
ports:
19+
- 8501:8501
1720
environment:
1821
- OTEL_EXPORTER_OTLP_ENDPOINT=http://otelcol:4317
1922
- OTEL_SERVICE_NAME=langgraph-chatbot-demo
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
"""Adapted from https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/basic_memory.py

Streamlit chat demo: a Vertex AI (Gemini) chat model with conversation
memory stored in Streamlit session state, with each model invocation
wrapped in an OpenTelemetry span.
"""

from os import environ

from opentelemetry import trace
import streamlit as st
from langchain_community.chat_message_histories import (
    StreamlitChatMessageHistory,
)
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_google_vertexai import ChatVertexAI

tracer = trace.get_tracer(__name__)

st.set_page_config(page_title="StreamlitChatMessageHistory", page_icon="📖")
st.title("📖 StreamlitChatMessageHistory")

# Bare string literals are rendered as markdown by Streamlit ("magic").
"""
A basic example of using StreamlitChatMessageHistory to help LLMChain remember messages in a conversation.
The messages are stored in Session State across re-runs automatically. You can view the contents of Session State
in the expander below. View the
[source code for this app](https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/basic_memory.py).
"""

# Set up memory, backed by st.session_state under the given key.
msgs = StreamlitChatMessageHistory(key="langchain_messages")
if not msgs.messages:
    msgs.add_ai_message("How can I help you?")

view_messages = st.expander("View the message contents in session state")

# Set up the LangChain, passing in Message History

prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are an AI chatbot having a conversation with a human.",
        ),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{question}"),
    ]
)

llm = ChatVertexAI(
    model="gemini-1.5-flash",
    project=environ.get("GOOGLE_CLOUD_PROJECT", None),
)
chain = prompt | llm
chain_with_history = RunnableWithMessageHistory(
    chain,
    lambda session_id: msgs,
    input_messages_key="question",
    history_messages_key="history",
)

# Render current messages from StreamlitChatMessageHistory
for msg in msgs.messages:
    st.chat_message(msg.type).write(msg.content)

# If user inputs a new message, generate and draw a new response.
# Use a distinct name here: the original walrus assignment reused `prompt`,
# shadowing the ChatPromptTemplate defined above.
if user_input := st.chat_input():
    st.chat_message("human").write(user_input)
    # Note: new messages are saved to history automatically by Langchain during run
    config = {"configurable": {"session_id": "any"}}
    with tracer.start_as_current_span("chain invoke"):
        response = chain_with_history.invoke({"question": user_input}, config)
    st.chat_message("ai").write(response.content)

# Draw the messages at the end, so newly generated ones show up immediately
with view_messages:
    """
    Message History initialized with:
    ```python
    msgs = StreamlitChatMessageHistory(key="langchain_messages")
    ```

    Contents of `st.session_state.langchain_messages`:
    """
    view_messages.json(st.session_state.langchain_messages)

instrumentation-genai/opentelemetry-instrumentation-vertexai/examples/langgraph-chatbot-demo/pyproject.toml

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,18 +5,30 @@ description = "Add your description here"
55
readme = "README.md"
66
requires-python = ">=3.9"
77
dependencies = [
8+
"duckduckgo-search>=7.3.0",
9+
"langchain-community>=0.3.16",
810
"langchain-core>=0.3.31",
911
"langchain-google-vertexai>=2.0.7",
1012
"langgraph>0.2.27",
1113
"opentelemetry-distro>=0.50b0",
14+
"opentelemetry-exporter-gcp-logging",
15+
"opentelemetry-exporter-gcp-trace>=1.8.0",
1216
"opentelemetry-exporter-otlp-proto-grpc>=1.29.0",
17+
"opentelemetry-instrumentation-aiohttp-client>=0.50b0",
18+
"opentelemetry-instrumentation-httpx>=0.50b0",
19+
"opentelemetry-instrumentation-requests>=0.50b0",
20+
"opentelemetry-instrumentation-urllib>=0.50b0",
21+
"opentelemetry-instrumentation-urllib3>=0.50b0",
1322
"opentelemetry-instrumentation-vertexai",
23+
"streamlit>=1.41.1",
1424
]
1525

1626
[tool.uv.sources]
1727
opentelemetry-instrumentation-vertexai = { git = "https://github.com/aabmass/opentelemetry-python-contrib.git", subdirectory = "instrumentation-genai/opentelemetry-instrumentation-vertexai", branch = "vertex-langgraph" }
28+
opentelemetry-exporter-gcp-logging = { git = "https://github.com/DylanRussell/opentelemetry-operations-python.git", subdirectory = "opentelemetry-exporter-gcp-logging", branch = "logging_exporter" }
1829

1930
[dependency-groups]
2031
dev = [
32+
"ipython>=8.18.1",
2133
"ruff>=0.9.2",
2234
]
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
"""Launcher: configure OpenTelemetry env-var defaults for exporting to
Google Cloud, then run the Streamlit app under opentelemetry-instrument.
"""

import os
import subprocess

import google.auth
import google.auth.transport
import google.auth.transport.requests

# Obtain Application Default Credentials and mint a fresh access token so it
# can be forwarded as the OTLP authorization header below.
# NOTE(review): the token is minted once and expires (typically ~1h); a
# long-running server will start failing auth after that — confirm whether
# this demo's lifetime is short enough for that to be acceptable.
creds, project_id = google.auth.default()
creds.refresh(google.auth.transport.requests.Request())


def setenv_default(k: str, v: str) -> None:
    """Set environment variable *k* to *v* only if it is not already set."""
    os.environ.setdefault(k, v)


setenv_default(
    "OTEL_EXPORTER_OTLP_ENDPOINT", "https://telemetry.googleapis.com:443"
)
setenv_default("OTEL_SERVICE_NAME", "langgraph-chatbot-demo")
setenv_default("OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED", "true")
setenv_default("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "true")
setenv_default("OTEL_LOGS_EXPORTER", "gcp_logging")
setenv_default("OTEL_RESOURCE_ATTRIBUTES", f"gcp.project_id={project_id}")
setenv_default(
    "OTEL_EXPORTER_OTLP_HEADERS",
    f"authorization=Bearer {creds.token},x-goog-user-project={project_id}",
)

# SECURITY: do not dump the whole environment — OTEL_EXPORTER_OTLP_HEADERS
# contains a live bearer token. Print only the OTel settings, redacted.
for key in sorted(os.environ):
    if key.startswith("OTEL_"):
        value = (
            "<redacted>"
            if key == "OTEL_EXPORTER_OTLP_HEADERS"
            else os.environ[key]
        )
        print(f"{key}={value}")

# Run Streamlit under auto-instrumentation; propagate a non-zero exit code.
subprocess.run(
    [
        "opentelemetry-instrument",
        "streamlit",
        "run",
        "langchain_history.py",
    ],
    check=True,
)

0 commit comments

Comments
 (0)