Replies: 4 comments 1 reply
-
would you mind providing the code directly instead of giving the image? |
Beta Was this translation helpful? Give feedback.
0 replies
-
Hi
Thank you for your kind answer.
I am still trying hard, but I can't get it to work.
…On Fri, 25 Jul 2025 at 07:06, Tik1993 ***@***.***> wrote:
would you mind providing the code directly instead of giving the image?
—
Reply to this email directly, view it on GitHub
<#5645 (comment)>,
or unsubscribe
<https://github.com/notifications/unsubscribe-auth/BUJUNQKNHBGA2NAH2IW5KKT3KFKHTAVCNFSM6AAAAACCIX3ZBKVHI2DSMVQWIX3LMV43URDJONRXK43TNFXW4Q3PNVWWK3TUHMYTGOBYGI4DAOI>
.
You are receiving this because you authored the thread.Message ID:
***@***.***
com>
|
Beta Was this translation helpful? Give feedback.
1 reply
-
oh okay
Can you read it now?
%pip install -qU langgraph langsmith
%pip install -qU langchain-tavily
import os
import getpass
def _set_env(var: str):
if not os.environ.get(var):
os.environ[var] = getpass.getpass(f"{var}: ")
_set_env("TAVILY_API_KEY")
from langchain_tavily import TavilySearch
from langgraph.checkpoint.memory import InMemorySaver
memory = InMemorySaver()
tool = TavilySearch(max_result=2)
tools = [tool]
tool.invoke("What's a 'node' in LangGraph?")
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
class State(TypedDict):
messages: Annotated[list, add_messages]
graph_builder = StateGraph(State)
import os
from langchain.chat_models import init_chat_model
os.environ["GOOGLE_API_KEY"] = ""
llm = init_chat_model("google_genai:gemini-2.0-flash")
llm_with_tools = llm.bind_tools(tools)
def chatbot(state: State):
return {"messages": [llm_with_tools.invoke(state["messages"])]}
graph_builder.add_node("chatbot", chatbot)
import json
from langchain_core.messages import ToolMessage
class BasicToolNode:
def __init__(self, tools: list) -> None:
self.tools_by_name = {tool.name: tool for tool in tools}
def __call__(self, inputs: dict):
if messages := inputs.get("messaeges", []):
message = messages[-1]
else:
raise ValueError("No message found in input")
outputs = []
for tool_call in message.tool_calls:
tool_result = self.tools_by_name[tool_call["name"]].invoke(
tool_call["args"]
)
outputs.append(
ToolMessage(
content=json.dumps(tool_result),
name=tool_call["name"],
tool_call_id = tool_call["id"],
)
)
return {"messages":outputs}
tool_node = BasicToolNode(tools=[tool])
graph_builder.add_node("tools", tool_node)
def route_tools(
state: State,
):
if messages := state.get("messages", []):
ai_message = messages[-1]
else:
raise ValueError(f"No messages found in input state to tool_edge: {
state}")
if hasattr(ai_message, "tool_calls") and ai_message.tool_calls:
return "tools"
return END
graph_builder.add_conditional_edges(
"chatbot",
route_tools,
{"tools": "tools", END:END},
)
graph_builder.add_edge("tools", "chatbot")
graph_builder.add_edge(START, "chatbot")
graph = graph_builder.compile()
from IPython.display import Image, display
try:
display(Image(graph.get_graph().draw_mermaid_png()))
except Exception:
pass
def stream_graph_updates(user_input: str):
for event in graph.stream({"messages":[{"role":"user", "content"
:user_input}]}):
for value in event.values():
print("Assistant:", value["messages"][-1].content)
while True:
try:
user_input = input("User: ")
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye")
break
stream_graph_updates(user_input)
except:
user_input = "What do you know about LangGraph?"
print("User: " + user_input)
stream_graph_updates(user_input)
break
…On Fri, 25 Jul 2025 at 07:19, Tik1993 ***@***.***> wrote:
I cannot read the code from the image, so it would be better if you can
upload the code directly to here
—
Reply to this email directly, view it on GitHub
<#5645 (reply in thread)>,
or unsubscribe
<https://github.com/notifications/unsubscribe-auth/BUJUNQNIEDYJRTNH5NAY7ET3KFLWFAVCNFSM6AAAAACCIX3ZBKVHI2DSMVQWIX3LMV43URDJONRXK43TNFXW4Q3PNVWWK3TUHMYTGOBYGI4DKNA>
.
You are receiving this because you authored the thread.Message ID:
***@***.***
com>
|
Beta Was this translation helpful? Give feedback.
0 replies
-
%pip install -qU langgraph langsmith
%pip install -qU langchain-tavily
import os
import getpass
def _set_env(var: str):
if not os.environ.get(var):
os.environ[var] = getpass.getpass(f"{var}: ")
_set_env("TAVILY_API_KEY")
from langchain_tavily import TavilySearch
from langgraph.checkpoint.memory import InMemorySaver
memory = InMemorySaver()
tool = TavilySearch(max_result=2)
tools = [tool]
tool.invoke("What's a 'node' in LangGraph?")
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
class State(TypedDict):
messages: Annotated[list, add_messages]
graph_builder = StateGraph(State)
import os
from langchain.chat_models import init_chat_model
os.environ["GOOGLE_API_KEY"] = ""
llm = init_chat_model("google_genai:gemini-2.0-flash")
llm_with_tools = llm.bind_tools(tools)
def chatbot(state: State):
return {"messages": [llm_with_tools.invoke(state["messages"])]}
graph_builder.add_node("chatbot", chatbot)
import json
from langchain_core.messages import ToolMessage
class BasicToolNode:
def __init__(self, tools: list) -> None:
self.tools_by_name = {tool.name: tool for tool in tools}
def __call__(self, inputs: dict):
if messages := inputs.get("messaeges", []):
message = messages[-1]
else:
raise ValueError("No message found in input")
outputs = []
for tool_call in message.tool_calls:
tool_result = self.tools_by_name[tool_call["name"]].invoke(
tool_call["args"]
)
outputs.append(
ToolMessage(
content=json.dumps(tool_result),
name=tool_call["name"],
tool_call_id = tool_call["id"],
)
)
return {"messages":outputs}
tool_node = BasicToolNode(tools=[tool])
graph_builder.add_node("tools", tool_node)
def route_tools(
state: State,
):
if messages := state.get("messages", []):
ai_message = messages[-1]
else:
raise ValueError(f"No messages found in input state to tool_edge:
{state}")
if hasattr(ai_message, "tool_calls") and ai_message.tool_calls:
return "tools"
return END
graph_builder.add_conditional_edges(
"chatbot",
route_tools,
{"tools": "tools", END:END},
)
graph_builder.add_edge("tools", "chatbot")
graph_builder.add_edge(START, "chatbot")
graph = graph_builder.compile()
from IPython.display import Image, display
try:
display(Image(graph.get_graph().draw_mermaid_png()))
except Exception:
pass
def stream_graph_updates(user_input: str):
for event in graph.stream({"messages":[{"role":"user",
"content":user_input}]}):
for value in event.values():
print("Assistant:", value["messages"][-1].content)
while True:
try:
user_input = input("User: ")
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye")
break
stream_graph_updates(user_input)
except:
user_input = "What do you know about LangGraph?"
print("User: " + user_input)
stream_graph_updates(user_input)
break
…On Fri, 25 Jul 2025 at 07:23, jason hwang ***@***.***> wrote:
oh okay
Can you read it now?
%pip install -qU langgraph langsmith
%pip install -qU langchain-tavily
import os
import getpass
def _set_env(var: str):
if not os.environ.get(var):
os.environ[var] = getpass.getpass(f"{var}: ")
_set_env("TAVILY_API_KEY")
from langchain_tavily import TavilySearch
from langgraph.checkpoint.memory import InMemorySaver
memory = InMemorySaver()
tool = TavilySearch(max_result=2)
tools = [tool]
tool.invoke("What's a 'node' in LangGraph?")
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
class State(TypedDict):
messages: Annotated[list, add_messages]
graph_builder = StateGraph(State)
import os
from langchain.chat_models import init_chat_model
os.environ["GOOGLE_API_KEY"] = ""
llm = init_chat_model("google_genai:gemini-2.0-flash")
llm_with_tools = llm.bind_tools(tools)
def chatbot(state: State):
return {"messages": [llm_with_tools.invoke(state["messages"])]}
graph_builder.add_node("chatbot", chatbot)
import json
from langchain_core.messages import ToolMessage
class BasicToolNode:
def __init__(self, tools: list) -> None:
self.tools_by_name = {tool.name: tool for tool in tools}
def __call__(self, inputs: dict):
if messages := inputs.get("messaeges", []):
message = messages[-1]
else:
raise ValueError("No message found in input")
outputs = []
for tool_call in message.tool_calls:
tool_result = self.tools_by_name[tool_call["name"]].invoke(
tool_call["args"]
)
outputs.append(
ToolMessage(
content=json.dumps(tool_result),
name=tool_call["name"],
tool_call_id = tool_call["id"],
)
)
return {"messages":outputs}
tool_node = BasicToolNode(tools=[tool])
graph_builder.add_node("tools", tool_node)
def route_tools(
state: State,
):
if messages := state.get("messages", []):
ai_message = messages[-1]
else:
raise ValueError(f"No messages found in input state to tool_edge:
{state}")
if hasattr(ai_message, "tool_calls") and ai_message.tool_calls:
return "tools"
return END
graph_builder.add_conditional_edges(
"chatbot",
route_tools,
{"tools": "tools", END:END},
)
graph_builder.add_edge("tools", "chatbot")
graph_builder.add_edge(START, "chatbot")
graph = graph_builder.compile()
from IPython.display import Image, display
try:
display(Image(graph.get_graph().draw_mermaid_png()))
except Exception:
pass
def stream_graph_updates(user_input: str):
for event in graph.stream({"messages":[{"role":"user", "content"
:user_input}]}):
for value in event.values():
print("Assistant:", value["messages"][-1].content)
while True:
try:
user_input = input("User: ")
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye")
break
stream_graph_updates(user_input)
except:
user_input = "What do you know about LangGraph?"
print("User: " + user_input)
stream_graph_updates(user_input)
break
On Fri, 25 Jul 2025 at 07:19, Tik1993 ***@***.***> wrote:
> I cannot read the code from the image, so it would be better if you can
> upload the code directly to here
>
> —
> Reply to this email directly, view it on GitHub
> <#5645 (reply in thread)>,
> or unsubscribe
> <https://github.com/notifications/unsubscribe-auth/BUJUNQNIEDYJRTNH5NAY7ET3KFLWFAVCNFSM6AAAAACCIX3ZBKVHI2DSMVQWIX3LMV43URDJONRXK43TNFXW4Q3PNVWWK3TUHMYTGOBYGI4DKNA>
> .
> You are receiving this because you authored the thread.Message ID:
> ***@***.***
> com>
>
|
Beta Was this translation helpful? Give feedback.
0 replies
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Uh oh!
There was an error while loading. Please reload this page.
-
I often get confused when adding nodes to a LangGraph graph, and I still don't know what the problem is here.
Can you help me figure it out?
Beta Was this translation helpful? Give feedback.
All reactions