
Commit 3f54ec9

Merge pull request #16 from ks6088ts-labs/feature/issue-11_tool-call
add tool call agent
2 parents c5a3ae9 + ceded02 commit 3f54ec9

File tree

3 files changed (+115, -21 lines changed)

docs/index.md

Lines changed: 8 additions & 2 deletions
@@ -27,12 +27,18 @@ uv run python -m template_langgraph.tasks.run_kabuto_helpdesk_agent "KABUTOの
 
 # BasicWorkflowAgent
 uv run python -m template_langgraph.tasks.draw_basic_workflow_agent_mermaid_png "data/basic_workflow_agent.png"
-uv run python -m template_langgraph.tasks.run_basic_workflow_agent "KABUTOの起動時に、画面全体が紫色に点滅し、システムがフリーズします。"
-uv run python -m template_langgraph.tasks.run_basic_workflow_agent "私の名前はフグ田 サザエ。東京都世田谷区桜新町あさひが丘3丁目に住んでいる 24 歳の主婦です。夫のノリスケと子供のタラちゃんがいます。"
+uv run python -m template_langgraph.tasks.run_basic_workflow_agent
+# 私の名前はフグ田 サザエ。東京都世田谷区桜新町あさひが丘3丁目に住んでいる 24 歳の主婦です。夫のノリスケと子供のタラちゃんがいます
+# KABUTOの起動時に、画面全体が紫色に点滅し、システムがフリーズします。KABUTO のマニュアルから、関連する情報を取得したり過去のシステムのトラブルシュート事例が蓄積されたデータベースから、関連する情報を取得して質問に答えてください
+# 天狗のいたずら という現象について KABUTO のマニュアルから、関連する情報を取得したり過去のシステムのトラブルシュート事例が蓄積されたデータベースから、関連する情報を取得して質問に答えてください
 ```
 
 ## References
 
+### LangGraph
+
+- [Build a custom workflow](https://langchain-ai.github.io/langgraph/concepts/why-langgraph/)
+
 ### Sample Codes
 
 - [「現場で活用するためのAIエージェント実践入門」リポジトリ](https://github.com/masamasa59/genai-agent-advanced-book)
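Note that `run_basic_workflow_agent` now reads the question from an interactive `User:` prompt instead of a command-line argument, which is why the sample questions above became comments. A minimal sketch of driving it non-interactively, assuming the prompt is read from standard input via `input()` and that `uv` plus the project dependencies are installed:

```python
# Hypothetical helper (not part of this commit): feed the question to the
# now-interactive task by piping it through stdin.
import subprocess

question = "KABUTOの起動時に、画面全体が紫色に点滅し、システムがフリーズします。"
subprocess.run(
    ["uv", "run", "python", "-m", "template_langgraph.tasks.run_basic_workflow_agent"],
    input=question + "\n",  # consumed by the "User: " prompt
    text=True,
    check=True,
)
```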

template_langgraph/agents/basic_workflow_agent/agent.py

Lines changed: 84 additions & 2 deletions
@@ -1,13 +1,41 @@
-from langchain_core.messages import AIMessage
+import json
+
+from langchain_core.messages import AIMessage, ToolMessage
 from langgraph.graph import END, START, StateGraph
 
 from template_langgraph.agents.basic_workflow_agent.models import AgentInput, AgentOutput, AgentState, Profile
 from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
 from template_langgraph.loggers import get_logger
+from template_langgraph.tools.elasticsearch_tool import search_elasticsearch
+from template_langgraph.tools.qdrants import search_qdrant
 
 logger = get_logger(__name__)
 
 
+class BasicToolNode:
+    """A node that runs the tools requested in the last AIMessage."""
+
+    def __init__(self, tools: list) -> None:
+        self.tools_by_name = {tool.name: tool for tool in tools}
+
+    def __call__(self, inputs: dict):
+        if messages := inputs.get("messages", []):
+            message = messages[-1]
+        else:
+            raise ValueError("No message found in input")
+        outputs = []
+        for tool_call in message.tool_calls:
+            tool_result = self.tools_by_name[tool_call["name"]].invoke(tool_call["args"])
+            outputs.append(
+                ToolMessage(
+                    content=json.dumps(tool_result.__str__(), ensure_ascii=False),
+                    name=tool_call["name"],
+                    tool_call_id=tool_call["id"],
+                )
+            )
+        return {"messages": outputs}
+
+
 class BasicWorkflowAgent:
     def __init__(self):
         self.llm = AzureOpenAiWrapper().chat_model
@@ -21,13 +49,34 @@ def create_graph(self):
         workflow.add_node("initialize", self.initialize)
         workflow.add_node("do_something", self.do_something)
         workflow.add_node("extract_profile", self.extract_profile)
+        workflow.add_node("chat_with_tools", self.chat_with_tools)
+        workflow.add_node(
+            "tools",
+            BasicToolNode(
+                tools=[
+                    search_qdrant,
+                    search_elasticsearch,
+                ]
+            ),
+        )
         workflow.add_node("finalize", self.finalize)
 
         # Create edges
         workflow.add_edge(START, "initialize")
         workflow.add_edge("initialize", "do_something")
         workflow.add_edge("do_something", "extract_profile")
-        workflow.add_edge("extract_profile", "finalize")
+        workflow.add_edge("extract_profile", "chat_with_tools")
+        workflow.add_conditional_edges(
+            "chat_with_tools",
+            self.route_tools,
+            # The following dictionary lets you tell the graph to interpret the condition's outputs as a specific node
+            # It defaults to the identity function, but if you
+            # want to use a node named something else apart from "tools",
+            # You can update the value of the dictionary to something else
+            # e.g., "tools": "my_tools"
+            {"tools": "tools", END: "finalize"},
+        )
+        workflow.add_edge("tools", "chat_with_tools")
         workflow.add_edge("finalize", END)
 
         # Compile the graph
@@ -66,6 +115,39 @@ def extract_profile(self, state: AgentState) -> AgentState:
         state["profile"] = profile
         return state
 
+    def chat_with_tools(self, state: AgentState) -> AgentState:
+        """Chat with tools using the state."""
+        logger.info(f"Chatting with tools using state: {state}")
+        llm_with_tools = self.llm.bind_tools(
+            tools=[
+                search_qdrant,
+                search_elasticsearch,
+            ],
+        )
+        return {
+            "messages": [
+                llm_with_tools.invoke(state["messages"]),
+            ]
+        }
+
+    def route_tools(
+        self,
+        state: AgentState,
+    ):
+        """
+        Use in the conditional_edge to route to the ToolNode if the last message
+        has tool calls. Otherwise, route to the end.
+        """
+        if isinstance(state, list):
+            ai_message = state[-1]
+        elif messages := state.get("messages", []):
+            ai_message = messages[-1]
+        else:
+            raise ValueError(f"No messages found in input state to tool_edge: {state}")
+        if hasattr(ai_message, "tool_calls") and len(ai_message.tool_calls) > 0:
+            return "tools"
+        return END
+
     def finalize(self, state: AgentState) -> AgentState:
         """Finalize the agent's work and prepare the output."""
         logger.info(f"Finalizing BasicWorkflowAgent with state: {state}")
template_langgraph/tasks/run_basic_workflow_agent.py

Lines changed: 23 additions & 17 deletions
@@ -1,27 +1,33 @@
 import logging
-import sys
 
-from template_langgraph.agents.basic_workflow_agent.agent import AgentInput, BasicWorkflowAgent
+from template_langgraph.agents.basic_workflow_agent.agent import AgentState
+from template_langgraph.agents.basic_workflow_agent.agent import graph as basic_workflow_agent_graph
 from template_langgraph.loggers import get_logger
 
 logger = get_logger(__name__)
 logger.setLevel(logging.INFO)
 
-if __name__ == "__main__":
-    question = "「鬼灯」を実行すると、KABUTOが急に停止します。原因と対策を教えてください。"
-    if len(sys.argv) > 1:
-        # sys.argv[1] が最初の引数
-        question = sys.argv[1]
 
-    # Agentのインスタンス化
-    agent = BasicWorkflowAgent()
+def stream_graph_updates(
+    state: AgentState,
+) -> dict:
+    for event in basic_workflow_agent_graph.stream(input=state):
+        logger.info("-" * 20)
+        logger.info(f"Event: {event}")
+    return event
 
-    # AgentInputの作成
-    agent_input = AgentInput(
-        request=question,
-    )
 
-    # エージェントの実行
-    logger.info(f"Running BasicWorkflowAgent with input: {agent_input.model_dump_json(indent=2)}")
-    agent_output = agent.run_agent(input=agent_input)
-    logger.info(f"Agent output: {agent_output.model_dump_json(indent=2)}")
+if __name__ == "__main__":
+    user_input = input("User: ")
+    state = AgentState(
+        messages=[
+            {
+                "role": "user",
+                "content": user_input,
+            }
+        ],
+        profile=None,
+    )
+    last_event = stream_graph_updates(state)
+    for value in last_event.values():
+        logger.info(f"Final state: {value['messages'][-1].content}") # noqa: E501
