
Commit c9b0765

Rename HandleResponseNode to CallToolsNode (#1020)
1 parent c839ef3 commit c9b0765

4 files changed: 25 additions & 25 deletions


docs/agents.md

Lines changed: 6 additions & 6 deletions
@@ -133,7 +133,7 @@ async def main():
                 kind='request',
             )
         ),
-        HandleResponseNode(
+        CallToolsNode(
             model_response=ModelResponse(
                 parts=[TextPart(content='Paris', part_kind='text')],
                 model_name='gpt-4o',
@@ -194,7 +194,7 @@ async def main():
                 kind='request',
             )
         ),
-        HandleResponseNode(
+        CallToolsNode(
             model_response=ModelResponse(
                 parts=[TextPart(content='Paris', part_kind='text')],
                 model_name='gpt-4o',
@@ -311,10 +311,10 @@ async def main():
                         output_messages.append(
                             f'[Result] The model produced a final result (tool_name={event.tool_name})'
                         )
-            elif Agent.is_handle_response_node(node):
+            elif Agent.is_call_tools_node(node):
                 # A handle-response node => The model returned some data, potentially calls a tool
                 output_messages.append(
-                    '=== HandleResponseNode: streaming partial response & tool usage ==='
+                    '=== CallToolsNode: streaming partial response & tool usage ==='
                 )
                 async with node.stream(run.ctx) as handle_stream:
                     async for event in handle_stream:
@@ -343,7 +343,7 @@ if __name__ == '__main__':
         '[Request] Part 0 args_delta=ris","forecast_',
         '[Request] Part 0 args_delta=date":"2030-01-',
         '[Request] Part 0 args_delta=01"}',
-        '=== HandleResponseNode: streaming partial response & tool usage ===',
+        '=== CallToolsNode: streaming partial response & tool usage ===',
         '[Tools] The LLM calls tool=\'weather_forecast\' with args={"location":"Paris","forecast_date":"2030-01-01"} (tool_call_id=\'0001\')',
         "[Tools] Tool call '0001' returned => The forecast in Paris on 2030-01-01 is 24°C and sunny.",
         '=== ModelRequestNode: streaming partial request tokens ===',
@@ -352,7 +352,7 @@ if __name__ == '__main__':
         "[Request] Part 0 text delta: 'warm and sunny '",
         "[Request] Part 0 text delta: 'in Paris on '",
         "[Request] Part 0 text delta: 'Tuesday.'",
-        '=== HandleResponseNode: streaming partial response & tool usage ===',
+        '=== CallToolsNode: streaming partial response & tool usage ===',
         '=== Final Agent Output: It will be warm and sunny in Paris on Tuesday. ===',
     ]
     """

pydantic_ai_slim/pydantic_ai/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 from importlib.metadata import version
 
-from .agent import Agent, EndStrategy, HandleResponseNode, ModelRequestNode, UserPromptNode, capture_run_messages
+from .agent import Agent, CallToolsNode, EndStrategy, ModelRequestNode, UserPromptNode, capture_run_messages
 from .exceptions import (
     AgentRunError,
     FallbackExceptionGroup,
@@ -18,7 +18,7 @@
     # agent
     'Agent',
     'EndStrategy',
-    'HandleResponseNode',
+    'CallToolsNode',
     'ModelRequestNode',
     'UserPromptNode',
     'capture_run_messages',
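Downstream code that imported the old name from the package root needs to follow the rename. A minimal compatibility sketch, assuming only that older releases still export `HandleResponseNode` (as the removed line above shows):

try:
    from pydantic_ai import CallToolsNode  # new name introduced by this commit
except ImportError:  # older pydantic-ai releases, before the rename
    from pydantic_ai import HandleResponseNode as CallToolsNode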

pydantic_ai_slim/pydantic_ai/_agent_graph.py

Lines changed: 8 additions & 8 deletions
@@ -36,7 +36,7 @@
     'GraphAgentDeps',
     'UserPromptNode',
     'ModelRequestNode',
-    'HandleResponseNode',
+    'CallToolsNode',
     'build_run_context',
     'capture_run_messages',
 )
@@ -243,12 +243,12 @@ class ModelRequestNode(AgentNode[DepsT, NodeRunEndT]):
 
     request: _messages.ModelRequest
 
-    _result: HandleResponseNode[DepsT, NodeRunEndT] | None = field(default=None, repr=False)
+    _result: CallToolsNode[DepsT, NodeRunEndT] | None = field(default=None, repr=False)
     _did_stream: bool = field(default=False, repr=False)
 
     async def run(
         self, ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]]
-    ) -> HandleResponseNode[DepsT, NodeRunEndT]:
+    ) -> CallToolsNode[DepsT, NodeRunEndT]:
         if self._result is not None:
             return self._result
 
@@ -307,7 +307,7 @@ async def _stream(
 
     async def _make_request(
         self, ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]]
-    ) -> HandleResponseNode[DepsT, NodeRunEndT]:
+    ) -> CallToolsNode[DepsT, NodeRunEndT]:
         if self._result is not None:
             return self._result
 
@@ -344,7 +344,7 @@ def _finish_handling(
         ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]],
         response: _messages.ModelResponse,
         usage: _usage.Usage,
-    ) -> HandleResponseNode[DepsT, NodeRunEndT]:
+    ) -> CallToolsNode[DepsT, NodeRunEndT]:
         # Update usage
         ctx.state.usage.incr(usage, requests=0)
         if ctx.deps.usage_limits:
@@ -354,13 +354,13 @@ def _finish_handling(
         ctx.state.message_history.append(response)
 
         # Set the `_result` attribute since we can't use `return` in an async iterator
-        self._result = HandleResponseNode(response)
+        self._result = CallToolsNode(response)
 
         return self._result
 
 
 @dataclasses.dataclass
-class HandleResponseNode(AgentNode[DepsT, NodeRunEndT]):
+class CallToolsNode(AgentNode[DepsT, NodeRunEndT]):
     """Process a model response, and decide whether to end the run or make a new request."""
 
     model_response: _messages.ModelResponse
@@ -716,7 +716,7 @@ def build_agent_graph(
     nodes = (
         UserPromptNode[DepsT],
         ModelRequestNode[DepsT],
-        HandleResponseNode[DepsT],
+        CallToolsNode[DepsT],
     )
     graph = Graph[GraphAgentState, GraphAgentDeps[DepsT, Any], result.FinalResult[ResultT]](
         nodes=nodes,
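As the `build_agent_graph` hunk shows, an agent run is assembled from `UserPromptNode`, `ModelRequestNode`, and the renamed `CallToolsNode`, with the run finishing on an `End` node. A minimal sketch of dispatching on those node types during a run; the agent is a hypothetical placeholder, and `End` is assumed to come from `pydantic_graph`, the graph library this module builds on.

from pydantic_ai import Agent, CallToolsNode, ModelRequestNode, UserPromptNode
from pydantic_graph import End

agent = Agent('openai:gpt-4o')  # hypothetical agent, for illustration only


async def label_nodes(prompt: str) -> list[str]:
    labels: list[str] = []
    async with agent.iter(prompt) as run:
        async for node in run:
            if isinstance(node, UserPromptNode):
                labels.append('user prompt')
            elif isinstance(node, ModelRequestNode):
                labels.append('model request')
            elif isinstance(node, CallToolsNode):  # formerly HandleResponseNode
                labels.append('call tools / handle response')
            elif isinstance(node, End):
                labels.append('end')
    return labels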

pydantic_ai_slim/pydantic_ai/agent.py

Lines changed: 9 additions & 9 deletions
@@ -42,7 +42,7 @@
 # Re-exporting like this improves auto-import behavior in PyCharm
 capture_run_messages = _agent_graph.capture_run_messages
 EndStrategy = _agent_graph.EndStrategy
-HandleResponseNode = _agent_graph.HandleResponseNode
+CallToolsNode = _agent_graph.CallToolsNode
 ModelRequestNode = _agent_graph.ModelRequestNode
 UserPromptNode = _agent_graph.UserPromptNode
 
@@ -52,7 +52,7 @@
     'AgentRunResult',
     'capture_run_messages',
     'EndStrategy',
-    'HandleResponseNode',
+    'CallToolsNode',
     'ModelRequestNode',
     'UserPromptNode',
 )
@@ -362,7 +362,7 @@ async def main():
                 kind='request',
             )
         ),
-        HandleResponseNode(
+        CallToolsNode(
             model_response=ModelResponse(
                 parts=[TextPart(content='Paris', part_kind='text')],
                 model_name='gpt-4o',
@@ -1183,14 +1183,14 @@ def is_model_request_node(
         return isinstance(node, _agent_graph.ModelRequestNode)
 
     @staticmethod
-    def is_handle_response_node(
+    def is_call_tools_node(
         node: _agent_graph.AgentNode[T, S] | End[result.FinalResult[S]],
-    ) -> TypeGuard[_agent_graph.HandleResponseNode[T, S]]:
-        """Check if the node is a `HandleResponseNode`, narrowing the type if it is.
+    ) -> TypeGuard[_agent_graph.CallToolsNode[T, S]]:
+        """Check if the node is a `CallToolsNode`, narrowing the type if it is.
 
         This method preserves the generic parameters while narrowing the type, unlike a direct call to `isinstance`.
         """
-        return isinstance(node, _agent_graph.HandleResponseNode)
+        return isinstance(node, _agent_graph.CallToolsNode)
 
     @staticmethod
     def is_user_prompt_node(
@@ -1250,7 +1250,7 @@ async def main():
                 kind='request',
             )
         ),
-        HandleResponseNode(
+        CallToolsNode(
             model_response=ModelResponse(
                 parts=[TextPart(content='Paris', part_kind='text')],
                 model_name='gpt-4o',
@@ -1374,7 +1374,7 @@ async def main():
                 kind='request',
             )
         ),
-        HandleResponseNode(
+        CallToolsNode(
             model_response=ModelResponse(
                 parts=[TextPart(content='Paris', part_kind='text')],
                 model_name='gpt-4o',
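The `is_call_tools_node` hunk uses `typing.TypeGuard` so that type checkers can narrow the node union while preserving the generic parameters. A self-contained sketch of the same narrowing pattern with toy classes (Python 3.10+ assumed; these are not pydantic-ai's real node types):

from dataclasses import dataclass
from typing import TypeGuard


@dataclass
class RequestNode:
    prompt: str


@dataclass
class ToolCallsNode:
    response: str


Node = RequestNode | ToolCallsNode


def is_tool_calls_node(node: Node) -> TypeGuard[ToolCallsNode]:
    # A TypeGuard return type lets checkers narrow `node` in the True branch,
    # mirroring how Agent.is_call_tools_node narrows to CallToolsNode.
    return isinstance(node, ToolCallsNode)


def describe(node: Node) -> str:
    if is_tool_calls_node(node):
        return f'model responded: {node.response}'  # narrowed to ToolCallsNode here
    assert isinstance(node, RequestNode)  # TypeGuard narrows only the positive branch
    return f'user request: {node.prompt}'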
