Works with a chat model with tool calling support.
"""

- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from typing import Dict, List, Literal, cast

from langchain_core.messages import AIMessage
- from langchain_core.runnables import RunnableConfig
from langgraph.graph import StateGraph
from langgraph.prebuilt import ToolNode

# Define the function that calls the model


- async def call_model(
-     state: State, config: RunnableConfig
- ) -> Dict[str, List[AIMessage]]:
+ async def call_model(state: State) -> Dict[str, List[AIMessage]]:
    """Call the LLM powering our "agent".

    This function prepares the prompt, initializes the model, and processes the response.
@@ -33,21 +30,21 @@ async def call_model(
    Returns:
        dict: A dictionary containing the model's response message.
    """
-     configuration = Configuration.from_runnable_config(config)
+     configuration = Configuration.from_context()

    # Initialize the model with tool binding. Change the model or add more tools here.
    model = load_chat_model(configuration.model).bind_tools(TOOLS)

    # Format the system prompt. Customize this to change the agent's behavior.
    system_message = configuration.system_prompt.format(
-         system_time=datetime.now(tz=timezone.utc).isoformat()
+         system_time=datetime.now(tz=UTC).isoformat()
    )

    # Get the model's response
    response = cast(
        AIMessage,
        await model.ainvoke(
-             [{"role": "system", "content": system_message}, *state.messages], config
+             [{"role": "system", "content": system_message}, *state.messages]
        ),
    )

@@ -115,9 +112,4 @@ def route_model_output(state: State) -> Literal["__end__", "tools"]:
builder.add_edge("tools", "call_model")

# Compile the builder into an executable graph
- # You can customize this by adding interrupt points for state updates
- graph = builder.compile(
-     interrupt_before=[],  # Add node names here to update state before they're called
-     interrupt_after=[],  # Add node names here to update state after they're called
- )
- graph.name = "ReAct Agent"  # This customizes the name in LangSmith
+ graph = builder.compile(name="ReAct Agent")
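The diff above drops the RunnableConfig plumbing in favor of Configuration.from_context(), but the matching change to configuration.py is not part of this hunk. As a rough sketch only — assuming the template's Configuration is a dataclass and that it reads the active RunnableConfig via langgraph.config.get_config() (both assumptions, not shown in this diff) — from_context() might look roughly like this:

# Hypothetical sketch, not part of this diff: how Configuration.from_context()
# could recover configurable values without a config parameter on the node.
from dataclasses import dataclass, fields

from langgraph.config import get_config


@dataclass(kw_only=True)
class Configuration:
    # Field names and defaults here are illustrative placeholders.
    model: str = "provider/model-name"
    system_prompt: str = "You are a helpful assistant. System time: {system_time}"

    @classmethod
    def from_context(cls) -> "Configuration":
        # get_config() returns the RunnableConfig of the current graph
        # invocation; it raises RuntimeError when called outside of one.
        try:
            config = get_config()
        except RuntimeError:
            config = {}
        configurable = (config or {}).get("configurable", {})
        init_fields = {f.name for f in fields(cls) if f.init}
        return cls(**{k: v for k, v in configurable.items() if k in init_fields})

Callers still pass per-run settings the same way, e.g. graph.ainvoke({"messages": [...]}, config={"configurable": {"model": "..."}}); the node itself simply no longer needs config in its signature.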