@@ -1,6 +1,7 @@
 from langgraph.graph import END, StateGraph
+from langgraph.types import interrupt
 
-from template_langgraph.agents.chat_with_tools_agent.models import AgentState
+from template_langgraph.agents.task_decomposer_agent.models import AgentState, TaskList
 from template_langgraph.llms.azure_openais import AzureOpenAiWrapper
 from template_langgraph.loggers import get_logger
 
@@ -18,22 +19,58 @@ def create_graph(self):
 
         # Create nodes
         workflow.add_node("chat", self.chat)
+        workflow.add_node("human_feedback", self.human_feedback)
 
         # Create edges
         workflow.set_entry_point("chat")
-        workflow.add_edge("chat", END)
-
-        # Compile the graph
+        workflow.add_edge("chat", "human_feedback")
+        workflow.add_conditional_edges(
+            source="human_feedback",
+            path=self.route_human_feedback,
+            path_map={
+                "loopback": "chat",
+                "end": END,
+            },
+        )
         return workflow.compile()
 
     def chat(self, state: AgentState) -> AgentState:
         """Chat with tools using the state."""
         logger.info(f"Chatting with tools using state: {state}")
-        return {
-            "messages": [
-                self.llm.invoke(state["messages"]),
-            ]
-        }
+
+        task_list = self.llm.with_structured_output(TaskList).invoke(
+            input=state["messages"],
+        )
+        state["task_list"] = task_list
+        logger.info(f"Decomposed tasks: {task_list}")
+        return state
+
+    def human_feedback(self, state: AgentState) -> AgentState:
+        """Handle human feedback."""
+        logger.info(f"Handling human feedback with state: {state}")
+        feedback = interrupt("Type your feedback. If you want to end the conversation, type 'end'.")
+        state["messages"].append(
+            {
+                "content": feedback,
+                "role": "user",
+            }
+        )
+        return state
+
+    def route_human_feedback(
+        self,
+        state: AgentState,
+    ):
+        """
+        Use in the conditional_edge to route to the HumanFeedbackNode if the last message
+        has human feedback. Otherwise, route to the end.
+        """
+        human_feedback = state["messages"][-1].content.strip().lower()
+        if human_feedback == "end":
+            logger.info("Ending the conversation as per user request.")
+            return "end"
+        logger.info("Looping back to chat for further processing.")
+        return "loopback"
 
     def draw_mermaid_png(self) -> bytes:
         """Draw the graph in Mermaid format."""