Continuing checkpoint after interruption of a node #5998
continentalGT started this conversation in Discussions · Replies: 0 comments
Hello everyone,
I need help.
I have created a simple workflow to explain human-in-the-loop; the workflow is in the screenshot.
The first node simply greets the user, then the interruption asks the user to provide the topic on which they want the poem, and then the graph should continue the state again from where it left off.
My issue is that it is not continuing from the checkpoint; it is starting again on the same thread.
Can anyone help me?
Here is the code:
from typing import Annotated, TypedDict
from langgraph.graph.message import add_messages, AnyMessage
from IPython.display import Image, display
from langgraph.graph import StateGraph, START, END
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langgraph.checkpoint.memory import MemorySaver
class MessagesState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
def greeting(state: MessagesState):
    return {
        "messages": [
            AIMessage(
                content="Hello, good day, I will be helping you in creating a shayari on your favorite topic. Please mention your topic",
                name="Lalita",
            )
        ]
    }
def shayar(state: MessagesState):
    # `llm` is assumed to be a chat model initialised elsewhere (e.g. ChatOpenAI)
    sys_msg = SystemMessage(content="You are an experienced shayar; produce a shayari on the topic mentioned by the user in just 2-4 lines.")
    return {"messages": [llm.invoke([sys_msg] + state["messages"])]}
# Build graph
graph = StateGraph(MessagesState)
graph.add_node("greeting", greeting)
graph.add_node("shayar", shayar)
graph.add_edge(START, "greeting")
graph.add_edge("greeting", "shayar")
graph.add_edge("shayar", END)
# View before compiling
memory = MemorySaver()
graph = graph.compile(interrupt_before=['shayar'], checkpointer=memory)  # adding interrupt before shayar
display(Image(graph.get_graph().draw_mermaid_png()))
# Input
user_hi = input()
initial_input = {"messages": [HumanMessage(content=user_hi)]}
# Thread
thread = {"configurable": {"thread_id": "1"}}
# Run the graph until the first interruption
for event in graph.stream(initial_input, thread, stream_mode="values"):
    event['messages'][-1].pretty_print()
# Get user feedback
user_preference = input("Please tell me the topic on which you want shayari : ")
topic = {"messages": [HumanMessage(content=user_preference)]}
for event in graph.stream(topic, thread, stream_mode="values"):
    event['messages'][-1].pretty_print()