-
I was playing around with the The following is my async def call_model(
state: State, config: RunnableConfig
) -> Dict[str, List[AIMessage]]:
configuration = Configuration.from_runnable_config(config)
configurable = config.get('configurable', {})
user_info = configurable.get('user_info', {})
current_time = datetime.now(tz=timezone.utc)
prompt = ChatPromptTemplate.from_messages(
[
("system", configuration.system_prompt),
("placeholder", "{messages}")
]
).partial(
date=current_time.isoformat(),
time=current_time.strftime("%H:%M:%S")
)
model = load_chat_model(configuration.model).bind_tools(TOOLS)
message_value = await prompt.ainvoke(
{
"messages": state.messages,
"user_info": user_info,
},
config,
)
response = cast(AIMessage, await model.ainvoke(message_value, config))
if state.is_last_step and response.tool_calls:
return {
"messages": [
AIMessage(
id=response.id,
content="Sorry, I could not find an answer to your question in the specified number of steps.",
)
]
}
return {"messages": [response]} Now I stream the response of the graph like this: async def stream_events():
async for chunk in chatbot.astream({"messages": [("user", message.content)]}, config, stream_mode="events", version="v2"):
print(chunk)
if (
chunk.event == "events" and
chunk.data["event"] == "on_chat_model_stream" and
len(chunk.data["data"]["chunk"]["content"]) > 0 and
'text' in chunk.data["data"]["chunk"]["content"][0]
):
yield f"data: {chunk.data['data']['chunk']['content'][0]['text']}\n\n"
yield "event: close\ndata: [DONE]\n\n" am I missing some this? |
Beta Was this translation helpful? Give feedback.
Answered by
vbarda
Oct 18, 2024
Replies: 1 comment
-
@cris-m there is actually no |
Beta Was this translation helpful? Give feedback.
0 replies
Answer selected by
vbarda
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
@cris-m there is actually no
stream_mode="events"
in langgraph -- you need to usechatbot.astream_events({"messages": [("user", message.content)]}, config, version="v2")
. hope this helps!