@@ -57,7 +57,7 @@ def __init__(
         self.is_md_scraper = node_config.get("is_md_scraper", False)
         self.additional_info = node_config.get("additional_info")

-    async def execute(self, state: dict) -> dict:
+    def execute(self, state: dict) -> dict:
         """
         Executes the GenerateAnswerNode.

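With `execute()` now synchronous, callers no longer need an event loop. A minimal stand-in sketch of the calling convention (the `FakeAnswerNode` class and the state keys are illustrative, not from this repo):

```python
# Sketch only: a stand-in node with the same sync interface, to show how
# callers change once execute() is no longer a coroutine.

class FakeAnswerNode:
    """Minimal stand-in; the real node builds an LLM chain instead."""
    def execute(self, state: dict) -> dict:
        state.update({"answer": "stub"})
        return state

node = FakeAnswerNode()

# Before: state = asyncio.run(node.execute(state))  # coroutine, needed a loop
# After: a plain blocking call.
state = node.execute({"user_prompt": "What does the page say?"})
print(state["answer"])
```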
@@ -123,7 +123,7 @@ async def execute(self, state: dict) -> dict:
         chain = prompt | self.llm_model
         if output_parser:
             chain = chain | output_parser
-        answer = await chain.ainvoke({"question": user_prompt})
+        answer = chain.invoke({"question": user_prompt})

         state.update({self.output[0]: answer})
         return state
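The single-chunk path is a plain LCEL pipeline, so the only change is `.invoke()` in place of `await .ainvoke()`. A self-contained sketch of the same pattern, with `FakeListLLM` standing in for `self.llm_model` and an assumed template:

```python
# Sketch of the prompt | model | parser pipeline invoked synchronously.
from langchain_core.prompts import PromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.language_models.fake import FakeListLLM

prompt = PromptTemplate(
    template="Answer the user question.\nQuestion: {question}",
    input_variables=["question"],
)
llm = FakeListLLM(responses=['{"answer": "42"}'])  # stand-in model

chain = prompt | llm | StrOutputParser()
# .invoke() replaces await .ainvoke(); same inputs, same output type.
answer = chain.invoke({"question": "What is the page about?"})
print(answer)
```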
@@ -143,7 +143,7 @@ async def execute(self, state: dict) -> dict:
             chains_dict[chain_name] = chains_dict[chain_name] | output_parser

         async_runner = RunnableParallel(**chains_dict)
-        batch_results = await async_runner.ainvoke({"question": user_prompt})
+        batch_results = async_runner.invoke({"question": user_prompt})

         merge_prompt = PromptTemplate(
             template=template_merge_prompt,
@@ -154,7 +154,7 @@ async def execute(self, state: dict) -> dict:
         merge_chain = merge_prompt | self.llm_model
         if output_parser:
             merge_chain = merge_chain | output_parser
-        answer = await merge_chain.ainvoke({"context": batch_results, "question": user_prompt})
+        answer = merge_chain.invoke({"context": batch_results, "question": user_prompt})

         state.update({self.output[0]: answer})
         return state
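On the multi-chunk path, dropping `await` should not serialize the fan-out: `RunnableParallel`'s synchronous `invoke()` runs its branches on a thread pool, so per-chunk calls still overlap. A sketch of the fan-out/merge flow under assumed names (chunk templates, keys, and `FakeListLLM` are illustrative):

```python
# Sketch of the map/merge pattern with sync invoke throughout.
from langchain_core.prompts import PromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableParallel
from langchain_core.language_models.fake import FakeListLLM

llm = FakeListLLM(responses=["partial A", "partial B", "merged answer"])
parser = StrOutputParser()

# Build one chain per chunk, mirroring chains_dict in the node.
chains_dict = {}
for i in range(2):
    prompt = PromptTemplate(
        template=f"Use chunk {i} to answer: {{question}}",
        input_variables=["question"],
    )
    chains_dict[f"chunk{i}"] = prompt | llm | parser

# RunnableParallel.invoke() dispatches branches to a thread pool,
# so the per-chunk LLM calls still run concurrently without ainvoke.
batch_results = RunnableParallel(**chains_dict).invoke({"question": "topic?"})

# Reduce step: merge the per-chunk answers into one.
merge_prompt = PromptTemplate(
    template="Merge {context} into one answer to: {question}",
    input_variables=["context", "question"],
)
merge_chain = merge_prompt | llm | parser
answer = merge_chain.invoke({"context": batch_results, "question": "topic?"})
print(answer)
```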