@@ -80,30 +80,20 @@ def execute(self, state: dict) -> dict:
 
         self.logger.info(f"--- Executing {self.node_name} Node ---")
 
-        # Interpret input keys based on the provided input expression
         input_keys = self.get_input_keys(state)
-        # Fetching data from the state based on the input keys
+
         input_data = [state[key] for key in input_keys]
         user_prompt = input_data[0]
         doc = input_data[1]
 
-        # Initialize the output parser
         if self.node_config.get("schema", None) is not None:
-            output_parser = JsonOutputParser(pydantic_object=self.node_config["schema"])
-
-            # Use built-in structured output for providers that allow it
-            optional_modules = {"langchain_anthropic", "langchain_fireworks", "langchain_groq", "langchain_google_vertexai"}
-            if all(key in modules for key in optional_modules):
-                if isinstance(self.llm_model, (ChatOpenAI, ChatMistralAI, ChatAnthropic, ChatFireworks, ChatGroq, ChatVertexAI)):
-                    self.llm_model = self.llm_model.with_structured_output(
-                        schema=self.node_config["schema"],
-                        method="json_schema")
-            else:
-                if isinstance(self.llm_model, (ChatOpenAI, ChatMistralAI)):
-                    self.llm_model = self.llm_model.with_structured_output(
-                        schema=self.node_config["schema"],
-                        method="json_schema")
 
+            if isinstance(self.llm_model, (ChatOpenAI, ChatMistralAI)):
+                self.llm_model = self.llm_model.with_structured_output(
+                    schema=self.node_config["schema"],
+                    method="json_schema")
+            else:
+                output_parser = JsonOutputParser(pydantic_object=self.node_config["schema"])
 
         else:
             output_parser = JsonOutputParser()
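
Illustrative sketch (not part of the commit): the simplified branch keeps native structured output only for ChatOpenAI and ChatMistralAI and falls back to a JsonOutputParser for everything else. The Answer schema and the model name below are hypothetical stand-ins for node_config["schema"] and the node's configured LLM; it assumes langchain-openai, langchain-mistralai, and langchain-core are installed.

from pydantic import BaseModel
from langchain_core.output_parsers import JsonOutputParser
from langchain_openai import ChatOpenAI
from langchain_mistralai import ChatMistralAI

class Answer(BaseModel):
    # Hypothetical schema standing in for node_config["schema"]
    answer: str

llm_model = ChatOpenAI(model="gpt-4o-mini")  # hypothetical model choice
output_parser = None

if isinstance(llm_model, (ChatOpenAI, ChatMistralAI)):
    # These providers can emit JSON conforming to the schema natively.
    llm_model = llm_model.with_structured_output(schema=Answer, method="json_schema")
else:
    # Other providers: parse the raw JSON text against the schema instead.
    output_parser = JsonOutputParser(pydantic_object=Answer)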