@@ -41,7 +41,7 @@ def _create_llm(self, llm_config: dict):
             try:
                 self.model_token = models_tokens["openai"][llm_params["model"]]
             except KeyError:
-                raise ValueError("Model not supported")
+                raise KeyError("Model not supported")
             return OpenAI(llm_params)

         elif "azure" in llm_params["model"]:
@@ -50,14 +50,14 @@ def _create_llm(self, llm_config: dict):
             try:
                 self.model_token = models_tokens["azure"][llm_params["model"]]
             except KeyError:
-                raise ValueError("Model not supported")
+                raise KeyError("Model not supported")
             return AzureOpenAI(llm_params)

         elif "gemini" in llm_params["model"]:
             try:
                 self.model_token = models_tokens["gemini"][llm_params["model"]]
             except KeyError:
-                raise ValueError("Model not supported")
+                raise KeyError("Model not supported")
             return Gemini(llm_params)

         elif "ollama" in llm_params["model"]:
@@ -70,19 +70,27 @@ def _create_llm(self, llm_config: dict):
             try:
                 self.model_token = models_tokens["ollama"][llm_params["model"]]
             except KeyError:
-                raise ValueError("Model not supported")
+                raise KeyError("Model not supported")

             return Ollama(llm_params)
         elif "hugging_face" in llm_params["model"]:
             try:
                 self.model_token = models_tokens["hugging_face"][llm_params["model"]]
             except KeyError:
-                raise ValueError("Model not supported")
+                raise KeyError("Model not supported")
             return HuggingFace(llm_params)
         else:
             raise ValueError(
                 "Model provided by the configuration not supported")

+    def get_state(self, key=None) -> dict:
+        """
+        Obtain the current state of the graph; if a key is given, return only that entry.
+        """
+        if key is not None:
+            return self.final_state[key]
+        return self.final_state
+
     def get_execution_info(self):
         """
         Returns the execution information of the graph.
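
A minimal usage sketch for the new get_state() helper, assuming the class edited here is the base graph of a scraping pipeline that fills self.final_state when run() executes; the SmartScraperGraph constructor arguments, the config layout, and the "answer" key below are illustrative assumptions, not taken from this diff:

# Hypothetical usage of get_state(); the graph class, its arguments and the
# "answer" state key are assumptions for illustration only.
graph = SmartScraperGraph(
    prompt="List all article titles on the page",
    source="https://example.com",
    config={"llm": {"model": "gpt-3.5-turbo"}},
)
result = graph.run()

full_state = graph.get_state()        # the entire final state dict
answer = graph.get_state("answer")    # one entry; raises KeyError if the key is missing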