@@ -203,93 +203,52 @@ def initalize_messages(self, obs: Any) -> None:
203203)
204204
205205
206- def supports_tool_calling (model_name : str ) -> bool :
207- """
208- Check if the model supports tool calling.
209206
210- Args:
211- model_name (str): The name of the model.
212207
213- Returns:
214- bool: True if the model supports tool calling, False otherwise.
215- """
216- import os
208+ # def get_openrouter_model(model_name: str, **open_router_args) -> OpenRouterModelArgs:
209+ # default_model_args = {
210+ # "max_total_tokens": 200_000,
211+ # "max_input_tokens": 180_000,
212+ # "max_new_tokens": 2_000,
213+ # "temperature": 0.1,
214+ # "vision_support": True,
215+ # }
216+ # merged_args = {**default_model_args, **open_router_args}
217217
218- import openai
218+ # return OpenRouterModelArgs(model_name=model_name, **merged_args)
219219
220- client = openai .Client (
221- api_key = os .getenv ("OPENROUTER_API_KEY" ), base_url = "https://openrouter.ai/api/v1"
222- )
223- try :
224- response = client .chat .completions .create (
225- model = model_name ,
226- messages = [{"role" : "user" , "content" : "Call the test tool" }],
227- tools = [
228- {
229- "type" : "function" ,
230- "function" : {
231- "name" : "dummy_tool" ,
232- "description" : "Just a test tool" ,
233- "parameters" : {
234- "type" : "object" ,
235- "properties" : {},
236- },
237- },
238- }
239- ],
240- tool_choice = "required" ,
241- )
242- response = response .to_dict ()
243- return "tool_calls" in response ["choices" ][0 ]["message" ]
244- except Exception as e :
245- print (f"Model '{ model_name } ' error: { e } " )
246- return False
247-
248-
249- def get_openrouter_model (model_name : str , ** open_router_args ) -> OpenRouterModelArgs :
250- default_model_args = {
251- "max_total_tokens" : 200_000 ,
252- "max_input_tokens" : 180_000 ,
253- "max_new_tokens" : 2_000 ,
254- "temperature" : 0.1 ,
255- "vision_support" : True ,
256- }
257- merged_args = {** default_model_args , ** open_router_args }
258-
259- return OpenRouterModelArgs (model_name = model_name , ** merged_args )
260-
261-
262- def get_openrouter_tool_use_agent (
263- model_name : str ,
264- model_args : dict = {},
265- use_first_obs = True ,
266- tag_screenshot = True ,
267- use_raw_page_output = True ,
268- ) -> ToolUseAgentArgs :
269- # To Do : Check if OpenRouter endpoint specific args are working
270- if not supports_tool_calling (model_name ):
271- raise ValueError (f"Model { model_name } does not support tool calling." )
272220
273- model_args = get_openrouter_model (model_name , ** model_args )
221+ # def get_openrouter_tool_use_agent(
222+ # model_name: str,
223+ # model_args: dict = {},
224+ # use_first_obs=True,
225+ # tag_screenshot=True,
226+ # use_raw_page_output=True,
227+ # ) -> ToolUseAgentArgs:
228+ # # To Do : Check if OpenRouter endpoint specific args are working
229+ # if not supports_tool_calling(model_name):
230+ # raise ValueError(f"Model {model_name} does not support tool calling.")
274231
275- return ToolUseAgentArgs (
276- model_args = model_args ,
277- use_first_obs = use_first_obs ,
278- tag_screenshot = tag_screenshot ,
279- use_raw_page_output = use_raw_page_output ,
280- )
232+ # model_args = get_openrouter_model(model_name, **model_args)
233+
234+ # return ToolUseAgentArgs(
235+ # model_args=model_args,
236+ # use_first_obs=use_first_obs,
237+ # tag_screenshot=tag_screenshot,
238+ # use_raw_page_output=use_raw_page_output,
239+ # )
281240
282241
283- OPENROUTER_MODEL = get_openrouter_tool_use_agent ("google/gemini-2.5-pro-preview" )
242+ # OPENROUTER_MODEL = get_openrouter_tool_use_agent("google/gemini-2.5-pro-preview")
284243
285244
286245AGENT_CONFIG = ToolUseAgentArgs (
287246 model_args = CLAUDE_MODEL_CONFIG ,
288247)
289248
290- MT_TOOL_USE_AGENT = ToolUseAgentArgs (
291- model_args = OPENROUTER_MODEL ,
292- )
249+ # MT_TOOL_USE_AGENT = ToolUseAgentArgs(
250+ # model_args=OPENROUTER_MODEL,
251+ # )
293252CHATAPI_AGENT_CONFIG = ToolUseAgentArgs (
294253 model_args = OpenAIChatModelArgs (
295254 model_name = "gpt-4o-2024-11-20" ,