diff --git a/src/huggingface_hub/inference/_mcp/mcp_client.py b/src/huggingface_hub/inference/_mcp/mcp_client.py
index 7d7744574e..80436b15dd 100644
--- a/src/huggingface_hub/inference/_mcp/mcp_client.py
+++ b/src/huggingface_hub/inference/_mcp/mcp_client.py
@@ -310,7 +310,19 @@ async def process_single_turn_with_tools(
         # Process tool calls one by one
         for tool_call in final_tool_calls.values():
             function_name = tool_call.function.name
-            function_args = json.loads(tool_call.function.arguments or "{}")
+            try:
+                function_args = json.loads(tool_call.function.arguments or "{}")
+            except json.JSONDecodeError as err:
+                tool_message = {
+                    "role": "tool",
+                    "tool_call_id": tool_call.id,
+                    "name": function_name,
+                    "content": f"Invalid JSON generated by the model: {err}",
+                }
+                tool_message_as_obj = ChatCompletionInputMessage.parse_obj_as_instance(tool_message)
+                messages.append(tool_message_as_obj)
+                yield tool_message_as_obj
+                continue  # move to next tool call
 
             tool_message = {"role": "tool", "tool_call_id": tool_call.id, "content": "", "name": function_name}
 
@@ -324,11 +336,13 @@ async def process_single_turn_with_tools(
             # Execute tool call with the appropriate session
             session = self.sessions.get(function_name)
             if session is not None:
-                result = await session.call_tool(function_name, function_args)
-                tool_message["content"] = format_result(result)
+                try:
+                    result = await session.call_tool(function_name, function_args)
+                    tool_message["content"] = format_result(result)
+                except Exception as err:
+                    tool_message["content"] = f"Error: MCP tool call failed with error message: {err}"
             else:
-                error_msg = f"Error: No session found for tool: {function_name}"
-                tool_message["content"] = error_msg
+                tool_message["content"] = f"Error: No session found for tool: {function_name}"
 
             # Yield tool message
             tool_message_as_obj = ChatCompletionInputMessage.parse_obj_as_instance(tool_message)