| 1 | +diff --git a/berkeley-function-call-leaderboard/bfcl_eval/constants/model_config.py b/berkeley-function-call-leaderboard/bfcl_eval/constants/model_config.py |
| 2 | +index db41f84..9200637 100644 |
| 3 | +--- a/berkeley-function-call-leaderboard/bfcl_eval/constants/model_config.py |
| 4 | ++++ b/berkeley-function-call-leaderboard/bfcl_eval/constants/model_config.py |
| 5 | +@@ -863,7 +863,7 @@ api_inference_model_map = { |
| 6 | + input_price=None, |
| 7 | + output_price=None, |
| 8 | + is_fc_model=True, |
| 9 | +- underscore_to_dot=True, |
| 10 | ++ underscore_to_dot=True, |
| 11 | + ), |
| 12 | + "qwen3-0.6b": ModelConfig( |
| 13 | + model_name="qwen3-0.6b", |
| 14 | +@@ -1930,6 +1930,78 @@ third_party_inference_model_map = { |
| 15 | + is_fc_model=False, |
| 16 | + underscore_to_dot=False, |
| 17 | + ), |
| 18 | ++ "openvino-qwen3-8b-int8-FC": ModelConfig( |
| 19 | ++ model_name="openvino-qwen3-8b-int8-FC", |
| 20 | ++ display_name="openvino-qwen3-8b-int8-FC", |
| 21 | ++ url="https://huggingface.co/Qwen/Qwen3-8B", |
| 22 | ++ org="Qwen", |
| 23 | ++ license="apache-2.0", |
| 24 | ++ model_handler=OpenAIHandler, |
| 25 | ++ input_price=None, |
| 26 | ++ output_price=None, |
| 27 | ++ is_fc_model=True, |
| 28 | ++ underscore_to_dot=True, |
| 29 | ++ ), |
| 30 | ++ "openvino-qwen3-8b-int4-FC": ModelConfig( |
| 31 | ++ model_name="openvino-qwen3-8b-int4-FC", |
| 32 | ++ display_name="openvino-qwen3-8b-int4-FC", |
| 33 | ++ url="https://huggingface.co/Qwen/Qwen3-8B", |
| 34 | ++ org="Qwen", |
| 35 | ++ license="apache-2.0", |
| 36 | ++ model_handler=OpenAIHandler, |
| 37 | ++ input_price=None, |
| 38 | ++ output_price=None, |
| 39 | ++ is_fc_model=True, |
| 40 | ++ underscore_to_dot=True, |
| 41 | ++ ), |
| 42 | ++ "openvino-qwen3-4b-int8-FC": ModelConfig( |
| 43 | ++ model_name="openvino-qwen3-4b-int8-FC", |
| 44 | ++ display_name="openvino-qwen3-4b-int8-FC", |
| 45 | ++ url="https://huggingface.co/Qwen/Qwen3-4B", |
| 46 | ++ org="Qwen", |
| 47 | ++ license="apache-2.0", |
| 48 | ++ model_handler=OpenAIHandler, |
| 49 | ++ input_price=None, |
| 50 | ++ output_price=None, |
| 51 | ++ is_fc_model=True, |
| 52 | ++ underscore_to_dot=True, |
| 53 | ++ ), |
| 54 | ++ "openvino-qwen3-4b-int4-FC": ModelConfig( |
| 55 | ++ model_name="openvino-qwen3-4b-int4-FC", |
| 56 | ++ display_name="openvino-qwen3-4b-int4-FC", |
| 57 | ++ url="https://huggingface.co/Qwen/Qwen3-4B", |
| 58 | ++ org="Qwen", |
| 59 | ++ license="apache-2.0", |
| 60 | ++ model_handler=OpenAIHandler, |
| 61 | ++ input_price=None, |
| 62 | ++ output_price=None, |
| 63 | ++ is_fc_model=True, |
| 64 | ++ underscore_to_dot=True, |
| 65 | ++ ), |
| 66 | ++ "openvino-phi-4-mini-instruct-int8-FC": ModelConfig( |
| 67 | ++ model_name="openvino-phi-4-mini-instruct-int8-FC", |
| 68 | ++ display_name="openvino-phi-4-mini-instruct-int8-FC", |
| 69 | ++ url="https://huggingface.co/microsoft/Phi-4-mini-instruct", |
| 70 | ++ org="Microsoft", |
| 71 | ++ license="MIT", |
| 72 | ++ model_handler=OpenAIHandler, |
| 73 | ++ input_price=None, |
| 74 | ++ output_price=None, |
| 75 | ++ is_fc_model=True, |
| 76 | ++ underscore_to_dot=True, |
| 77 | ++ ), |
| 78 | ++ "openvino-phi-4-mini-instruct-int4-FC": ModelConfig( |
| 79 | ++ model_name="openvino-phi-4-mini-instruct-int4-FC", |
| 80 | ++ display_name="openvino-phi-4-mini-instruct-int4-FC", |
| 81 | ++ url="https://huggingface.co/microsoft/Phi-4-mini-instruct", |
| 82 | ++ org="Microsoft", |
| 83 | ++ license="MIT", |
| 84 | ++ model_handler=OpenAIHandler, |
| 85 | ++ input_price=None, |
| 86 | ++ output_price=None, |
| 87 | ++ is_fc_model=True, |
| 88 | ++ underscore_to_dot=True, |
| 89 | ++ ), |
| 90 | + } |
| 91 | + |
| 92 | + |
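The new entries mirror the shape of the existing ones in `third_party_inference_model_map`. A minimal sketch of how one of them would be looked up and turned into a handler, assuming BFCL's usual lookup by dictionary key and the `OpenAIHandler(model_name, temperature)` signature shown in the next hunk (the temperature value here is arbitrary):

```python
# Illustrative only -- not part of the patch.
config = third_party_inference_model_map["openvino-qwen3-8b-int8-FC"]
handler = config.model_handler(        # OpenAIHandler for all of the entries above
    model_name=config.model_name,
    temperature=0.001,                 # arbitrary value chosen for this sketch
)
```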
| 93 | +diff --git a/berkeley-function-call-leaderboard/bfcl_eval/model_handler/api_inference/openai.py b/berkeley-function-call-leaderboard/bfcl_eval/model_handler/api_inference/openai.py |
| 94 | +index 656efc2..a1345a1 100644 |
| 95 | +--- a/berkeley-function-call-leaderboard/bfcl_eval/model_handler/api_inference/openai.py |
| 96 | ++++ b/berkeley-function-call-leaderboard/bfcl_eval/model_handler/api_inference/openai.py |
| 97 | +@@ -22,7 +22,7 @@ class OpenAIHandler(BaseHandler): |
| 98 | + def __init__(self, model_name, temperature) -> None: |
| 99 | + super().__init__(model_name, temperature) |
| 100 | + self.model_style = ModelStyle.OpenAI |
| 101 | +- self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) |
| 102 | ++ self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY", "unused"), base_url=os.getenv("OPENAI_BASE_URL", "http://localhost:8000")) |
| 103 | + |
| 104 | + def decode_ast(self, result, language="Python"): |
| 105 | + if "FC" in self.model_name or self.is_fc_model: |
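With the client change above, the handler talks to any OpenAI-compatible endpoint, so a locally served OpenVINO model can be exercised by setting `OPENAI_BASE_URL` (and a placeholder `OPENAI_API_KEY`, since local servers typically do not check it). A minimal sketch, assuming a server is already running at the default address and exposes one of the model ids added above; the tool definition is purely illustrative:

```python
import os
from openai import OpenAI

# Point the client at a local OpenAI-compatible server (e.g. OpenVINO Model Server).
client = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", "unused"),                  # placeholder key
    base_url=os.getenv("OPENAI_BASE_URL", "http://localhost:8000"),
)

# Single illustrative function-calling request against one of the new model ids.
response = client.chat.completions.create(
    model="openvino-qwen3-8b-int8-FC",   # assumed to match the served model id
    messages=[{"role": "user", "content": "What is the weather in Berlin?"}],
    tools=[{
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Look up the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }],
)
print(response.choices[0].message.tool_calls)
```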