Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions agent-arena/client/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -568,6 +568,18 @@
"https://huggingface.co/MadeAgents/Hammer-7b",
"MadeAgents",
"cc-by-nc-4.0",
],
"empower-dev/llama3-empower-functions-small-v1.1": [
"Empower-Functions-Small-v1.1 (FC)",
"https://huggingface.co/empower-dev/llama3-empower-functions-small-v1.1",
"Empower.dev",
"apache-2.0"
],
"empower-dev/llama3-empower-functions-large-v1.1": [
"Empower-Functions-Large-v1.1 (FC)",
"https://huggingface.co/empower-dev/llama3-empower-functions-large-v1.1",
"Empower.dev",
"apache-2.0"
]
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,8 @@
"THUDM/glm-4-9b-chat",
"ibm-granite/granite-20b-functioncalling",
"yi-large-fc",
"empower-dev/llama3-empower-functions-small-v1.1",
"empower-dev/llama3-empower-functions-large-v1.1",
]

TEST_FILE_MAPPING = {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from bfcl.model_handler.oss_handler import OSSHandler
from bfcl.model_handler.model_style import ModelStyle
import json
from bfcl.model_handler.utils import (
convert_to_tool,
)
from bfcl.model_handler.constant import (
GORILLA_TO_OPENAPI,
)


class EmpowerHandler(OSSHandler):
    """Handler for the Empower.dev llama3-based function-calling models.

    Builds llama3 chat-template prompts (with the tool definitions injected
    into the first message) and decodes the model's JSON tool-call output.
    """

    def __init__(self, model_name, temperature=0.001, top_p=1, max_tokens=1000) -> None:
        super().__init__(model_name, temperature, top_p, max_tokens)
        self.model_style = ModelStyle.OSSMODEL

    def _format_prompt(prompts, functions, test_category):
        # NOTE: deliberately defined WITHOUT `self` -- at class-body scope it
        # is a plain function, and it is passed as a callable via the
        # `format_prompt_func` default in `inference` below.
        formatted_prompt = "<|begin_of_text|>"

        for idx, prompt in enumerate(prompts):
            content = prompt["content"]
            if idx == 0:
                # Inject the available tools (OpenAPI-style JSON) ahead of the
                # first user message. Build the augmented text locally instead
                # of writing back into `prompt['content']`, so the caller's
                # conversation data is not mutated as a side effect.
                tools = convert_to_tool(
                    functions, GORILLA_TO_OPENAPI, ModelStyle.OSSMODEL, test_category
                )
                content = (
                    "In this environment you have access to a set of functions defined in the JSON format you can use to address user's requests, use them if needed.\nFunctions:\n"
                    + json.dumps(tools, indent=2)
                    + "\n\n"
                    + "User Message:\n"
                    + content
                )

            formatted_prompt += f"<|start_header_id|>{prompt['role']}<|end_header_id|>\n\n{content}<|eot_id|>"

        # Open the assistant turn so generation continues as the assistant.
        formatted_prompt += "<|start_header_id|>assistant<|end_header_id|>\n\n"

        return formatted_prompt

    def inference(
        self,
        test_question,
        num_gpus,
        gpu_memory_utilization,
        format_prompt_func=_format_prompt,
    ):
        """Run OSS inference with the Empower prompt format.

        Delegates to the base handler; `include_system_prompt=False` because
        the tool instructions are folded into the first message instead.
        """
        return super().inference(
            test_question,
            num_gpus,
            gpu_memory_utilization,
            format_prompt_func=format_prompt_func,
            include_system_prompt=False,
        )

    def decode_ast(self, result, language="Python"):
        """Parse the model output into [{function_name: arguments_dict}, ...].

        The model prefixes its output with a 3-character tag (e.g. the
        function/conversation marker <f>/<c>); the remainder is a JSON list of
        {"name": ..., "arguments": ...} objects.
        """
        # Strip the leading function/conversation tag <f>/<c>.
        result_stripped = result[3:]
        return [
            {invoked["name"]: invoked["arguments"]}
            for invoked in json.loads(result_stripped)
        ]

    def decode_execute(self, result):
        """Convert decoded calls into executable strings: name(k=v, ...)."""
        execution_list = []
        for function_call in self.decode_ast(result):
            for name, arguments in function_call.items():
                argument_list = [f"{k}={repr(v)}" for k, v in arguments.items()]
                execution_list.append(f"{name}({','.join(argument_list)})")
        return execution_list
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from bfcl.model_handler.proprietary_model.cohere import CohereHandler
from bfcl.model_handler.proprietary_model.databricks import DatabricksHandler
from bfcl.model_handler.oss_model.deepseek import DeepseekHandler
from bfcl.model_handler.empower_handler import EmpowerHandler
from bfcl.model_handler.proprietary_model.fireworks import FireworksHandler
from bfcl.model_handler.proprietary_model.functionary import FunctionaryHandler
from bfcl.model_handler.proprietary_model.gemini import GeminiHandler
Expand Down Expand Up @@ -102,5 +103,7 @@
"Salesforce/xLAM-7b-r": SalesforceHandler,
"Salesforce/xLAM-8x7b-r": SalesforceHandler,
"Salesforce/xLAM-8x22b-r": SalesforceHandler,
"MadeAgents/Hammer-7b": HammerHandler
"MadeAgents/Hammer-7b": HammerHandler,
"empower-dev/llama3-empower-functions-small-v1.1": EmpowerHandler,
"empower-dev/llama3-empower-functions-large-v1.1": EmpowerHandler,
}
2 changes: 1 addition & 1 deletion berkeley-function-call-leaderboard/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,4 +36,4 @@ include = ["bfcl*"]
Repository = "https://github.com/ShishirPatil/gorilla/tree/main/berkeley-function-call-leaderboard"

[project.optional-dependencies]
oss_eval = ["vllm==0.5.0"]
oss_eval = ["vllm==0.5.5"]