Commit d3b313f

fix tool's bug
1 parent 5a4cf81 commit d3b313f

File tree

1 file changed: 10 additions, 1 deletion


gpt_server/model_backend/lmdeploy_backend.py

Lines changed: 10 additions & 1 deletion
@@ -40,6 +40,15 @@ def is_stop(output: str, stop_str: str):
     return output, False
 
 
+def is_messages_with_tool(messages: list):
+    flag = False
+    for msg in messages:
+        if "content" not in msg:
+            flag = True
+            break
+    return flag
+
+
 class LMDeployBackend(ModelBackend):
     def __init__(self, model_path) -> None:
         backend = backend_map[os.getenv("backend")]
@@ -115,7 +124,7 @@ async def stream_chat(self, params: Dict[str, Any]) -> AsyncGenerator:
             response_format=params["response_format"],
         )
         logger.info(f"request_id {int(request_id)}")
-        if params.get("tools", None):
+        if params.get("tools", None) or is_messages_with_tool(messages=messages):
             messages = prompt or messages  # workaround: lmdeploy's prompt template does not support tools
         if self.messages_type_select:
             messages = prompt or messages
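
For context, a minimal sketch of what the new helper checks. The message dicts below are hypothetical OpenAI-style examples, not taken from the repository: an assistant turn that issues tool calls carries a "tool_calls" field and typically has no "content" key, which is exactly what is_messages_with_tool detects, so the tools code path is also taken when the incoming history already contains tool-call messages even if the request itself passes no "tools" parameter.

def is_messages_with_tool(messages: list):
    # Same logic as the committed helper: True as soon as any
    # message lacks a "content" key.
    flag = False
    for msg in messages:
        if "content" not in msg:
            flag = True
            break
    return flag

# Plain chat history: every message carries "content", so the check is False.
plain = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hi, how can I help?"},
]

# History with a tool call: the assistant turn has "tool_calls" instead of
# "content", so the check is True and stream_chat falls back to the prompt
# built for tools.
with_tool = [
    {"role": "user", "content": "What is the weather in Beijing?"},
    {"role": "assistant",
     "tool_calls": [{"type": "function",
                     "function": {"name": "get_weather",
                                  "arguments": '{"city": "Beijing"}'}}]},
    {"role": "tool", "tool_call_id": "call_0", "content": '{"temp": "22C"}'},
]

print(is_messages_with_tool(plain))      # False
print(is_messages_with_tool(with_tool))  # True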
