
Commit a864590

add /apply-template endpoint to server
1 parent 794fe23 commit a864590

File tree (2 files changed, +25 −0 lines)

  examples/server/server.cpp
  examples/server/tests/unit/test_chat_completion.py

examples/server/server.cpp

Lines changed: 9 additions & 0 deletions
@@ -4124,6 +4124,14 @@ int main(int argc, char ** argv) {
         res_ok(res, root);
     };
 
+    const auto handle_apply_template = [&ctx_server, &params, &res_ok](const httplib::Request & req, httplib::Response & res) {
+        auto body = json::parse(req.body);
+        const auto & chat_template = body.contains("tools") && ctx_server.chat_templates.template_tool_use ? *ctx_server.chat_templates.template_tool_use : *ctx_server.chat_templates.template_default;
+        json data = oaicompat_completion_params_parse(body, chat_template, params.use_jinja);
+
+        res_ok(res, data);
+    };
+
     const auto handle_embeddings = [&handle_embeddings_impl](const httplib::Request & req, httplib::Response & res) {
         handle_embeddings_impl(req, res, OAICOMPAT_TYPE_NONE);
     };
@@ -4300,6 +4308,7 @@ int main(int argc, char ** argv) {
     svr->Post("/v1/reranking", handle_rerank);
     svr->Post("/tokenize", handle_tokenize);
     svr->Post("/detokenize", handle_detokenize);
+    svr->Post("/apply-template", handle_apply_template);
     // LoRA adapters hotswap
     svr->Get ("/lora-adapters", handle_lora_adapters_list);
     svr->Post("/lora-adapters", handle_lora_adapters_apply);

examples/server/tests/unit/test_chat_completion.py

Lines changed: 16 additions & 0 deletions
@@ -121,6 +121,22 @@ def test_chat_template():
     assert res.body["__verbose"]["prompt"] == "<s> <|start_header_id|>system<|end_header_id|>\n\nBook<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the best book<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
 
 
+def test_apply_chat_template():
+    global server
+    server.chat_template = "command-r"
+    server.start()
+    res = server.make_request("POST", "/apply-template", data={
+        "max_tokens": 8,
+        "messages": [
+            {"role": "system", "content": "You are a test."},
+            {"role": "user", "content": "Hi there"},
+        ]
+    })
+    assert res.status_code == 200
+    assert "prompt" in res.body
+    assert res.body["prompt"] == "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>You are a test.<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Hi there<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
+
+
 @pytest.mark.parametrize("response_format,n_predicted,re_content", [
     ({"type": "json_object", "schema": {"const": "42"}}, 6, "\"42\""),
     ({"type": "json_object", "schema": {"items": [{"type": "integer"}]}}, 10, "[ -3000 ]"),
