Skip to content

Commit 5d18d76

Browse files
author
Olivier Chafik
committed
fix double-BOS issue (drop the BOS/EOS tokens from the Jinja template)
1 parent cf83623 commit 5d18d76

File tree

2 files changed

+13
-4
lines changed

2 files changed

+13
-4
lines changed

common/common.cpp

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1902,6 +1902,11 @@ common_chat_templates common_chat_templates_from_model(const struct llama_model
19021902
default_template_src = CHATML_TEMPLATE_SRC;
19031903
}
19041904
}
1905+
std::string token_bos;
1906+
std::string token_eos;
1907+
// TODO: update logic that adds BOS and EOS tokens to the tokenized prompt, in favour of the template.
1908+
#if 0
1909+
auto vocab = llama_model_get_vocab(model);
19051910
const auto get_token = [&](llama_token token, const char * name, const char * jinja_variable_name) {
19061911
if (token == LLAMA_TOKEN_NULL) {
19071912
if (default_template_src.find(jinja_variable_name) != std::string::npos
@@ -1913,8 +1918,9 @@ common_chat_templates common_chat_templates_from_model(const struct llama_model
19131918
return common_token_to_piece(vocab, token, true);
19141919
}
19151920
};
1916-
auto token_bos = get_token(llama_vocab_bos(vocab), "BOS", "bos_token");
1917-
auto token_eos = get_token(llama_vocab_eos(vocab), "EOS", "eos_token");
1921+
token_bos = get_token(llama_vocab_bos(vocab), "BOS", "bos_token");
1922+
token_eos = get_token(llama_vocab_eos(vocab), "EOS", "eos_token");
1923+
#endif
19181924
return {
19191925
has_explicit_template,
19201926
std::make_unique<minja::chat_template>(default_template_src, token_bos, token_eos),

examples/server/tests/unit/test_chat_completion.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,12 @@ def create_server():
1313
@pytest.mark.parametrize(
1414
"model,system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,finish_reason,jinja,chat_template",
1515
[
16+
(None, "Book", "Hey", 8, "But she couldn't", 69, 8, "length", False, None),
17+
(None, "Book", "Hey", 8, "But she couldn't", 69, 8, "length", True, None),
1618
(None, "Book", "What is the best book", 8, "(Suddenly)+|\\{ \" Sarax.", 77, 8, "length", False, None),
17-
(None, "Book", "What is the best book", 8, "(Suddenly)+|\\{ \" Sarax.", 77, 8, "length", True, None),
18-
(None, "Book", "What is the best book", 8, "^ blue", 23, 8, "length", True, "This is not a chat template, it is"),
19+
(None, "Book", "What is the best book", 8, "(Suddenly)+|\\{ \" Sarax.", 77, 8, "length", True, None),
20+
(None, "Book", "What is the best book", 8, "(Suddenly)+|\\{ \" Sarax.", 77, 8, "length", True, 'chatml'),
21+
(None, "Book", "What is the best book", 8, "^ blue", 23, 8, "length", True, "This is not a chat template, it is"),
1922
("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length", False, None),
2023
("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length", True, None),
2124
]

0 commit comments

Comments (0)