Skip to content

Commit 54a669e

Browse files
author
ochafik
committed
Guard against missing eos/bos tokens (null token otherwise throws in llama_vocab::impl::token_get_attr)
1 parent 099f983 commit 54a669e

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

common/common.cpp

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1824,8 +1824,9 @@ std::string common_chat_format_example(const llama_chat_template & tmpl, bool us
1824 1824
llama_chat_templates common_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
1825 1825
{
1826 1826
auto vocab = llama_model_get_vocab(model);
1827-
auto token_bos = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
1828-
auto token_eos = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
1827+
// TODO: consider detecting if the template needs bos / eos tokens and warn / error when missing.
1828+
auto token_bos = llama_vocab_bos(vocab) == LLAMA_TOKEN_NULL ? "" : common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
1829+
auto token_eos = llama_vocab_eos(vocab) == LLAMA_TOKEN_NULL ? "" : common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
1829 1830
std::string default_template_src = chat_template_override;
1830 1831
std::string template_tool_use_src = chat_template_override;
1831 1832
bool has_explicit_template = !chat_template_override.empty();

0 commit comments

Comments (0)