Skip to content

Commit 153e852

Browse files
ochafik, ngxson, and ggerganov
authored
Apply suggestions from code review
Co-authored-by: Xuan Son Nguyen <[email protected]>
Co-authored-by: Georgi Gerganov <[email protected]>
1 parent cc50356 commit 153e852

File tree

3 files changed

+6
-5
lines changed

3 files changed

+6
-5
lines changed

common/common.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1821,11 +1821,11 @@ std::string common_chat_format_example(const llama_chat_template & tmpl, bool us
18211821
return common_chat_apply_template(tmpl, msgs, true, use_jinja);
18221822
}
18231823

1824-
llama_chat_templates llama_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
1824+
llama_chat_templates common_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
18251825
{
18261826
auto vocab = llama_model_get_vocab(model);
1827-
auto bos_token = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
1828-
auto eos_token = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
1827+
auto token_bos = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
1828+
auto token_eos = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
18291829
std::string default_template_src = chat_template_override;
18301830
std::string tool_use_template_src = chat_template_override;
18311831
bool has_explicit_template = !chat_template_override.empty();

common/common.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -607,8 +607,8 @@ typedef minja::chat_template llama_chat_template;
607607

608608
struct llama_chat_templates {
609609
bool has_explicit_template; // Model had builtin template or template overridde was specified.
610-
std::unique_ptr<llama_chat_template> default_template; // always set (defaults to chatml)
611-
std::unique_ptr<llama_chat_template> tool_use_template;
610+
std::unique_ptr<llama_chat_template> template_default; // always set (defaults to chatml)
611+
std::unique_ptr<llama_chat_template> template_tool_use;
612612
};
613613

614614
// CPP wrapper for llama_chat_apply_template

include/llama.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -510,6 +510,7 @@ extern "C" {
510510
LLAMA_API uint64_t llama_model_size(const struct llama_model * model);
511511

512512
// Get the default chat template. Returns nullptr if not available
513+
// If name is NULL, returns the default chat template
513514
LLAMA_API const char * llama_model_chat_template(const struct llama_model * model, const char * name);
514515

515516
// Returns the total number of parameters in the model

0 commit comments

Comments (0)