Skip to content

Commit 09739df

Browse files
Lpzhan931 and CISC authored
Apply suggestions from code review
Change the chat-template check condition and fix some formatting issues. Co-authored-by: Sigbjørn Skjæret <[email protected]>
1 parent 5becaad commit 09739df

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

src/llama-chat.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -214,7 +214,7 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
214214
return LLM_CHAT_TEMPLATE_SEED_OSS;
215215
} else if (tmpl_contains("'Assistant: ' + message['content'] + '<|separator|>")) {
216216
return LLM_CHAT_TEMPLATE_GROK_2;
217-
} else if (tmpl_contains(LU8("[unused9]系统:[unused10]")) && tmpl_contains("message['content'] + '[unused10]'")) {
217+
} else if (tmpl_contains(LU8("[unused9]系统:[unused10]"))) {
218218
return LLM_CHAT_TEMPLATE_PANGU_EMBED;
219219
}
220220
return LLM_CHAT_TEMPLATE_UNKNOWN;

src/llama-model.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6281,7 +6281,7 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
62816281
output = create_tensor(tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, TENSOR_NOT_REQUIRED);
62826282

62836283
// if output is NULL, init from the input tok embed
6284-
if (output == NULL){
6284+
if (output == NULL) {
62856285
output = create_tensor(tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, TENSOR_DUPLICATED);
62866286
}
62876287

0 commit comments

Comments
 (0)