Commit f9f5a71

Updating to fix the char8_t error.
1 parent f9cd683 commit f9f5a71

1 file changed: +7 -13 lines changed


src/llama-chat.cpp

Lines changed: 7 additions & 13 deletions
@@ -6,12 +6,6 @@
 #include <sstream>
 #include <algorithm>
 
-#if __cplusplus >= 202000L
-#define LU8(x) (const char*)(u8##x)
-#else
-#define LU8(x) u8##x
-#endif
-
 // trim whitespace from the beginning and end of a string
 static std::string trim(const std::string & str) {
     size_t start = 0;
@@ -158,12 +152,12 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
     } else if (tmpl_contains("[gMASK]sop")) {
         // chatglm3-6b
         return LLM_CHAT_TEMPLATE_CHATGLM_3;
-    } else if (tmpl_contains(LU8("<用户>"))) {
+    } else if (tmpl_contains(("<用户>"))) {
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
         return LLM_CHAT_TEMPLATE_MINICPM;
     } else if (tmpl_contains("'Assistant: ' + message['content'] + eos_token")) {
         return LLM_CHAT_TEMPLATE_DEEPSEEK_2;
-    } else if (tmpl_contains(LU8("<|Assistant|>")) && tmpl_contains(LU8("<|User|>")) && tmpl_contains(LU8("<|end▁of▁sentence|>"))) {
+    } else if (tmpl_contains(("<|Assistant|>")) && tmpl_contains(("<|User|>")) && tmpl_contains(("<|end▁of▁sentence|>"))) {
         return LLM_CHAT_TEMPLATE_DEEPSEEK_3;
     } else if (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]")) {
         // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
@@ -471,7 +465,7 @@ int32_t llm_chat_apply_template(
         for (auto message : chat) {
             std::string role(message->role);
             if (role == "user") {
-                ss << LU8("<用户>");
+                ss << ("<用户>");
                 ss << trim(message->content);
                 ss << "<AI>";
             } else {
@@ -487,7 +481,7 @@
             } else if (role == "user") {
                 ss << "User: " << message->content << "\n\n";
             } else if (role == "assistant") {
-                ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
+                ss << "Assistant: " << message->content << ("<|end▁of▁sentence|>");
             }
         }
         if (add_ass) {
@@ -500,13 +494,13 @@
             if (role == "system") {
                 ss << message->content << "\n\n";
             } else if (role == "user") {
-                ss << LU8("<|User|>") << message->content;
+                ss << ("<|User|>") << message->content;
             } else if (role == "assistant") {
-                ss << LU8("<|Assistant|>") << message->content << LU8("<|end▁of▁sentence|>");
+                ss << ("<|Assistant|>") << message->content << ("<|end▁of▁sentence|>");
             }
         }
         if (add_ass) {
-            ss << LU8("<|Assistant|>");
+            ss << ("<|Assistant|>");
         }
     } else if (tmpl == LLM_CHAT_TEMPLATE_EXAONE_3) {
         // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
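
For context on the char8_t error the commit message refers to: in C++20, `u8"..."` string literals changed type from `const char[]` to `const char8_t[]`, and a `char8_t` pointer no longer converts implicitly to `const char*`. The removed `LU8` macro papered over that with a cast; this change instead drops the `u8` prefix and uses plain narrow literals. A minimal sketch of the difference, assuming a C++20 compiler and a UTF-8-encoded source file (variable names here are illustrative, not from the patch):

```cpp
#include <iostream>
#include <string>

int main() {
    // In C++20 this no longer compiles: u8"..." is const char8_t[],
    // which has no implicit conversion to const char*.
    // const char * bad = u8"<用户>";

    // What the removed LU8 macro did: force the conversion with a cast.
    const char * via_cast = (const char *) u8"<用户>";

    // What the patch does instead: a plain narrow literal, which stays
    // const char[] and carries the same UTF-8 bytes as long as the source
    // file itself is stored and compiled as UTF-8.
    std::string plain = "<用户>";

    std::cout << via_cast << "\n" << plain << "\n";
    return 0;
}
```

The trade-off is that a plain literal only holds UTF-8 bytes if the compiler treats the source and execution charset as UTF-8 (e.g. `/utf-8` on MSVC), whereas `u8` literals are encoded as UTF-8 regardless of the execution character set.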
