 #include <algorithm>

 #if __cplusplus >= 202000L
-    #define LU8(x) (const char*)(u8##x)
-#else
-    #define LU8(x) u8##x
+    #define LU8(x) (const char*)(x)
 #endif

 // trim whitespace from the beginning and end of a string
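The LU8 wrapper exists only because of a C++20 change: from C++20 on, a u8 string literal has type const char8_t[], and char8_t does not convert implicitly to const char* or std::string, while before C++20 the same literal is an ordinary const char[]. A standalone sketch, not part of the commit and assuming a UTF-8 source file and UTF-8 execution character set, showing why the cast matters only under C++20 and why a plain narrow literal carries the same bytes either way:

    #include <iostream>
    #include <string>

    int main() {
    #if __cplusplus >= 202000L
        // C++20: u8"..." is const char8_t[], so an explicit cast is needed
        // before the literal can initialize a std::string.
        std::string user_tag = (const char *) u8"<用户>";
    #else
        // Pre-C++20: u8"..." is const char[], so no cast is needed.
        std::string user_tag = u8"<用户>";
    #endif
        // A plain narrow literal in a UTF-8 source file holds the same bytes,
        // which is what lets the call sites below drop the wrapper.
        std::string plain_tag = "<用户>";
        std::cout << (user_tag == plain_tag ? "same bytes" : "different bytes") << "\n";
        return 0;
    }
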
@@ -158,12 +156,12 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
     } else if (tmpl_contains("[gMASK]sop")) {
         // chatglm3-6b
         return LLM_CHAT_TEMPLATE_CHATGLM_3;
-    } else if (tmpl_contains(LU8("<用户>"))) {
+    } else if (tmpl_contains("<用户>")) {
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
         return LLM_CHAT_TEMPLATE_MINICPM;
     } else if (tmpl_contains("'Assistant: ' + message['content'] + eos_token")) {
         return LLM_CHAT_TEMPLATE_DEEPSEEK_2;
-    } else if (tmpl_contains(LU8("<|Assistant|>")) && tmpl_contains(LU8("<|User|>")) && tmpl_contains(LU8("<|end▁of▁sentence|>"))) {
+    } else if (tmpl_contains("<|Assistant|>") && tmpl_contains("<|User|>") && tmpl_contains("<|end▁of▁sentence|>")) {
         return LLM_CHAT_TEMPLATE_DEEPSEEK_3;
     } else if (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]")) {
         // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
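Template detection in this hunk is substring matching against the raw Jinja template text rather than parsing it; tmpl_contains is, in effect, a find() over the template string. A hedged sketch of that helper (lambda form assumed, not copied from this commit):

    // returns true when the raw chat template text mentions the given marker
    auto tmpl_contains = [&tmpl](const std::string & needle) -> bool {
        return tmpl.find(needle) != std::string::npos;
    };
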
@@ -471,7 +469,7 @@ int32_t llm_chat_apply_template(
         for (auto message : chat) {
             std::string role(message->role);
             if (role == "user") {
-                ss << LU8("<用户>");
+                ss << "<用户>";
                 ss << trim(message->content);
                 ss << "<AI>";
             } else {
@@ -487,7 +485,7 @@ int32_t llm_chat_apply_template(
             } else if (role == "user") {
                 ss << "User: " << message->content << "\n\n";
             } else if (role == "assistant") {
-                ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
+                ss << "Assistant: " << message->content << "<|end▁of▁sentence|>";
             }
         }
         if (add_ass) {
@@ -500,13 +498,13 @@ int32_t llm_chat_apply_template(
             if (role == "system") {
                 ss << message->content << "\n\n";
             } else if (role == "user") {
-                ss << LU8("<|User|>") << message->content;
+                ss << "<|User|>" << message->content;
             } else if (role == "assistant") {
-                ss << LU8("<|Assistant|>") << message->content << LU8("<|end▁of▁sentence|>");
+                ss << "<|Assistant|>" << message->content << "<|end▁of▁sentence|>";
             }
         }
         if (add_ass) {
-            ss << LU8("<|Assistant|>");
+            ss << "<|Assistant|>";
         }
     } else if (tmpl == LLM_CHAT_TEMPLATE_EXAONE_3) {
         // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
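As a worked example of the DeepSeek-V3 branch in the hunk above, with hypothetical messages {system: "S", user: "U", assistant: "A", user: "U2"} and add_ass set, the stream ends up holding:

    S\n\n<|User|>U<|Assistant|>A<|end▁of▁sentence|><|User|>U2<|Assistant|>

That is: the system text followed by a blank line, each user turn prefixed with <|User|>, each completed assistant turn wrapped in <|Assistant|> ... <|end▁of▁sentence|>, and a bare <|Assistant|> appended at the end to prompt the next reply.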