@@ -718,7 +718,8 @@ void TokenizerImpl::InitializeChatParameters(const char* template_str,
718718}
719719
720720// ApplyChatTemplate method to choose the template logic based on chat_template
721- OrtxStatus TokenizerImpl::ApplyChatTemplate(const TokenizerImpl::MessageList& message_list, std::string& output,
721+ OrtxStatus TokenizerImpl::ApplyChatTemplate(const TokenizerImpl::MessageList& message_list,
722+                                             const char* tools, std::string& output,
722723                                             bool add_generation_prompt) const {
723724 // Note: The official chat template from this model's config file may not be supported.
724725 // However, we do not throw an error until checking model_to_template_map as the user
@@ -734,6 +735,20 @@ OrtxStatus TokenizerImpl::ApplyChatTemplate(const TokenizerImpl::MessageList& me
734735
735736 messages = message_list;
736737
738+   if (tools && *tools) {
739+     tool_calls = std::string(tools);
740+     if (!messages.empty()) {
741+       if (messages[0].find("tools") != messages[0].end()) {
742+         messages[0]["tools"] = tool_calls;
743+         tools_in_user_message = true;
744+       }
745+       if (messages[0].find("tool_calls") != messages[0].end()) {
746+         messages[0]["tool_calls"] = tool_calls;
747+         tools_in_user_message = true;
748+       }
749+     }
750+   }
751+
737752 // Apply the corresponding chat template if it is supported
738753 if (chat_template == PHI4_CHAT_TEMPLATE) {
739754 return Phi4ChatTemplate (output, add_generation_prompt);
@@ -762,9 +777,9 @@ OrtxStatus TokenizerImpl::ApplyChatTemplate(const TokenizerImpl::MessageList& me
762777 return {};
763778}
764779
765- OrtxStatus TokenizerImpl::ApplyChatTemplate(const char* template_str, const char* message, std::string& output,
766-                                             std::vector<extTokenId_t>& ids_vec, bool add_generation_prompt,
767-                                             bool tokenize) const {
780+ OrtxStatus TokenizerImpl::ApplyChatTemplate(const char* template_str, const char* message, const char* tools,
781+                                             std::string& output, std::vector<extTokenId_t>& ids_vec,
782+                                             bool add_generation_prompt, bool tokenize) const {
768783 OrtxStatus status;
769784   std::string input_str = minja::normalize_newlines(message);
770785   auto activated_str = tok_config_->chat_template_.c_str();
@@ -783,7 +798,7 @@ OrtxStatus TokenizerImpl::ApplyChatTemplate(const char* template_str, const char
783798     return {kOrtxErrorInvalidArgument, "Invalid JSON format in chat message."};
784799 }
785800
786-     status = ApplyChatTemplate(message_list, output, add_generation_prompt);
801+     status = ApplyChatTemplate(message_list, tools, output, add_generation_prompt);
787802 } else {
788803 using json = nlohmann::ordered_json;
789804 std::string text;
0 commit comments