1 parent 7bd0440 commit 86ff8e3
tests/test-chat-template.cpp
@@ -134,10 +134,11 @@ int main(void) {
     int32_t res;
 
     // list all supported templates
-    std::vector<const char *> supported_tmpl(1024);
-    res = llama_chat_builtin_templates(supported_tmpl.data(), supported_tmpl.size());
+    std::vector<const char *> supported_tmpl;
+    res = llama_chat_builtin_templates(nullptr, 0);
     assert(res > 0);
     supported_tmpl.resize(res);
+    res = llama_chat_builtin_templates(supported_tmpl.data(), supported_tmpl.size());
     printf("Built-in chat templates:\n");
     for (auto tmpl : supported_tmpl) {
         printf("  %s\n", tmpl);