Skip to content

Commit 9db72b0

Browse files
authored
fix: adapt to llama.cpp interface change (#49)
1 parent b3758b4 commit 9db72b0

File tree

1 file changed

+2
-9
lines changed

1 file changed

+2
-9
lines changed

llama/addon.cpp

Lines changed: 2 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -193,16 +193,9 @@ class LLAMAContext : public Napi::ObjectWrap<LLAMAContext> {
193193
Napi::Value Encode(const Napi::CallbackInfo& info) {
194194
std::string text = info[0].As<Napi::String>().Utf8Value();
195195

196-
std::vector<llama_token> tokens(text.size());
197-
int n = llama_tokenize(ctx, text.data(), tokens.data(), text.size(), false);
196+
std::vector<llama_token> tokens = llama_tokenize(ctx, text, false);
198197

199-
if (n < 0) {
200-
Napi::Error::New(info.Env(), "String expected").ThrowAsJavaScriptException();
201-
return info.Env().Undefined();
202-
}
203-
tokens.resize(n);
204-
205-
Napi::Uint32Array result = Napi::Uint32Array::New(info.Env(), n);
198+
Napi::Uint32Array result = Napi::Uint32Array::New(info.Env(), tokens.size());
206199
for (size_t i = 0; i < tokens.size(); ++i) { result[i] = static_cast<uint32_t>(tokens[i]); }
207200

208201
return result;

0 commit comments

Comments (0)