Commit 6b012a6

fix: adapt to breaking llama.cpp changes (#183)
1 parent: d841fff

File tree

1 file changed (+2 / -2 lines)

llama/addon.cpp

Lines changed: 2 additions & 2 deletions
@@ -157,7 +157,7 @@ class LLAMAContext : public Napi::ObjectWrap<LLAMAContext> {
     }
 
     if (options.Has("embedding")) {
-      context_params.embedding = options.Get("embedding").As<Napi::Boolean>().Value();
+      context_params.embeddings = options.Get("embedding").As<Napi::Boolean>().Value();
     }
 
     if (options.Has("threads")) {
@@ -409,7 +409,7 @@ class LLAMAContextEvalWorker : Napi::AsyncWorker, Napi::Promise::Deferred {
       llama_sample_tail_free(ctx->ctx, &candidates_p, tfs_z, min_keep);
       llama_sample_typical(ctx->ctx, &candidates_p, typical_p, min_keep);
       llama_sample_top_p(ctx->ctx, &candidates_p, resolved_top_p, min_keep);
-      llama_sample_temperature(ctx->ctx, &candidates_p, temperature);
+      llama_sample_temp(ctx->ctx, &candidates_p, temperature);
       new_token_id = llama_sample_token(ctx->ctx, &candidates_p);
     }
 
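
Both hunks track renames in llama.cpp's C API: the llama_context_params.embedding field became embeddings, and llama_sample_temperature() was replaced by llama_sample_temp(). For context, here is a minimal standalone sketch of the affected sampling sequence against the renamed API; the wrapper name sample_next_token, the parameter values, and the shortened call chain (top-p, temperature, token draw) are illustrative assumptions, not code from this commit.

// Minimal sketch (not from this repository): drawing one token with the
// renamed llama.cpp sampling calls. Assumes a valid llama_context * and a
// llama_token_data_array already populated from the current logits.
#include "llama.h"

llama_token sample_next_token(llama_context * ctx, llama_token_data_array & candidates_p) {
    const size_t min_keep    = 1;      // illustrative values, not the addon's defaults
    const float  top_p       = 0.95f;
    const float  temperature = 0.8f;

    // Same ordering as in addon.cpp: filter by top-p, scale by temperature,
    // then sample from the remaining candidates.
    llama_sample_top_p(ctx, &candidates_p, top_p, min_keep);
    llama_sample_temp(ctx, &candidates_p, temperature);   // formerly llama_sample_temperature
    return llama_sample_token(ctx, &candidates_p);
}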
