Skip to content

Commit 76b505e

Browse files
o-u-p and nickname-yin authored
fix: adapt to breaking llama.cpp changes (#501)
Co-authored-by: Kevin <[email protected]>
1 parent c5cd057 commit 76b505e

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

llama/addon/AddonContext.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -420,7 +420,8 @@ AddonContext::AddonContext(const Napi::CallbackInfo& info) : Napi::ObjectWrap<Ad
420420
}
421421

422422
if (options.Has("flashAttention")) {
423-
context_params.flash_attn = options.Get("flashAttention").As<Napi::Boolean>().Value();
423+
bool flashAttention = options.Get("flashAttention").As<Napi::Boolean>().Value();
424+
context_params.flash_attn_type = flashAttention ? LLAMA_FLASH_ATTN_TYPE_ENABLED : LLAMA_FLASH_ATTN_TYPE_DISABLED;
424425
}
425426

426427
if (options.Has("threads")) {

0 commit comments

Comments (0)