llama.cpp
1 parent c5cd057 commit 76b505e
llama/addon/AddonContext.cpp
@@ -420,7 +420,8 @@ AddonContext::AddonContext(const Napi::CallbackInfo& info) : Napi::ObjectWrap<AddonContext>(info) {
     }
 
     if (options.Has("flashAttention")) {
-        context_params.flash_attn = options.Get("flashAttention").As<Napi::Boolean>().Value();
+        bool flashAttention = options.Get("flashAttention").As<Napi::Boolean>().Value();
+        context_params.flash_attn_type = flashAttention ? LLAMA_FLASH_ATTN_TYPE_ENABLED : LLAMA_FLASH_ATTN_TYPE_DISABLED;
     }
 
     if (options.Has("threads")) {
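This change tracks llama.cpp's API update that replaced the boolean flash_attn field on llama_context_params with the llama_flash_attn_type enum (LLAMA_FLASH_ATTN_TYPE_AUTO / LLAMA_FLASH_ATTN_TYPE_DISABLED / LLAMA_FLASH_ATTN_TYPE_ENABLED). A minimal standalone sketch of the same mapping, outside the Napi wrapper, might look like the following; the makeContextParams helper is hypothetical, and it assumes a llama.cpp build whose llama.h exposes the enum and llama_context_default_params():

    #include "llama.h"

    // Hypothetical helper: maps an optional boolean setting onto the
    // tri-state flash-attention enum. When the option is absent, the
    // value from llama_context_default_params() is kept (AUTO in recent
    // llama.cpp builds, which lets llama.cpp decide per backend).
    llama_context_params makeContextParams(bool hasFlashAttentionOption, bool flashAttention) {
        llama_context_params params = llama_context_default_params();

        if (hasFlashAttentionOption) {
            params.flash_attn_type = flashAttention
                ? LLAMA_FLASH_ATTN_TYPE_ENABLED
                : LLAMA_FLASH_ATTN_TYPE_DISABLED;
        }

        return params;
    }

The temporary flashAttention local in the diff keeps the explicit user choice separate from the enum translation, so a third state (leaving the default AUTO when the option is not set) stays easy to express.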