Skip to content

Commit 68eef2e

Browse files
committed
fix: adapt to breaking llama.cpp changes
1 parent 3b36095 commit 68eef2e

File tree

4 files changed

+13
-1
lines changed

4 files changed

+13
-1
lines changed

llama/addon/addon.cpp

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,12 @@ class AddonBackendUnloadWorker : public Napi::AsyncWorker {
151151
}
152152
};
153153

154+
Napi::Value addonLoadBackends(const Napi::CallbackInfo& info) {
155+
ggml_backend_load_all();
156+
157+
return info.Env().Undefined();
158+
}
159+
154160
Napi::Value addonInit(const Napi::CallbackInfo& info) {
155161
if (backendInitialized) {
156162
Napi::Promise::Deferred deferred = Napi::Promise::Deferred::New(info.Env());
@@ -205,6 +211,7 @@ Napi::Object registerCallback(Napi::Env env, Napi::Object exports) {
205211
Napi::PropertyDescriptor::Function("getGpuDeviceInfo", getGpuDeviceInfo),
206212
Napi::PropertyDescriptor::Function("getGpuType", getGpuType),
207213
Napi::PropertyDescriptor::Function("getSwapInfo", getSwapInfo),
214+
Napi::PropertyDescriptor::Function("loadBackends", addonLoadBackends),
208215
Napi::PropertyDescriptor::Function("init", addonInit),
209216
Napi::PropertyDescriptor::Function("dispose", addonDispose),
210217
});
@@ -215,7 +222,6 @@ Napi::Object registerCallback(Napi::Env env, Napi::Object exports) {
215222
AddonContext::init(exports);
216223
AddonSampler::init(exports);
217224

218-
ggml_backend_load_all();
219225
llama_log_set(addonLlamaCppLogCallback, nullptr);
220226

221227
exports.AddFinalizer(addonFreeLlamaBackend, static_cast<int*>(nullptr));

src/bindings/AddonTypes.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,7 @@ export type BindingModule = {
7676
free: number
7777
},
7878
init(): Promise<void>,
79+
loadBackends(): void,
7980
dispose(): Promise<void>
8081
};
8182

src/bindings/Llama.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,8 @@ export class Llama {
126126
this._bindings.setLoggerLogLevel(LlamaLogLevelToAddonLogLevel.get(this._logLevel) ?? defaultLogLevel);
127127
}
128128

129+
this._bindings.loadBackends();
130+
129131
this._onExit = this._onExit.bind(this);
130132

131133
process.on("exit", this._onExit);
@@ -605,6 +607,8 @@ function getTransformedLogLevel(level: LlamaLogLevel, message: string): LlamaLog
605607
return LlamaLogLevel.info;
606608
else if (level === LlamaLogLevel.warn && message.startsWith("ggml_metal_init: skipping kernel_") && message.endsWith("(not supported)"))
607609
return LlamaLogLevel.log;
610+
else if (level === LlamaLogLevel.warn && message.startsWith("ggml_cuda_init: GGML_CUDA_FORCE_") && message.endsWith(" no"))
611+
return LlamaLogLevel.log;
608612

609613
return level;
610614
}

src/bindings/utils/testBindingBinary.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -196,6 +196,7 @@ if (process.env.TEST_BINDING_CP === "true" && (process.parentPort != null || pro
196196
if (message.type === "start") {
197197
try {
198198
const binding: BindingModule = require(message.bindingBinaryPath);
199+
binding.loadBackends();
199200
await binding.init();
200201
binding.getGpuVramInfo();
201202
binding.getGpuDeviceInfo();

0 commit comments

Comments (0)