Skip to content

Commit 602cb3c

Browse files
committed
fix: embedding context deadlock
1 parent e703a47 commit 602cb3c

File tree

3 files changed

+12
-13
lines changed

3 files changed

+12
-13
lines changed

package-lock.json

Lines changed: 10 additions & 4 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -190,7 +190,7 @@
190190
"ignore": "^5.3.2",
191191
"ipull": "^3.9.2",
192192
"is-unicode-supported": "^2.1.0",
193-
"lifecycle-utils": "^1.7.2",
193+
"lifecycle-utils": "^2.0.0",
194194
"log-symbols": "^7.0.0",
195195
"nanoid": "^5.0.9",
196196
"node-addon-api": "^8.3.0",

src/evaluator/LlamaModel/LlamaModel.ts

Lines changed: 1 addition & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -522,14 +522,7 @@ export class LlamaModel {
522522
if (this._vocabOnly)
523523
throw new Error("Model is loaded in vocabOnly mode, so no context can be created");
524524

525-
return await withLock(this._llama._memoryLock, LlamaLocks.loadToMemory, options.createSignal, async () => {
526-
const preventDisposalHandle = this._backendModelDisposeGuard.createPreventDisposalHandle();
527-
try {
528-
return await LlamaEmbeddingContext._create({_model: this}, options);
529-
} finally {
530-
preventDisposalHandle.dispose();
531-
}
532-
});
525+
return await LlamaEmbeddingContext._create({_model: this}, options);
533526
}
534527

535528
/**

0 commit comments

Comments (0)