
Commit 6267778

fix: create a context with no parameters (#188)
1 parent: 7e70a9f

2 files changed: +3 −5 lines

src/evaluator/LlamaModel.ts

Lines changed: 2 additions & 2 deletions
@@ -293,7 +293,7 @@ export class LlamaModel {
         return this._model.getTokenType(token) as GgufMetadataTokenizerTokenType;
     }

-    public async createContext(options: LlamaContextOptions) {
+    public async createContext(options: LlamaContextOptions = {}) {
         return await withLock(this._llama._memoryLock, LlamaLocks.loadToMemory, options.createSignal, async () => {
             const preventDisposalHandle = this._backendModelDisposeGuard.createPreventDisposalHandle();
             try {
@@ -304,7 +304,7 @@ export class LlamaModel {
         });
     }

-    public async createEmbeddingContext(options: LlamaEmbeddingContextOptions) {
+    public async createEmbeddingContext(options: LlamaEmbeddingContextOptions = {}) {
         return await withLock(this._llama._memoryLock, LlamaLocks.loadToMemory, options.createSignal, async () => {
             const preventDisposalHandle = this._backendModelDisposeGuard.createPreventDisposalHandle();
             try {

test/modelDependent/functionary/sanity.test.ts

Lines changed: 1 addition & 3 deletions
@@ -12,9 +12,7 @@ describe("functionary", () => {
1212
const model = await llama.loadModel({
1313
modelPath
1414
});
15-
const context = await model.createContext({
16-
contextSize: 4096
17-
});
15+
const context = await model.createContext();
1816
const chatSession = new LlamaChatSession({
1917
contextSequence: context.getSequence()
2018
});
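With the default parameter in place, both context-creation methods can be called without an options object. A minimal sketch of the resulting call sites, assuming the getLlama() entry point from node-llama-cpp and a placeholder GGUF model path:

    import {getLlama, LlamaChatSession} from "node-llama-cpp";

    const llama = await getLlama();
    const model = await llama.loadModel({
        modelPath: "path/to/model.gguf" // placeholder path, substitute a local model file
    });

    // createContext() no longer requires an options argument; the `{}` default is used.
    const context = await model.createContext();

    // createEmbeddingContext() gets the same default.
    const embeddingContext = await model.createEmbeddingContext();

    // The context can be used as before, e.g. to back a chat session.
    const chatSession = new LlamaChatSession({
        contextSequence: context.getSequence()
    });

Passing an options object, such as {contextSize: 4096}, still works exactly as before; only the no-argument form is new.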
