From a574d8402045c7e795b16b6d9f8e50ecf008cc66 Mon Sep 17 00:00:00 2001
From: Gilad S
Date: Wed, 11 Jun 2025 03:12:44 +0300
Subject: [PATCH] test: fix test

---
 test/modelDependent/llama3.2/promptCompletion.test.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/test/modelDependent/llama3.2/promptCompletion.test.ts b/test/modelDependent/llama3.2/promptCompletion.test.ts
index 574524d9..9362f25e 100644
--- a/test/modelDependent/llama3.2/promptCompletion.test.ts
+++ b/test/modelDependent/llama3.2/promptCompletion.test.ts
@@ -41,9 +41,9 @@ describe("llama 3.2", () => {
         });
 
         const promptCompletion = await chatSession.completePrompt("Hi there!", {
-            maxTokens: 50
+            maxTokens: 11
         });
-        expect(promptCompletion).toMatchInlineSnapshot("\" I'm looking for a new phone case. I need a case that can protect your phone from scratches and drops.\"");
+        expect(promptCompletion).toMatchInlineSnapshot(`" I'm looking for a new phone case. I need"`);
         expect(LlamaText.fromTokens(model.tokenizer, chatSession.sequence.contextTokens)).toMatchInlineSnapshot(`
             LlamaText([
                 new SpecialToken("BOS"),
@@ -64,7 +64,7 @@ describe("llama 3.2", () => {
                 new SpecialTokensText("<|end_header_id|>"),
                 "
 
-        Hi there! I'm looking for a new phone case. I need a case that can protect your phone from scratches and drops.",
+        Hi there! I'm looking for a new phone case. I",
             ])
         `);