Commit 9747a25

Add test coverage for batch size validation
Parent: 1c8068c

File tree

1 file changed (+22, -0 lines)


Tests/AnyLanguageModelTests/LlamaLanguageModelTests.swift

Lines changed: 22 additions & 0 deletions
@@ -306,5 +306,27 @@ import Testing
             #expect(error == .unsupportedFeature)
         }
     }
+
+    @Test func promptExceedingBatchSize_rejected() async throws {
+        let session = LanguageModelSession(model: model)
+
+        // Use a very small batch size to exercise the validation
+        var options = GenerationOptions(maximumResponseTokens: 10)
+        options[custom: LlamaLanguageModel.self] = .init(batchSize: 8)
+
+        // Build a prompt that will tokenize to more than 8 tokens;
+        // ten repetitions of this sentence are well over the limit.
+        let longPrompt = String(repeating: "Hello world how are you today? ", count: 10)
+
+        do {
+            _ = try await session.respond(to: longPrompt, options: options)
+            // Reaching this point means either the prompt tokenized to 8 or
+            // fewer tokens (unlikely) or the validation did not fire (a bug);
+            // in practice this call should throw insufficientMemory.
+        } catch let error as LlamaLanguageModelError {
+            // Expected: the prompt's token count exceeds the batch size
+            #expect(error == .insufficientMemory)
+        }
+    }
 }
 #endif // Llama
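As the inline comments concede, the success path of this test passes silently: if respond(to:options:) returns without throwing, no failure is recorded. A minimal sketch of how the do-block could fail hard in that case, assuming Swift Testing's Issue.record (session, options, and longPrompt as in the committed test):

do {
    _ = try await session.respond(to: longPrompt, options: options)
    // No error was thrown: the batch-size validation did not reject the
    // oversized prompt, so record an explicit failure instead of passing.
    Issue.record("Expected .insufficientMemory, but respond(to:) succeeded")
} catch let error as LlamaLanguageModelError {
    #expect(error == .insufficientMemory)
}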

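For context, the behavior under test: the prompt's token count must fit within the configured batch size, and an oversized prompt surfaces as insufficientMemory. A self-contained sketch of that kind of guard; validatePromptFits, tokenCount, and SketchError are illustrative names, not AnyLanguageModel's actual internals:

// Hypothetical sketch of the guard the test exercises; names are
// illustrative, not the library's API.
enum SketchError: Error { case insufficientMemory }

func validatePromptFits(tokenCount: Int, batchSize: Int) throws {
    // The test implies the model rejects prompts larger than one batch
    // rather than splitting them across multiple decode calls.
    guard tokenCount <= batchSize else {
        throw SketchError.insufficientMemory
    }
}

// A roughly 70-token prompt (10 repetitions of a ~7-token sentence)
// against batchSize 8 should throw, mirroring the test's expectation.
do {
    try validatePromptFits(tokenCount: 70, batchSize: 8)
} catch {
    print(error) // insufficientMemory
}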