Skip to content

Commit 6e1a975

Browse files
committed
fix: increase the buffer size in LlamaModel::apply_chat_template from 2× to 4× the total message length
1 parent 64409eb commit 6e1a975

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

llama-cpp-2/src/model.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -454,7 +454,7 @@ impl LlamaModel {
454454
let message_length = chat.iter().fold(0, |acc, c| {
455455
acc + c.role.to_bytes().len() + c.content.to_bytes().len()
456456
});
457-
let mut buff: Vec<i8> = vec![0_i8; message_length * 2];
457+
let mut buff: Vec<i8> = vec![0_i8; message_length * 4];
458458

459459
// Build our llama_cpp_sys_2 chat messages
460460
let chat: Vec<llama_cpp_sys_2::llama_chat_message> = chat

0 commit comments

Comments (0)