Commit bd697ca

llama : fix empty batch causing llama_batch_allocr to crash

1 parent 45f0976

1 file changed: +4 -0


src/llama.cpp

Lines changed: 4 additions & 0 deletions
@@ -21139,6 +21139,10 @@ struct llama_batch_allocr {
     // optionally fulfill the batch returned by llama_batch_get_one
     llama_batch_allocr(struct llama_context * ctx, struct llama_batch in_batch) {
         batch = in_batch;
+        if (batch.n_tokens == 0) {
+            // llama_(de|en)code_internal will return an error in this case
+            return;
+        }
         if (!batch.pos) {
             // determine the last position in KV cache
             llama_pos last_pos = -1;
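
For context, here is a minimal sketch (not part of the commit) of the situation this guard covers: handing a zero-token batch to llama_decode. It assumes the two-argument llama_batch_get_one of this era of llama.h, which returns a batch whose pos, seq_id and logits fields are left unset for llama_batch_allocr to fill in; the helper function name is hypothetical.

// Hypothetical caller, not from the commit: try to decode an empty batch.
#include "llama.h"
#include <cstdio>

static void try_empty_decode(llama_context * ctx) {
    // Zero tokens; pos/seq_id/logits are left null for llama_batch_allocr to fill in.
    llama_batch batch = llama_batch_get_one(/*tokens =*/ nullptr, /*n_tokens =*/ 0);

    // Before this commit, llama_batch_allocr would try to fill positions and
    // logits flags for zero tokens and could crash. With the early return above,
    // control reaches llama_(de|en)code_internal, which reports an error instead.
    if (llama_decode(ctx, batch) != 0) {
        fprintf(stderr, "llama_decode rejected the empty batch (expected)\n");
    }
}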
