Skip to content

Commit a3641e6

Browse files
authored
Bugfix for TENSOR_SKIP
Skip loading if a tensor has the TENSOR_SKIP flag — @ubergarm via ikawrakow#668 (comment)
1 parent 3f3e384 commit a3641e6

File tree

1 file changed

+11
-0
lines changed

1 file changed

+11
-0
lines changed

src/llama.cpp

Lines changed: 11 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -4896,6 +4896,17 @@ struct llama_model_loader {
48964896
return NULL;
48974897
}
48984898

4899+
// skip unused tensors
4900+
if (flags & TENSOR_SKIP) {
4901+
const size_t nbytes = ggml_nbytes(cur);
4902+
LLAMA_LOG_WARN("model has unused tensor %s (size = %zu bytes) -- ignoring\n", name.c_str(), nbytes);
4903+
4904+
size_data -= nbytes;
4905+
n_created++;
4906+
4907+
return nullptr;
4908+
}
4909+
48994910
return create_tensor_for(ctx, cur, flags & TENSOR_DUPLICATED);
49004911
}
49014912

0 commit comments

Comments (0)