Skip to content

Commit b1554be

Browse files
committed
context : add cache-less llama_context
ggml-ci
1 parent 072280e commit b1554be

File tree

8 files changed

+1122
-404
lines changed

8 files changed

+1122
-404
lines changed

common/common.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -952,7 +952,7 @@ struct common_init_result common_init_from_params(common_params & params) {
952952
}
953953

954954
if (params.ctx_shift && !llama_kv_self_can_shift(lctx)) {
955-
LOG_WRN("%s: KV cache shifting is not supported for this model, disabling KV cache shifting\n", __func__);
955+
LOG_WRN("%s: KV cache shifting is not supported for this context, disabling KV cache shifting\n", __func__);
956956
params.ctx_shift = false;
957957
}
958958

0 commit comments

Comments (0)