Skip to content

Commit 698cb80

Browse files
committed
fix missing dot
1 parent d5ce3fe commit 698cb80

File tree

1 file changed: +1 addition, −1 deletion

llama_cpp/llama_cpp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1367,7 +1367,7 @@ def llama_model_n_head(model: llama_model_p, /) -> int:
 1367 1367    # LLAMA_API int32_t llama_model_n_head_kv (const struct llama_model * model);
 1368 1368    @ctypes_function("llama_model_n_head_kv", [llama_model_p_ctypes], ctypes.c_int32)
 1369 1369    def llama_model_n_head_kv(model: llama_model_p, /) -> int:
 1370      -      ..
      1370 +      ...
 1371 1371
 1372 1372    # // Get the model's RoPE frequency scaling factor
 1373 1373    # LLAMA_API float llama_model_rope_freq_scale_train(const struct llama_model * model);

(Reconstructed from a garbled page extraction: the fused "NNNNMMMM" tokens were the diff's old/new line-number columns. Per the commit title "fix missing dot", the single changed line 1370 adds the missing third dot to the `...` (Ellipsis) stub body of `llama_model_n_head_kv`.)

0 commit comments

Comments
 (0)