1 parent 31725a2 commit 11e4438
llama_cpp/llama_cpp.py
@@ -3814,6 +3814,7 @@ def llama_sampler_init_softmax() -> llama_sampler_p:
# /// @details Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
+# /// Setting k <= 0 makes this a noop
# LLAMA_API struct llama_sampler * llama_sampler_init_top_k (int32_t k);
@ctypes_function("llama_sampler_init_top_k", [ctypes.c_int32], llama_sampler_p_ctypes)
def llama_sampler_init_top_k(k: int) -> llama_sampler_p:
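For context, here is a minimal usage sketch (not part of this commit) of the binding touched above. It assumes the companion llama_sampler_chain_* and llama_sampler_free bindings that llama_cpp.llama_cpp exposes alongside llama_sampler_init_top_k.

import llama_cpp.llama_cpp as llama_cpp

# Build a sampler chain and attach a top-k sampler (sketch; chain helpers assumed available).
params = llama_cpp.llama_sampler_chain_default_params()
chain = llama_cpp.llama_sampler_chain_init(params)

# k = 40 keeps only the 40 most probable tokens at each step;
# per the comment added in this commit, k <= 0 makes the sampler a no-op.
llama_cpp.llama_sampler_chain_add(chain, llama_cpp.llama_sampler_init_top_k(40))

# ... run sampling with the chain, then release it
# (in llama.cpp, freeing a chain also frees the samplers added to it).
llama_cpp.llama_sampler_free(chain)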