File tree Expand file tree Collapse file tree 1 file changed +2
-2
lines changed Expand file tree Collapse file tree 1 file changed +2
-2
lines changed Original file line number Diff line number Diff line change @@ -36,7 +36,7 @@ pub struct LlamaLoraAdapter {
36
36
pub(crate) lora_adapter: NonNull<llama_cpp_sys_2::llama_adapter_lora>,
37
37
}
38
38
39
- /// A performance-friendly wrapper around [LlamaModel::get_chat_template] which is then
39
+ /// A performance-friendly wrapper around [LlamaModel::chat_template] which is then
40
40
/// fed into [LlamaModel::apply_chat_template] to convert a list of messages into an LLM
41
41
/// prompt. Internally the template is stored as a CString to avoid round-trip conversions
42
42
/// within the FFI.
@@ -627,7 +627,7 @@ impl LlamaModel {
627
627
/// use "chatml", then just do `LlamaChatTemplate::new("chatml")` or any other model name or template
628
628
/// string.
629
629
///
630
- /// Use [Self::get_chat_template] to retrieve the template baked into the model (this is the preferred
630
+ /// Use [Self::chat_template] to retrieve the template baked into the model (this is the preferred
631
631
/// mechanism as using the wrong chat template can result in really unexpected responses from the LLM).
632
632
///
633
633
/// You probably want to set `add_ass` to true so that the generated template string ends with the
You can’t perform that action at this time.
0 commit comments