Commit 8a36976

Merge pull request #758 from MatthewCash/fix/bad-grammar-segfault
Make LlamaSampler::grammar() return an Option
2 parents 079a47e + 98e3671 commit 8a36976

1 file changed (+15, -6 lines)

llama-cpp-2/src/sampling.rs

Lines changed: 15 additions & 6 deletions

@@ -278,7 +278,7 @@ impl LlamaSampler {
     /// # Panics
     /// If either of ``grammar_str`` or ``grammar_root`` contain null bytes.
     #[must_use]
-    pub fn grammar(model: &LlamaModel, grammar_str: &str, grammar_root: &str) -> Self {
+    pub fn grammar(model: &LlamaModel, grammar_str: &str, grammar_root: &str) -> Option<Self> {
         let grammar_str = CString::new(grammar_str).unwrap();
         let grammar_root = CString::new(grammar_root).unwrap();
 
@@ -289,7 +289,12 @@ impl LlamaSampler {
                 grammar_root.as_ptr(),
             )
         };
-        Self { sampler }
+
+        if sampler.is_null() {
+            None
+        } else {
+            Some(Self { sampler })
+        }
     }
 
     /// Lazy grammar sampler, introduced in <https://github.com/ggerganov/llama.cpp/pull/9639>
@@ -306,7 +311,7 @@ impl LlamaSampler {
         grammar_root: &str,
         trigger_words: impl IntoIterator<Item = impl AsRef<[u8]>>,
         trigger_tokens: &[LlamaToken],
-    ) -> Self {
+    ) -> Option<Self> {
         let grammar_str = CString::new(grammar_str).unwrap();
         let grammar_root = CString::new(grammar_root).unwrap();
 
@@ -331,9 +336,13 @@ impl LlamaSampler {
                 trigger_tokens.len(),
             )
         };
-
-        Self { sampler }
-    }
+
+        if sampler.is_null() {
+            None
+        } else {
+            Some(Self { sampler })
+        }
+    }
 
     /// DRY sampler, designed by p-e-w, as described in:
     /// <https://github.com/oobabooga/text-generation-webui/pull/5677>, porting Koboldcpp
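
With this change, a grammar that llama.cpp rejects surfaces as `None` instead of a sampler wrapping a null pointer (the segfault this PR fixes). Below is a minimal sketch of how calling code might adapt; the grammar string, function name, and module paths for `LlamaModel` are assumptions for illustration, not part of this commit.

```rust
use llama_cpp_2::model::LlamaModel;
use llama_cpp_2::sampling::LlamaSampler;

// Hypothetical helper: `model` is assumed to be an already-loaded LlamaModel,
// and the GBNF grammar below is a made-up example.
fn build_grammar_sampler(model: &LlamaModel) -> Option<LlamaSampler> {
    let grammar = r#"root ::= "yes" | "no""#;

    // `grammar()` now returns `Option<Self>`, so a rejected grammar can be
    // handled here rather than crashing later when the sampler is used.
    match LlamaSampler::grammar(model, grammar, "root") {
        Some(sampler) => Some(sampler),
        None => {
            eprintln!("grammar was rejected by llama.cpp");
            None
        }
    }
}
```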
