
Commit 76e5753

Merge branch 'utilityai:main' into silas-redo-build-script
2 parents: 1f69c90 + 2a20e37

8 files changed (+43 −25 lines)

Cargo.lock

Lines changed: 10 additions & 10 deletions
Some generated files are not rendered by default.

Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ criterion = "0.5.1"
 pprof = "0.13.0"
 bindgen = "0.69.4"
 cc = "1.0.94"
-anyhow = "1.0.82"
+anyhow = "1.0.86"
 clap = "4.5.4"
 encoding_rs = "0.8.34"

embeddings/Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -1,12 +1,12 @@
 [package]
 name = "embeddings"
-version = "0.1.52"
+version = "0.1.54"
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-llama-cpp-2 = { path = "../llama-cpp-2", version = "0.1.52" }
+llama-cpp-2 = { path = "../llama-cpp-2", version = "0.1.54" }
 hf-hub = { workspace = true }
 clap = { workspace = true , features = ["derive"] }
 anyhow = { workspace = true }

llama-cpp-2/Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -1,15 +1,15 @@
 [package]
 name = "llama-cpp-2"
 description = "llama.cpp bindings for Rust"
-version = "0.1.52"
+version = "0.1.54"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/utilityai/llama-cpp-rs"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.52" }
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.54" }
 thiserror = { workspace = true }
 tracing = { workspace = true }

llama-cpp-2/src/model.rs

Lines changed: 24 additions & 6 deletions
@@ -113,7 +113,11 @@ impl LlamaModel {
     /// # Errors
     ///
     /// See [`TokenToStringError`] for more information.
-    pub fn token_to_str(&self, token: LlamaToken, special: Special) -> Result<String, TokenToStringError> {
+    pub fn token_to_str(
+        &self,
+        token: LlamaToken,
+        special: Special,
+    ) -> Result<String, TokenToStringError> {
         self.token_to_str_with_size(token, 32, special)
     }

@@ -122,7 +126,11 @@ impl LlamaModel {
     /// # Errors
     ///
     /// See [`TokenToStringError`] for more information.
-    pub fn token_to_bytes(&self, token: LlamaToken, special: Special) -> Result<Vec<u8>, TokenToStringError> {
+    pub fn token_to_bytes(
+        &self,
+        token: LlamaToken,
+        special: Special,
+    ) -> Result<Vec<u8>, TokenToStringError> {
         self.token_to_bytes_with_size(token, 32, special)
     }

@@ -131,9 +139,17 @@ impl LlamaModel {
     /// # Errors
     ///
     /// See [`TokenToStringError`] for more information.
-    pub fn tokens_to_str(&self, tokens: &[LlamaToken], special: Special) -> Result<String, TokenToStringError> {
+    pub fn tokens_to_str(
+        &self,
+        tokens: &[LlamaToken],
+        special: Special,
+    ) -> Result<String, TokenToStringError> {
         let mut builder = String::with_capacity(tokens.len() * 4);
-        for str in tokens.iter().copied().map(|t| self.token_to_str(t, special)) {
+        for str in tokens
+            .iter()
+            .copied()
+            .map(|t| self.token_to_str(t, special))
+        {
             builder += &str?;
         }
         Ok(builder)

@@ -451,12 +467,14 @@
                 content: c.content.as_ptr(),
             })
             .collect();
+
         // Set the tmpl pointer
         let tmpl = tmpl.map(CString::new);
-        let tmpl_ptr = match tmpl {
-            Some(str) => str?.as_ptr(),
+        let tmpl_ptr = match &tmpl {
+            Some(str) => str.as_ref().map_err(|e| e.clone())?.as_ptr(),
             None => std::ptr::null(),
         };
+
         let formatted_chat = unsafe {
             let res = llama_cpp_sys_2::llama_chat_apply_template(
                 self.model.as_ptr(),
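
The first three hunks above are line-wrapping only; the substantive change is the `tmpl_ptr` handling in the last hunk. The old code matched on the owned `Option`, which moved the `CString` into the match arm, so the pointer produced by `as_ptr()` pointed at memory freed when the arm ended. Matching on `&tmpl` keeps the `CString` owned by the still-live `tmpl` binding for the duration of the FFI call. A minimal standalone sketch of the pattern follows; it is not the crate's code, and the `render` helper plus the `expect` error handling are illustrative stand-ins:

use std::ffi::CString;
use std::os::raw::c_char;

// Hypothetical stand-in for an FFI call that reads the template pointer.
unsafe fn render(tmpl_ptr: *const c_char) -> bool {
    !tmpl_ptr.is_null()
}

fn main() {
    let tmpl: Option<&str> = Some("chatml");

    // Keep ownership in `tmpl_c`; only borrow it inside the match so the
    // CString stays alive for as long as the raw pointer is used.
    let tmpl_c = tmpl.map(CString::new);

    // The old pattern, `match tmpl_c { Some(s) => s?.as_ptr(), .. }`, moved the
    // CString into the arm and dropped it there, leaving the pointer dangling.
    let tmpl_ptr = match &tmpl_c {
        Some(s) => s.as_ref().expect("no interior NUL byte").as_ptr(),
        None => std::ptr::null(),
    };

    // `tmpl_c` is still in scope here, so `tmpl_ptr` is valid for the call.
    let ok = unsafe { render(tmpl_ptr) };
    assert!(ok);
}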

llama-cpp-sys-2/Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [package]
 name = "llama-cpp-sys-2"
 description = "Low Level Bindings to llama.cpp"
-version = "0.1.52"
+version = "0.1.54"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/utilityai/llama-cpp-rs"

llama-cpp-sys-2/llama.cpp

simple/Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -1,12 +1,12 @@
 [package]
 name = "simple"
-version = "0.1.52"
+version = "0.1.54"
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-llama-cpp-2 = { path = "../llama-cpp-2", version = "0.1.52" }
+llama-cpp-2 = { path = "../llama-cpp-2", version = "0.1.54" }
 hf-hub = { workspace = true }
 clap = { workspace = true , features = ["derive"] }
 anyhow = { workspace = true }
