
Commit 0b8ab7b

Merge branch 'main' into 134-add-windows-to-test-ci

2 parents: 2fbb793 + f4bac4a

3 files changed (+10, -2 lines)

llama-cpp-2/src/model/params.rs (3 additions, 0 deletions)

@@ -180,7 +180,10 @@ impl LlamaModelParams {
     /// ```
     /// # use llama_cpp_2::model::params::LlamaModelParams;
     /// let params = LlamaModelParams::default();
+    /// #[cfg(not(target_os = "macos"))]
     /// assert_eq!(params.n_gpu_layers(), 0, "n_gpu_layers should be 0");
+    /// #[cfg(target_os = "macos")]
+    /// assert_eq!(params.n_gpu_layers(), 999, "n_gpu_layers should be 999");
     /// assert_eq!(params.main_gpu(), 0, "main_gpu should be 0");
     /// assert_eq!(params.vocab_only(), false, "vocab_only should be false");
     /// assert_eq!(params.use_mmap(), true, "use_mmap should be true");
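Note on the doc-test change above: `#[cfg(...)]` attributes are allowed on individual statements and doc examples are compiled per target, so one example can assert the non-macOS default of 0 offloaded layers and the macOS (Metal) default of 999. A minimal standalone sketch of the same pattern; the helper below is hypothetical, standing in for `LlamaModelParams::default().n_gpu_layers()`:

// Hypothetical stand-in for LlamaModelParams::default().n_gpu_layers():
// per the diff, the default is 999 offloaded layers on macOS and 0 elsewhere.
fn default_n_gpu_layers() -> i32 {
    if cfg!(target_os = "macos") { 999 } else { 0 }
}

fn main() {
    let n_gpu_layers = default_n_gpu_layers();

    // Each target compiles only the assertion that matches its platform default.
    #[cfg(not(target_os = "macos"))]
    assert_eq!(n_gpu_layers, 0, "n_gpu_layers should be 0");

    #[cfg(target_os = "macos")]
    assert_eq!(n_gpu_layers, 999, "n_gpu_layers should be 999");
}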

llama-cpp-sys-2/Cargo.toml (1 addition, 0 deletions)

@@ -32,6 +32,7 @@ include = [
     "/llama.cpp/llama.cpp",
     "/llama.cpp/llama.h",
     "/llama.cpp/unicode.h",
+    "/llama.cpp/unicode.cpp",
 ]

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
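Note: the `include` array is the whitelist of files packaged when the crate is published, so `unicode.cpp` has to be listed here now that build.rs compiles it (see the build.rs hunks below); otherwise a build from the published crate would not find the source file.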

llama-cpp-sys-2/build.rs (6 additions, 2 deletions)

@@ -130,7 +130,7 @@ fn main() {
         llama_cpp.define("GGML_USE_ACCELERATE", None);
         llama_cpp.define("ACCELERATE_NEW_LAPACK", None);
         llama_cpp.define("ACCELERATE_LAPACK_ILP64", None);
-        println!("cargo:rustc-link-arg=framework=Accelerate");
+        println!("cargo:rustc-link-lib=framework=Accelerate");

         metal_hack(&mut ggml);
         ggml.include("./llama.cpp/ggml-metal.h");
@@ -156,7 +156,8 @@ fn main() {
         .define("_XOPEN_SOURCE", Some("600"))
         .include("llama.cpp")
         .std("c++11")
-        .file("llama.cpp/llama.cpp");
+        .file("llama.cpp/llama.cpp")
+        .file("llama.cpp/unicode.cpp");

     // Remove debug log output from `llama.cpp`
     let is_release = env::var("PROFILE").unwrap() == "release";
@@ -199,6 +200,9 @@ fn main() {
     bindings
         .write_to_file(out_path.join("bindings.rs"))
         .expect("failed to write bindings to file");
+    let llama_cpp_dir = PathBuf::from("llama.cpp").canonicalize().unwrap();
+    println!("cargo:INCLUDE={}", llama_cpp_dir.to_str().unwrap());
+    println!("cargo:OUT_DIR={}", out_path.to_str().unwrap());
 }

 // courtesy of https://github.com/rustformers/llm
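Two notes on the build.rs changes above. The Accelerate line switches from `cargo:rustc-link-arg=framework=Accelerate`, which passes the literal token `framework=Accelerate` straight to the linker, to `cargo:rustc-link-lib=framework=Accelerate`, the form Cargo documents for telling rustc to link a macOS framework.

The new `cargo:INCLUDE=...` and `cargo:OUT_DIR=...` lines are build-script metadata. Cargo forwards `cargo:KEY=VALUE` pairs from a dependency's build script to dependent crates' build scripts as `DEP_<links>_<KEY>` environment variables, provided the dependency declares a `links` key in its Cargo.toml. A hypothetical downstream build.rs consuming them might look like the sketch below; the `DEP_LLAMA_*` names assume `links = "llama"` and are not part of this commit:

// Hypothetical downstream build.rs (not part of this commit) reading the
// metadata exported by llama-cpp-sys-2. The DEP_LLAMA_* names assume the
// dependency sets `links = "llama"`; adjust to the real `links` value.
fn main() {
    // Directory containing the vendored llama.cpp headers (from cargo:INCLUDE=...).
    if let Ok(include_dir) = std::env::var("DEP_LLAMA_INCLUDE") {
        println!("cargo:warning=llama.cpp headers at {include_dir}");
    }
    // Build output of llama-cpp-sys-2, where bindings.rs was written (from cargo:OUT_DIR=...).
    if let Ok(out_dir) = std::env::var("DEP_LLAMA_OUT_DIR") {
        println!("cargo:warning=llama-cpp-sys-2 OUT_DIR is {out_dir}");
    }
}

In practice the paths would be fed into a cc::Build or bindgen invocation rather than printed as warnings; the printing here is only to keep the sketch self-contained.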
