
Commit fc36685

Merge pull request #391 from L-jasmine/feat/dynamic_link
Support dynamic link llama
2 parents 38686de + 70f26c2

File tree

3 files changed (+25, -6 lines)

llama-cpp-2/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@ tracing = { workspace = true }
 [features]
 cuda = ["llama-cpp-sys-2/cuda"]
 metal = ["llama-cpp-sys-2/metal"]
+dynamic_link = ["llama-cpp-sys-2/dynamic_link"]
 vulkan = ["llama-cpp-sys-2/vulkan"]
 native = ["llama-cpp-sys-2/native"]
 sampler = []
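
For context, a minimal sketch of how a downstream crate might opt into the new feature. The dependency source and version below are hypothetical placeholders; only the dynamic_link feature name and its forwarding to llama-cpp-sys-2 come from the diff above.

# Downstream Cargo.toml (hypothetical example)
# Enabling dynamic_link forwards to llama-cpp-sys-2/dynamic_link, so the
# build script links an existing libllama/libggml instead of compiling the
# bundled llama.cpp submodule.
[dependencies]
llama-cpp-2 = { version = "*", features = ["dynamic_link"] }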

llama-cpp-sys-2/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -62,5 +62,6 @@ cuda = []
 f16c = []
 fma = []
 metal = []
+dynamic_link = []
 vulkan = []
 native = []

llama-cpp-sys-2/build.rs

Lines changed: 23 additions & 6 deletions
@@ -86,11 +86,11 @@ compile_error!("feature \"vulkan\" cannot be enabled alongside other GPU based f
 
 static LLAMA_PATH: Lazy<PathBuf> = Lazy::new(|| PathBuf::from("./llama.cpp"));
 
-fn compile_bindings(out_path: &Path) {
+fn compile_bindings(out_path: &Path, llama_header_path: &Path) {
     println!("Generating bindings..");
     let bindings = bindgen::Builder::default()
-        .header(LLAMA_PATH.join("ggml.h").to_string_lossy())
-        .header(LLAMA_PATH.join("llama.h").to_string_lossy())
+        // .header(llama_header_path.join("ggml.h").to_string_lossy())
+        .header(llama_header_path.join("llama.h").to_string_lossy())
         .derive_partialeq(true)
         .allowlist_function("ggml_.*")
         .allowlist_type("ggml_.*")

@@ -670,18 +670,35 @@ fn compile_llama(mut cxx: Build, _out_path: impl AsRef<Path>) {
 }
 
 fn main() {
+    let out_path = PathBuf::from(env::var("OUT_DIR").expect("No out dir found"));
+
+    if cfg!(feature = "dynamic_link") {
+        println!("cargo:rustc-link-lib=llama");
+        println!("cargo:rustc-link-lib=ggml");
+
+        let llama_header_path = std::env::var("LLAMA_HEADE");
+        if let Ok(llama_header_path) = llama_header_path {
+            compile_bindings(&out_path, Path::new(&llama_header_path));
+        } else {
+            compile_bindings(&out_path, &LLAMA_PATH);
+        }
+
+        if let Ok(llama_lib_path) = std::env::var("LLAMA_LIB") {
+            println!("cargo:rustc-link-search={llama_lib_path}");
+        }
+        return;
+    }
+
     if std::fs::read_dir(LLAMA_PATH.as_path()).is_err() {
         panic!(
             "Could not find {}. Did you forget to initialize submodules?",
             LLAMA_PATH.display()
         );
     }
 
-    let out_path = PathBuf::from(env::var("OUT_DIR").expect("No out dir found"));
-
     println!("cargo:rerun-if-changed={}", LLAMA_PATH.display());
 
-    compile_bindings(&out_path);
+    compile_bindings(&out_path, &LLAMA_PATH);
 
     let mut cx = Build::new();
     let mut cxx = Build::new();
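
As the new branch in main() shows, a dynamic_link build emits cargo:rustc-link-lib directives for llama and ggml, reads LLAMA_HEADE (spelled exactly as in this commit) to locate the headers for bindgen, adds LLAMA_LIB to the link search path, and returns before the bundled llama.cpp would be built. Below is a sketch of one way to supply those variables, using Cargo's [env] table in .cargo/config.toml; the paths are placeholders, not part of this commit.

# .cargo/config.toml (hypothetical paths)
[env]
# Directory containing llama.h; passed to compile_bindings() for bindgen.
LLAMA_HEADE = "/opt/llama.cpp/include"
# Directory containing libllama/libggml; emitted as cargo:rustc-link-search.
LLAMA_LIB = "/opt/llama.cpp/lib"

Setting the same variables on the build command line works too; the [env] table is just a convenient per-project place to pin them.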
