Skip to content

Commit aaa6191

Browse files
python312Packages.llama-cpp-python: init at 0.3.1 (#349657)
2 parents 38caec4 + f4e43ac commit aaa6191

File tree

2 files changed

+105
-0
lines changed

2 files changed

+105
-0
lines changed
Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
# Python bindings for llama.cpp, built with scikit-build-core/CMake.
# Optionally compiles the bundled llama.cpp with CUDA acceleration.
{
  lib,
  stdenv,
  buildPythonPackage,
  cmake,
  fetchFromGitHub,
  gitUpdater,
  ninja,
  pathspec,
  pyproject-metadata,
  pytestCheckHook,
  pythonOlder,
  scikit-build-core,
  llama-cpp-python,

  # CUDA support follows the global nixpkgs config by default.
  config,
  cudaSupport ? config.cudaSupport,
  cudaPackages ? { },

  # Runtime dependencies.
  diskcache,
  jinja2,
  numpy,
  typing-extensions,

  # Test-only dependencies.
  scipy,
  huggingface-hub,
}:

let
  version = "0.3.1";
in
buildPythonPackage {
  pname = "llama-cpp-python";
  inherit version;
  pyproject = true;

  # NOTE(review): upstream may declare a newer minimum Python than 3.7 —
  # confirm against the pyproject.toml of the pinned release.
  disabled = pythonOlder "3.7";

  # Use the CUDA-compatible stdenv when building with CUDA so the host
  # compiler matches what nvcc supports.
  stdenv = if cudaSupport then cudaPackages.backendStdenv else stdenv;

  src = fetchFromGitHub {
    owner = "abetlen";
    repo = "llama-cpp-python";
    rev = "refs/tags/v${version}";
    hash = "sha256-eO1zvNJZBE5BCnbgbh00tFIRWBCWor1lIsrLXs/HFds=";
    # llama.cpp itself is vendored as a git submodule.
    fetchSubmodules = true;
  };

  # scikit-build-core drives CMake internally; the generic cmake configure
  # hook must not run on the top-level source tree.
  dontUseCmakeConfigure = true;

  # Forward CMake flags to scikit-build-core (semicolon-separated list).
  SKBUILD_CMAKE_ARGS = lib.concatStringsSep ";" (
    lib.optionals cudaSupport [
      "-DGGML_CUDA=on"
      "-DCUDAToolkit_ROOT=${lib.getDev cudaPackages.cuda_nvcc}"
      "-DCMAKE_CUDA_COMPILER=${lib.getExe cudaPackages.cuda_nvcc}"
    ]
  );

  nativeBuildInputs = [
    cmake
    ninja
    pathspec
    pyproject-metadata
    scikit-build-core
  ];

  buildInputs = lib.optionals cudaSupport (
    with cudaPackages;
    [
      cuda_cudart # cuda_runtime.h
      cuda_cccl # <thrust/*>
      libcublas # cublas_v2.h
    ]
  );

  propagatedBuildInputs = [
    diskcache
    jinja2
    numpy
    typing-extensions
  ];

  nativeCheckInputs = [
    pytestCheckHook
    scipy
    huggingface-hub
  ];

  disabledTests = [
    # tries to download model from huggingface-hub
    "test_real_model"
    "test_real_llama"
  ];

  pythonImportsCheck = [ "llama_cpp" ];

  passthru = {
    updateScript = gitUpdater { rev-prefix = "v"; };
    # Exercise the CUDA build path (refers back to this package via the
    # fixed-point argument above).
    tests.llama-cpp-python = llama-cpp-python.override { cudaSupport = true; };
  };

  meta = {
    description = "Python bindings for llama.cpp";
    homepage = "https://github.com/abetlen/llama-cpp-python";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ kirillrdy ];
  };
}

pkgs/top-level/python-packages.nix

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7476,6 +7476,8 @@ self: super: with self; {
74767476

74777477
lizard = callPackage ../development/python-modules/lizard { };
74787478

7479+
llama-cpp-python = callPackage ../development/python-modules/llama-cpp-python { };
7480+
74797481
llama-cloud = callPackage ../development/python-modules/llama-cloud { };
74807482

74817483
llama-index = callPackage ../development/python-modules/llama-index { };

0 commit comments

Comments
 (0)