1 parent cf9e613 commit d24383e
CMakeLists.txt
@@ -4,6 +4,18 @@ project(llama_cpp)
 
 option(BUILD_LLAMA_CPP "Build llama.cpp shared library and install alongside python package" ON)
 
+if (APPLE)
+    set(LLAMA_AVX OFF)
+    set(LLAMA_AVX2 OFF)
+    set(LLAMA_AVX512 OFF)
+    set(LLAMA_AVX512_VBMI OFF)
+    set(LLAMA_AVX512_VNNI OFF)
+    set(LLAMA_FMA OFF)
+    set(LLAMA_F16C OFF)
+    set(LLAMA_ACCELERATE OFF)
+    set(LLAMA_METAL OFF)
+endif()
+
 if (BUILD_LLAMA_CPP)
     set(BUILD_SHARED_LIBS "On")
     add_subdirectory(vendor/llama.cpp)
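
Because the added lines define plain (non-cache) variables before add_subdirectory(vendor/llama.cpp), whether the vendored project's option() declarations honor them depends on that project's CMP0077 policy setting. Below is a minimal sketch of the cache-variable form sometimes used to pre-seed a subproject's options instead; it is not the commit's code. The flag names are copied from the diff above, and the two shown stand in for the full list.

# Sketch only: pre-seed a vendored project's options as cache entries.
# option() leaves an already-existing cache entry untouched, under both the
# OLD and NEW behavior of policy CMP0077.
if (APPLE)
    set(LLAMA_METAL OFF CACHE BOOL "llama.cpp: build with the Metal backend")
    set(LLAMA_ACCELERATE OFF CACHE BOOL "llama.cpp: use the Accelerate framework")
    # The remaining LLAMA_AVX*, LLAMA_FMA and LLAMA_F16C flags follow the same pattern.
endif()

With this form, a value supplied on the command line (for example -DLLAMA_METAL=ON) still takes precedence, since set(... CACHE ...) without FORCE does not overwrite an existing cache entry.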