diff --git a/container-images/cuda/Containerfile b/container-images/cuda/Containerfile
index a465e82df..a010a264f 100644
--- a/container-images/cuda/Containerfile
+++ b/container-images/cuda/Containerfile
@@ -15,6 +15,14 @@ COPY --from=builder /tmp/install /usr
 # Workaround for CUDA libraries not in the ld path in base container
 RUN echo "/usr/local/cuda-12.8/compat" > /etc/ld.so.conf.d/99_cuda_compat.conf && ldconfig
 
+COPY ./container-images/scripts/lib.sh /run/lib.sh
+RUN sh -e -c ". /run/lib.sh && \
+    dnf_install_epel && \
+    add_stream_repo AppStream && \
+    add_stream_repo BaseOS && \
+    add_stream_repo CRB && \
+    dnf install -y vulkan vulkan-tools mesa-libEGL libXext && \
+    rm_non_ubi_repos"
 RUN dnf install -y python3.11 python3.11-pip python3.11-devel && \
     dnf -y clean all && \
     ln -sf /usr/bin/python3.11 /usr/bin/python3
diff --git a/container-images/scripts/build_llama_and_whisper.sh b/container-images/scripts/build_llama_and_whisper.sh
index 6d281dc94..1c1d9cac8 100755
--- a/container-images/scripts/build_llama_and_whisper.sh
+++ b/container-images/scripts/build_llama_and_whisper.sh
@@ -40,6 +40,14 @@ dnf_install_asahi() {
 }
 
 dnf_install_cuda() {
+  if is_rhel_based; then
+    dnf_install_epel
+    add_stream_repo "AppStream"
+    add_stream_repo "BaseOS"
+    add_stream_repo "CRB"
+  fi
+
+  dnf install -y "${vulkan_rpms[@]}"
   dnf install -y gcc-toolset-12
   # shellcheck disable=SC1091
   . /opt/rh/gcc-toolset-12/enable
@@ -226,7 +234,7 @@ configure_common_flags() {
       common_flags+=("-DGGML_HIP=ON" "-DAMDGPU_TARGETS=${AMDGPU_TARGETS:-gfx1010,gfx1012,gfx1030,gfx1032,gfx1100,gfx1101,gfx1102,gfx1103,gfx1151,gfx1200,gfx1201}")
       ;;
     cuda)
-      common_flags+=("-DGGML_CUDA=ON" "-DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined" "-DCMAKE_CUDA_FLAGS=\"-U__ARM_NEON -U__ARM_NEON__\"")
+      common_flags+=("-DGGML_VULKAN=1" "-DGGML_CUDA=ON" "-DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined" "-DCMAKE_CUDA_FLAGS=\"-U__ARM_NEON -U__ARM_NEON__\"")
       ;;
     vulkan | asahi)
       common_flags+=("-DGGML_VULKAN=1")
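
Quick smoke test (not part of the diff, a sketch only): assuming the image is built from the repository root so the COPY of container-images/scripts/lib.sh resolves, and tagged with a hypothetical name such as cuda-vulkan-test, the Vulkan loader and tools installed above can be exercised with vulkaninfo from the vulkan-tools package:

    # build from the repo root; the tag name is an assumption for illustration
    podman build -t cuda-vulkan-test -f container-images/cuda/Containerfile .
    # confirm the Vulkan loader is present inside the image
    podman run --rm cuda-vulkan-test vulkaninfo --summary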