# syntax=docker/dockerfile:1

# Build-time pins for the UBI9 Python base image; override with --build-arg.
# These ARGs precede FROM, so they are only visible to the FROM line itself.
ARG PYTHON_VERSION=311
ARG IMAGE_TAG=9.7-1764607342

FROM registry.access.redhat.com/ubi9/python-${PYTHON_VERSION}:${IMAGE_TAG}

LABEL name="ray-ubi9-py311-cu121" \
      summary="CUDA 12.1 Python 3.11 image based on UBI9 for Ray" \
      description="CUDA 12.1 Python 3.11 image based on UBI9 for Ray" \
      io.k8s.display-name="CUDA 12.1 Python 3.11 base image for Ray" \
      io.k8s.description="CUDA 12.1 Python 3.11 image based on UBI9 for Ray" \
      authoritative-source-url="https://github.com/opendatahub-io/distributed-workloads"
12+
# Install CUDA base from:
# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.1.1/ubi9/base/Dockerfile
# Root is needed for the yum/rpm operations below; privileges are dropped back
# to the default non-root user (1001) at the end of the file.
USER 0
WORKDIR /opt/app-root/bin

ENV NVARCH=x86_64
ENV NVIDIA_REQUIRE_CUDA="cuda>=12.1 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526"
ENV NV_CUDA_CUDART_VERSION=12.1.105-1

COPY cuda.repo-x86_64 /etc/yum.repos.d/cuda.repo

# Import NVIDIA's RPM GPG key and verify it against a known checksum before
# trusting the CUDA repo. `set -o pipefail` (UBI9 /bin/sh is bash) ensures a
# curl failure is not masked by the succeeding `sed` at the end of the pipe
# (hadolint DL4006); without it a truncated download would only surface later
# as a confusing checksum mismatch.
RUN set -o pipefail && \
    NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3b80c87 && \
    curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/rhel9/${NVARCH}/D42D0685.pub | sed '/^Version/d' > /etc/pki/rpm-gpg/RPM-GPG-KEY-NVIDIA && \
    echo "$NVIDIA_GPGKEY_SUM /etc/pki/rpm-gpg/RPM-GPG-KEY-NVIDIA" | sha256sum -c --strict -
27+
ENV CUDA_VERSION=12.1.1

# For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
# NOTE(review): `yum upgrade` is flagged by hadolint DL3005, but it is kept
# here to stay line-for-line with the upstream NVIDIA base Dockerfile linked
# above; cleanup happens in the same layer so the cache never bloats the image.
RUN yum upgrade -y && yum install -y \
    cuda-cudart-12-1-${NV_CUDA_CUDART_VERSION} \
    cuda-compat-12-1 \
    && yum clean all \
    && rm -rf /var/cache/yum/*

# nvidia-docker 1.0
# Register the driver-injected library directories with the dynamic linker.
RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
    echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf

# Make driver and CUDA toolkit binaries/libraries resolvable at runtime; the
# /usr/local/nvidia paths are populated by the NVIDIA container runtime.
ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}
ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64

# NVIDIA Deep Learning Container License, required for redistribution.
COPY NGC-DL-CONTAINER-LICENSE /

# nvidia-container-runtime
# Expose all GPUs with compute+utility capabilities (e.g. nvidia-smi) by default.
ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
49+
# Install CUDA runtime from:
# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.1.1/ubi9/runtime/Dockerfile
# Exact RPM version pins for the CUDA 12.1.1 runtime stack; kept as ENV (not
# ARG) to match the upstream NVIDIA Dockerfile these sections mirror.
ENV NV_CUDA_LIB_VERSION=12.1.1-1
ENV NV_NVTX_VERSION=12.1.105-1
ENV NV_LIBNPP_VERSION=12.1.0.40-1
ENV NV_LIBNPP_PACKAGE=libnpp-12-1-${NV_LIBNPP_VERSION}
ENV NV_LIBCUBLAS_VERSION=12.1.3.1-1
ENV NV_LIBNCCL_PACKAGE_NAME=libnccl
ENV NV_LIBNCCL_PACKAGE_VERSION=2.17.1-1
ENV NV_LIBNCCL_VERSION=2.17.1
# NCCL_VERSION is re-declared with the same value in the devel section further
# down; harmless duplication inherited from the upstream runtime/devel Dockerfiles.
ENV NCCL_VERSION=2.17.1
ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.1

RUN yum install -y \
    cuda-libraries-12-1-${NV_CUDA_LIB_VERSION} \
    cuda-nvtx-12-1-${NV_NVTX_VERSION} \
    ${NV_LIBNPP_PACKAGE} \
    libcublas-12-1-${NV_LIBCUBLAS_VERSION} \
    ${NV_LIBNCCL_PACKAGE} \
    && yum clean all \
    && rm -rf /var/cache/yum/*

# Set this flag so that libraries can find the location of CUDA
ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda
74+
# Install CUDA devel from:
# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.1.1/ubi9/devel/Dockerfile
# NV_CUDA_LIB_VERSION is intentionally re-set to the same value as in the
# runtime section above — duplication inherited from the upstream Dockerfiles.
ENV NV_CUDA_LIB_VERSION=12.1.1-1
ENV NV_NVPROF_VERSION=12.1.105-1
ENV NV_NVPROF_DEV_PACKAGE=cuda-nvprof-12-1-${NV_NVPROF_VERSION}
ENV NV_CUDA_CUDART_DEV_VERSION=12.1.105-1
ENV NV_NVML_DEV_VERSION=12.1.105-1
ENV NV_LIBCUBLAS_DEV_VERSION=12.1.3.1-1
ENV NV_LIBNPP_DEV_VERSION=12.1.0.40-1
ENV NV_LIBNPP_DEV_PACKAGE=libnpp-devel-12-1-${NV_LIBNPP_DEV_VERSION}
ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-devel
ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.17.1-1
ENV NCCL_VERSION=2.17.1
ENV NV_LIBNCCL_DEV_PACKAGE=${NV_LIBNCCL_DEV_PACKAGE_NAME}-${NV_LIBNCCL_DEV_PACKAGE_VERSION}+cuda12.1
ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.1.1-1
ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-1-${NV_CUDA_NSIGHT_COMPUTE_VERSION}

# Compiler toolchain, headers, and dev libraries needed to build CUDA
# extensions (e.g. pip packages that compile kernels) inside this image.
RUN yum install -y \
    make \
    findutils \
    cuda-command-line-tools-12-1-${NV_CUDA_LIB_VERSION} \
    cuda-libraries-devel-12-1-${NV_CUDA_LIB_VERSION} \
    cuda-minimal-build-12-1-${NV_CUDA_LIB_VERSION} \
    cuda-cudart-devel-12-1-${NV_CUDA_CUDART_DEV_VERSION} \
    ${NV_NVPROF_DEV_PACKAGE} \
    cuda-nvml-devel-12-1-${NV_NVML_DEV_VERSION} \
    libcublas-devel-12-1-${NV_LIBCUBLAS_DEV_VERSION} \
    ${NV_LIBNPP_DEV_PACKAGE} \
    ${NV_LIBNCCL_DEV_PACKAGE} \
    ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} \
    && yum clean all \
    && rm -rf /var/cache/yum/*

# Stub libraries let code link against driver APIs (e.g. libcuda) at build
# time on machines without a real driver installed.
ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs
109+
# Install CUDA devel cudnn8 from:
# https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.1.1/ubi9/devel/cudnn8/Dockerfile
# cuDNN 8 runtime + headers, pinned to the build matching CUDA 12.1.
ENV NV_CUDNN_VERSION=8.9.0.131-1
ENV NV_CUDNN_PACKAGE=libcudnn8-${NV_CUDNN_VERSION}.cuda12.1
ENV NV_CUDNN_PACKAGE_DEV=libcudnn8-devel-${NV_CUDNN_VERSION}.cuda12.1

LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"

RUN yum install -y \
    ${NV_CUDNN_PACKAGE} \
    ${NV_CUDNN_PACKAGE_DEV} \
    && yum clean all \
    && rm -rf /var/cache/yum/*
123+
# Install Python packages

# Install micropipenv to deploy packages from Pipfile.lock
# NOTE(review): micropipenv is intentionally unpinned (-U pulls the latest
# release); pin an exact version here if fully reproducible builds are
# required (hadolint DL3013).
RUN pip install --no-cache-dir -U "micropipenv[toml]"

# Install Python dependencies from Pipfile.lock file
COPY Pipfile.lock ./

# The lockfile is removed in the same layer that consumes it, so it does not
# persist in the final image.
RUN micropipenv install && rm -f ./Pipfile.lock

# Restore user workspace
# Drop root (USER 0 was set for the yum installs above) back to the UBI
# default non-root user and return to the standard source workspace.
USER 1001
WORKDIR /opt/app-root/src