FROM nvcr.io/nvidia/pytorch:21.10-py3
# NVIDIA PyTorch with Python 3.8 (Conda)

ENV DEBIAN_FRONTEND=noninteractive \
    MPLBACKEND=Svg \
    PIP_IGNORE_INSTALLED=0 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="/usr/local/cuda/compat/lib:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib:/usr/local/cuda/lib64:/usr/local/nvidia/lib64:/usr/include/x86_64-linux-gnu" \
    PATH="/usr/local/nvm/versions/node/v14.9.0/bin:/opt/conda/bin:/usr/local/mpi/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ucx/bin:/opt/tensorrt/bin:/usr/local/src/lightgbm/LightGBM:/usr/local/bin/mecab" \
    mecab_dicdir=/usr/local/lib/mecab/dic/mecab-ko-dic \
    CPLUS_INCLUDE_PATH=/usr/include/gdal \
    C_INCLUDE_PATH=/usr/include/gdal \
    LANG=C.UTF-8

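# This image is typically built from the directory that also contains the
# Backend.AI service definitions and runner scripts copied near the end of this
# file; the tag below is only a placeholder (illustrative only):
#   docker build -t pytorch-ngc2110-backendai .
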
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        automake \
        fonts-nanum \
        fonts-nanum-coding \
        fonts-nanum-extra \
        pdsh \
        gfortran \
        htop \
        ffmpeg \
        libasound2-dev \
        libatlas-base-dev \
        libavresample-dev \
        libdc1394-22-dev \
        libeigen3-dev \
        libfaac-dev \
        libgdal-dev \
        libgflags-dev \
        libgoogle-glog-dev \
        libgphoto2-dev \
        libgstreamer-plugins-base1.0-dev \
        libgstreamer1.0-dev \
        libgtk-3-dev \
        libhdf5-dev \
        libmp3lame-dev \
        libopenblas-dev \
        libopencore-amrnb-dev \
        libopencore-amrwb-dev \
        libprotobuf-dev \
        libtheora-dev \
        libvorbis-dev \
        libx264-dev \
        libxext6 \
        libxrender-dev \
        libxvidcore-dev \
        mercurial \
        ncurses-term \
        protobuf-compiler \
        v4l-utils \
        x264 \
        openjdk-8-jdk \
        libsm6 \
        libtbb-dev

# Link the cuDNN library shipped with the base image into the CUDA lib directory
# and refresh the linker cache
RUN ln -s /usr/lib/x86_64-linux-gnu/libcudnn.so.8.2.0 /usr/local/cuda/lib64/libcudnn.so && \
    rm -rf /var/lib/apt/lists/* && \
    ldconfig

# Install nvtop (GPU process monitor)
WORKDIR /tmp
RUN git clone https://github.com/Syllo/nvtop.git && \
    mkdir -p nvtop/build && \
    cd /tmp/nvtop/build && \
    cmake .. -DNVML_RETRIEVE_HEADER_ONLINE=True && \
    make -j$(nproc) && \
    make install

RUN update-alternatives --install /opt/conda/bin/python python /opt/conda/bin/python3 2

WORKDIR /tmp
RUN curl https://bootstrap.pypa.io/get-pip.py | python3 && \
    python3 -m pip install --no-cache-dir -U setuptools pip

# Install Korean NLP packages: mecab-ko and mecab-ko-dic
WORKDIR /tmp
RUN curl -LO https://bitbucket.org/eunjeon/mecab-ko/downloads/mecab-0.996-ko-0.9.2.tar.gz && \
    tar zxfv mecab-0.996-ko-0.9.2.tar.gz && \
    cd mecab-0.996-ko-0.9.2 && \
    ./configure && \
    make -j$(nproc) && \
    make check && \
    make install

| 90 | +RUN echo "Install mecab-ko-dic" && \ |
| 91 | + cd /tmp && \ |
| 92 | + ldconfig && \ |
| 93 | + curl -LO https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/mecab-ko-dic-2.1.1-20180720.tar.gz && \ |
| 94 | + tar -zxvf mecab-ko-dic-2.1.1-20180720.tar.gz && \ |
| 95 | + cd mecab-ko-dic-2.1.1-20180720 && \ |
| 96 | + ./autogen.sh && \ |
| 97 | + ./configure && \ |
| 98 | + make -j$(nproc) && \ |
| 99 | + sh -c 'echo "dicdir=/usr/local/lib/mecab/dic/mecab-ko-dic" > /usr/local/etc/mecabrc' && \ |
| 100 | + make install && \ |
| 101 | + cd /tmp && \ |
| 102 | + git clone https://bitbucket.org/eunjeon/mecab-python-0.996.git && \ |
| 103 | + python3 -m pip install /tmp/mecab-python-0.996 |
| 104 | + |
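# A quick smoke test for the MeCab binding can be run inside the built image
# (illustrative only; not executed during the build):
#   python3 -c "import MeCab; print(MeCab.Tagger().parse('테스트'))"
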
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
    apt-get update -y && \
    apt-get install -y nodejs

WORKDIR /tmp
RUN git clone -q --branch=v0.3.18 https://github.com/xianyi/OpenBLAS.git && \
    cd OpenBLAS && \
    make DYNAMIC_ARCH=1 NO_AFFINITY=1 NUM_THREADS=48 FC=gfortran && \
    make install
RUN git clone --recursive https://github.com/bodono/scs-python.git && \
    cd /tmp/scs-python && \
    python setup.py install --scs --gpu

RUN /opt/conda/bin/conda install -y -c conda-forge opencv ffmpeg spacy

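# To confirm that the conda-installed OpenCV and spaCy are importable, a check
# along these lines can be run in the finished image (illustrative only):
#   python3 -c "import cv2, spacy; print(cv2.__version__, spacy.__version__)"
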
RUN /opt/conda/bin/python3 -m pip install --no-cache-dir \
    Cython==0.29.24 \
    tornado==6.1 \
    pystan==3.3.0 \
    pycairo==1.20.1 \
    jupyter==1.0.0 \
    typeguard==2.12.1 \
    python-language-server[all] \
    pythran \
    matplotlib==3.4.3
ENV SCIPY_VERSION 1.7.1
# Install scipy from source
RUN cd /tmp && \
    git clone --branch=v${SCIPY_VERSION} --depth=1 https://github.com/scipy/scipy.git scipy && \
    cd scipy && \
    git checkout -b v${SCIPY_VERSION} && \
    git submodule update --init && \
    cp site.cfg.example site.cfg && \
    python3 -m pip install -U --no-cache-dir \
        numpy==1.21.4 \
        pandas==1.3.4 \
        pythran \
        scikit-learn==1.0.1 \
        hypothesis==6.24.2 \
        python-lsp-server \
    && \
    python3 setup.py install

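# The source build can be verified against the pinned version in the running
# image, e.g. (illustrative only):
#   python3 -c "import scipy; print(scipy.__version__)"   # expected: 1.7.1
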
WORKDIR /tmp
COPY ./requirements.txt /tmp
RUN /opt/conda/bin/python3 -m pip install --no-cache-dir --ignore-installed -r requirements.txt && \
    rm -f /tmp/*.whl /tmp/requirements.txt

# Install git-lfs
WORKDIR /tmp
RUN curl -sLO https://github.com/git-lfs/git-lfs/releases/download/v3.0.2/git-lfs-linux-amd64-v3.0.2.tar.gz && \
    tar -zxf git-lfs-linux-amd64-v3.0.2.tar.gz && \
    bash install.sh && \
    rm -rf /tmp/*

WORKDIR /tmp
RUN git clone https://github.com/aristocratos/bashtop.git && \
    cd bashtop && \
    make install

RUN curl -fL https://github.com/cdr/code-server/releases/download/v3.12.0/code-server-3.12.0-linux-amd64.tar.gz | tar -C /usr/local/lib -xz && \
    mv /usr/local/lib/code-server-3.12.0-linux-amd64 /usr/local/lib/code-server-3.12.0 && \
    ln -s /usr/local/lib/code-server-3.12.0/bin/code-server /usr/local/bin/code-server

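# Inside a running container, code-server can be started on the port that the
# "vscode" service-port label below expects, e.g. (illustrative only):
#   code-server --bind-addr 0.0.0.0:8180 --auth none /home/work
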
# Install Open MPI
RUN mkdir /tmp/openmpi && \
    cd /tmp/openmpi && \
    wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.1.tar.gz && \
    tar zxf openmpi-4.1.1.tar.gz && \
    cd openmpi-4.1.1 && \
    ./configure --enable-orterun-prefix-by-default && \
    make -j $(nproc) all && \
    make install && \
    ldconfig && \
    rm -rf /tmp/openmpi*
# Create a wrapper for Open MPI to allow running as root by default
RUN mv /usr/local/bin/mpirun /usr/local/bin/mpirun.real && \
    echo '#!/bin/bash' > /usr/local/bin/mpirun && \
    echo 'mpirun.real --allow-run-as-root "$@"' >> /usr/local/bin/mpirun && \
    chmod a+x /usr/local/bin/mpirun

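# With the wrapper in place, a plain mpirun works as root inside the container,
# e.g. (illustrative only; train.py is a placeholder script):
#   mpirun -np 4 python3 train.py
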
# Configure Open MPI with sensible defaults: exclude the loopback and docker0
# interfaces from TCP communication
RUN echo "btl_tcp_if_exclude = lo,docker0" >> /usr/local/etc/openmpi-mca-params.conf

# Install Horovod, temporarily using CUDA stubs
RUN ldconfig /usr/local/cuda/targets/x86_64-linux/lib/stubs && \
    HOROVOD_CUDA_HOME=$CONDA_PREFIX HOROVOD_GPU_ALLREDUCE=NCCL HOROVOD_GPU_BROADCAST=NCCL HOROVOD_NCCL_LINK=SHARED \
    HOROVOD_WITHOUT_TENSORFLOW=1 HOROVOD_WITH_PYTORCH=1 HOROVOD_WITHOUT_MXNET=1 \
    pip install --no-cache-dir horovod==0.23.0 && \
    ldconfig

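# Whether the NCCL and PyTorch extensions were actually compiled in can be
# checked in the built image (illustrative only):
#   horovodrun --check-build
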
RUN python3 -m pip install --no-cache-dir \
    mpi4py==3.1.2 \
    nni==2.5 \
    mlflow==1.21.0 \
    scikit-nni==0.2.1

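# The MLflow UI and the NNI web board are expected on the ports declared in the
# service-ports label below; for example (illustrative only, config.yml is a
# placeholder experiment config):
#   mlflow ui --host 0.0.0.0 --port 5000
#   nnictl create --config config.yml --port 8080
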
RUN jupyter nbextensions_configurator enable && \
    jupyter contrib nbextension install && \
    jupyter nbextension enable --py --sys-prefix widgetsnbextension && \
    jupyter serverextension enable --py jupyterlab --sys-prefix && \
    jupyter labextension install --no-build @jupyter-widgets/jupyterlab-manager && \
    jupyter labextension install --no-build @krassowski/jupyterlab-lsp && \
    jupyter serverextension enable --py jupyter_lsp && \
    jupyter labextension install --no-build @jupyterlab/toc && \
    jupyter nbextension enable execute_time/ExecuteTime && \
    jupyter nbextension enable toc2/main && \
    jupyter lab build

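# Once built, JupyterLab can be served on the "jupyterlab" service port,
# e.g. (illustrative only):
#   jupyter lab --ip 0.0.0.0 --port 8090 --no-browser --allow-root
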
RUN apt autoclean && \
    sed -i 's/source \/usr\/local\/nvm\/nvm.sh//' /etc/bash.bashrc && \
    ln -sf /usr/share/terminfo/x/xterm-color /usr/share/terminfo/x/xterm-256color && \
    rm -rf /var/lib/apt/lists/* && \
    rm -rf /root/.cache && \
    rm -rf /tmp/*

RUN /opt/conda/bin/python3 -m ipykernel install \
        --prefix=/opt/conda/ \
        --display-name "PyTorch 1.10 (NGC 21.10/Python 3.8 Conda) on Backend.AI" && \
    cat /opt/conda/share/jupyter/kernels/python3/kernel.json

# Backend.AI specifics
COPY ./service-defs /etc/backend.ai/service-defs
COPY ./runner-scripts/bootstrap.sh runner-scripts/setup_multinode.py /opt/container/

LABEL ai.backend.kernelspec="1" \
      ai.backend.envs.corecount="OPENBLAS_NUM_THREADS,OMP_NUM_THREADS,NPROC" \
      ai.backend.features="batch query uid-match user-input" \
      ai.backend.base-distro="ubuntu16.04" \
      ai.backend.accelerators="cuda" \
      ai.backend.resource.min.cpu="1" \
      ai.backend.resource.min.mem="1g" \
      ai.backend.resource.min.cuda.device="1" \
      ai.backend.resource.min.cuda.shares="0.1" \
      ai.backend.runtime-type="python" \
      ai.backend.runtime-path="/opt/conda/bin/python3" \
      ai.backend.service-ports="ipython:pty:3000,jupyter:http:8091,jupyterlab:http:8090,vscode:http:8180,tensorboard:http:6006,mlflow-ui:preopen:5000,nniboard:preopen:8080"

WORKDIR /home/work
# vim: ft=dockerfile