Commit 09e9d2d

Create Dockerfile.21.12-py3
1 parent aa97842 commit 09e9d2d

1 file changed: 212 additions, 0 deletions
@@ -0,0 +1,212 @@
FROM nvcr.io/nvidia/pytorch:21.12-py3
# NVIDIA PyTorch with Python 3.8 (CONDA)

ENV DEBIAN_FRONTEND=noninteractive \
    MPLBACKEND=Svg \
    PIP_IGNORE_INSTALLED=0 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="/usr/local/cuda/compat/lib:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib:/usr/local/cuda/lib64:/usr/local/nvidia/lib64:/usr/include/x86_64-linux-gnu" \
    PATH="/usr/local/nvm/versions/node/v16.6.1/bin:/opt/conda/bin:/usr/local/mpi/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ucx/bin:/opt/tensorrt/bin:/usr/local/src/lightgbm/LightGBM:/usr/local/bin/mecab" \
    mecab_dicdir=/usr/local/lib/mecab/dic/mecab-ko-dic \
    CPLUS_INCLUDE_PATH=/usr/include/gdal \
    C_INCLUDE_PATH=/usr/include/gdal \
    LANG=C.UTF-8

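# Build/runtime system packages: compilers, media codecs, GStreamer, GDAL, BLAS/ATLAS, HDF5, Korean (Nanum) fonts, OpenJDK 8, and misc tools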
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        automake \
        ffmpeg \
        fonts-nanum \
        fonts-nanum-coding \
        fonts-nanum-extra \
        gfortran \
        htop \
        libasound2-dev \
        libatlas-base-dev \
        libavresample-dev \
        libdc1394-22-dev \
        libeigen3-dev \
        libfaac-dev \
        libgdal-dev \
        libgflags-dev \
        libgoogle-glog-dev \
        libgphoto2-dev \
        libgstreamer-plugins-base1.0-dev \
        libgstreamer1.0-dev \
        libgtk-3-dev \
        libhdf5-dev \
        libmp3lame-dev \
        libopenblas-dev \
        libopencore-amrnb-dev \
        libopencore-amrwb-dev \
        libprotobuf-dev \
        libtheora-dev \
        libvorbis-dev \
        libx264-dev \
        libxext6 \
        libxrender-dev \
        libxvidcore-dev \
        libsm6 \
        libtbb-dev \
        mercurial \
        ncurses-term \
        openjdk-8-jdk \
        pdsh \
        protobuf-compiler \
        v4l-utils \
        x264

# Link the preinstalled cuDNN into the CUDA library path and clean up the apt lists
RUN ln -s /usr/lib/x86_64-linux-gnu/libcudnn.so.8.2.4 /usr/local/cuda/lib64/libcudnn.so && \
    rm -rf /var/lib/apt/lists/* && \
    ldconfig

# Install nvtop (NVIDIA GPU process monitor) from source
WORKDIR /tmp
RUN git clone https://github.com/Syllo/nvtop.git && \
    mkdir -p nvtop/build && \
    cd /tmp/nvtop/build && \
    cmake .. -DNVML_RETRIEVE_HEADER_ONLINE=True && \
    make -j$(nproc) && \
    make install

# Make `python` point to the conda python3
RUN update-alternatives --install /opt/conda/bin/python python /opt/conda/bin/python3 2

# Install Korean NLP packages: mecab-ko, mecab-ko-dic, and the mecab-python binding
WORKDIR /tmp
RUN curl -LO https://bitbucket.org/eunjeon/mecab-ko/downloads/mecab-0.996-ko-0.9.2.tar.gz && \
    tar zxfv mecab-0.996-ko-0.9.2.tar.gz && \
    cd mecab-0.996-ko-0.9.2 && \
    ./configure && \
    make -j$(nproc) && \
    make check && \
    make install

RUN echo "Install mecab-ko-dic" && \
    cd /tmp && \
    ldconfig && \
    curl -LO https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/mecab-ko-dic-2.1.1-20180720.tar.gz && \
    tar -zxvf mecab-ko-dic-2.1.1-20180720.tar.gz && \
    cd mecab-ko-dic-2.1.1-20180720 && \
    ./autogen.sh && \
    ./configure && \
    make -j$(nproc) && \
    sh -c 'echo "dicdir=/usr/local/lib/mecab/dic/mecab-ko-dic" > /usr/local/etc/mecabrc' && \
    make install && \
    cd /tmp && \
    git clone https://bitbucket.org/eunjeon/mecab-python-0.996.git && \
    python3 -m pip install /tmp/mecab-python-0.996

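# Install Node.js 14 from NodeSource (a Node runtime is used by the JupyterLab extension builds below)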
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
    apt-get update -y && \
    apt-get install -y nodejs

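# Build OpenBLAS from source and install scs-python with GPU support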
WORKDIR /tmp
RUN git clone -q --branch=v0.3.18 https://github.com/xianyi/OpenBLAS.git && \
    cd OpenBLAS && \
    make DYNAMIC_ARCH=1 NO_AFFINITY=1 NUM_THREADS=48 FC=gfortran && \
    make install
RUN git clone --recursive https://github.com/bodono/scs-python.git && \
    cd /tmp/scs-python && \
    python setup.py install --scs --gpu

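# Install OpenCV, FFmpeg, and spaCy from conda-forge, then the pip requirements from requirements.21.10.txt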
RUN /opt/conda/bin/conda install -c conda-forge opencv ffmpeg spacy

WORKDIR /tmp
COPY ./requirements.21.10.txt /tmp/requirements.txt
RUN /opt/conda/bin/python3 -m pip install --no-cache-dir -r requirements.txt && \
    rm -f /tmp/*.whl /tmp/requirements.txt

# Install git-lfs
WORKDIR /tmp
RUN curl -sLO https://github.com/git-lfs/git-lfs/releases/download/v3.0.2/git-lfs-linux-amd64-v3.0.2.tar.gz && \
    tar -zxf git-lfs-linux-amd64-v3.0.2.tar.gz && \
    bash install.sh && \
    rm -rf /tmp/*

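# Install bashtop (terminal resource monitor) and code-server (VS Code in the browser)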
WORKDIR /tmp
RUN git clone https://github.com/aristocratos/bashtop.git && \
    cd bashtop && \
    make install

RUN curl -fL https://github.com/cdr/code-server/releases/download/v3.12.0/code-server-3.12.0-linux-amd64.tar.gz | tar -C /usr/local/lib -xz && \
    mv /usr/local/lib/code-server-3.12.0-linux-amd64 /usr/local/lib/code-server-3.12.0 && \
    ln -s /usr/local/lib/code-server-3.12.0/bin/code-server /usr/local/bin/code-server

# Install Open MPI
RUN mkdir /tmp/openmpi && \
    cd /tmp/openmpi && \
    wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.1.tar.gz && \
    tar zxf openmpi-4.1.1.tar.gz && \
    cd openmpi-4.1.1 && \
    ./configure --enable-orterun-prefix-by-default && \
    make -j $(nproc) all && \
    make install && \
    ldconfig && \
    rm -rf /tmp/openmpi*
# Create a wrapper for OpenMPI to allow running as root by default
RUN mv /usr/local/bin/mpirun /usr/local/bin/mpirun.real && \
    echo '#!/bin/bash' > /usr/local/bin/mpirun && \
    echo 'mpirun.real --allow-run-as-root "$@"' >> /usr/local/bin/mpirun && \
    chmod a+x /usr/local/bin/mpirun

# Configure OpenMPI with sensible defaults
RUN echo "btl_tcp_if_exclude = lo,docker0" >> /usr/local/etc/openmpi-mca-params.conf

# Install Horovod, temporarily using CUDA stubs
RUN ldconfig /usr/local/cuda/targets/x86_64-linux/lib/stubs && \
    HOROVOD_CUDA_HOME=$CONDA_PREFIX HOROVOD_GPU_ALLREDUCE=NCCL HOROVOD_GPU_BROADCAST=NCCL HOROVOD_NCCL_LINK=SHARED \
    HOROVOD_WITHOUT_TENSORFLOW=1 HOROVOD_WITH_PYTORCH=1 HOROVOD_WITHOUT_MXNET=1 \
    pip install --no-cache-dir horovod==0.23.0 && \
    ldconfig

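# MPI bindings plus experiment tooling (NNI and MLflow back the nniboard and mlflow-ui service ports declared below)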
RUN python3 -m pip install --no-cache-dir \
    mpi4py==3.1.2 \
    nni==2.5 \
    mlflow==1.21.0 \
    scikit-nni==0.2.1

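# Enable the classic notebook extensions and build JupyterLab with the widgets, LSP, and ToC extensions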
RUN jupyter nbextensions_configurator enable && \
    jupyter contrib nbextension install && \
    jupyter nbextension enable --py --sys-prefix widgetsnbextension && \
    jupyter serverextension enable --py jupyterlab --sys-prefix && \
    jupyter labextension install --no-build @jupyter-widgets/jupyterlab-manager && \
    jupyter labextension install --no-build @krassowski/jupyterlab-lsp && \
    jupyter serverextension enable --py jupyter_lsp && \
    jupyter labextension install --no-build @jupyterlab/toc && \
    jupyter nbextension enable execute_time/ExecuteTime && \
    jupyter nbextension enable toc2/main && \
    jupyter lab build

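# Clean up caches and temporary files; drop the nvm hook from bash.bashrc and fix the xterm-256color terminfo entry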
RUN apt autoclean && \
    sed -i 's/source \/usr\/local\/nvm\/nvm.sh//' /etc/bash.bashrc && \
    ln -sf /usr/share/terminfo/x/xterm-color /usr/share/terminfo/x/xterm-256color && \
    rm -rf /var/lib/apt/lists/* && \
    rm -rf /root/.cache && \
    rm -rf /tmp/*

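# Register the conda Python as the Jupyter kernel exposed to Backend.AI users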
RUN /opt/conda/bin/python3 -m ipykernel install \
    --prefix=/opt/conda/ \
    --display-name "PyTorch 1.10 (NGC 21.12/Python 3.8 Conda) on Backend.AI" && \
    cat /opt/conda/share/jupyter/kernels/python3/kernel.json

# Backend.AI specifics
COPY ./service-defs /etc/backend.ai/service-defs
COPY ./runner-scripts/bootstrap.sh runner-scripts/setup_multinode.py /opt/container/

LABEL ai.backend.kernelspec="1" \
      ai.backend.envs.corecount="OPENBLAS_NUM_THREADS,OMP_NUM_THREADS,NPROC" \
      ai.backend.features="batch query uid-match user-input" \
      ai.backend.base-distro="ubuntu16.04" \
      ai.backend.accelerators="cuda" \
      ai.backend.resource.min.cpu="1" \
      ai.backend.resource.min.mem="1g" \
      ai.backend.resource.min.cuda.device=1 \
      ai.backend.resource.min.cuda.shares=0 \
      ai.backend.runtime-type="python" \
      ai.backend.runtime-path="/opt/conda/bin/python3" \
      ai.backend.service-ports="ipython:pty:3000,jupyter:http:8091,jupyterlab:http:8090,vscode:http:8180,tensorboard:http:6006,mlflow-ui:preopen:5000,nniboard:preopen:8080"

WORKDIR /home/work
