2 files changed: +39, −2 lines

Dockerfile under docker/prebuild/ (modified):

@@ -19,11 +19,18 @@ ARG TENSORRT_VERSION=8.2.3.0
ARG TORCH_VERSION=1.10.0
ARG TORCHVISION_VERSION=0.11.0

+ ARG TOOLSET_VERSION=7
+
USER root
WORKDIR /root/workspace

ENV FORCE_CUDA="1"

+ # install toolset
+ RUN yum install devtoolset-${TOOLSET_VERSION}-gcc devtoolset-${TOOLSET_VERSION}-gcc-c++ -y
+
+ ENV PATH=/opt/rh/devtoolset-${TOOLSET_VERSION}/root/usr/bin:$PATH
+
# install cuda cudnn
RUN curl -fsSL -v -o ./cuda_install.run -O $CUDA_URL &&\
    chmod +x ./cuda_install.run &&\
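Because the toolset version is exposed as a build ARG, it can be overridden at build time with `--build-arg TOOLSET_VERSION=<n>`, provided that devtoolset package exists in the base image's repositories. A rough way to confirm the devtoolset compiler is picked up through the PATH set above, using the image tag from the build script later in this diff:

# With the default TOOLSET_VERSION=7, gcc should resolve under /opt/rh/devtoolset-7/root/usr/bin.
docker run --rm openmmlab/mmdeploy:manylinux2014_x86_64-cuda11.3 which gcc
docker run --rm openmmlab/mmdeploy:manylinux2014_x86_64-cuda11.3 gcc --version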
@@ -93,12 +100,13 @@ RUN curl -fsSL -v -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Mini
    /opt/conda/bin/conda create -n mmdeploy-3.10 python=3.10 -y &&\
    export CUDA_INT=$(echo $CUDA_VERSION | awk '{split($0, a, "."); print a[1]a[2]}') &&\
    /opt/conda/bin/conda create -n torch${TORCH_VERSION} python=3.8 -y &&\
-     /opt/conda/envs/mmdeploy-3.6/bin/pip install --no-cache-dir setuptools wheel pyyaml &&\
+     /opt/conda/envs/mmdeploy-3.6/bin/pip install --no-cache-dir setuptools wheel pyyaml packaging &&\
    /opt/conda/envs/torch${TORCH_VERSION}/bin/pip install --no-cache-dir onnxruntime-gpu==${ONNXRUNTIME_VERSION} &&\
    /opt/conda/envs/torch${TORCH_VERSION}/bin/pip install ${TENSORRT_DIR}/python/tensorrt-*-cp38-none-linux_x86_64.whl &&\
    /opt/conda/envs/torch${TORCH_VERSION}/bin/pip install --no-cache-dir torch==${TORCH_VERSION}+cu${CUDA_INT} \
        torchvision==${TORCHVISION_VERSION}+cu${CUDA_INT} -f https://download.pytorch.org/whl/torch_stable.html &&\
-     /opt/conda/bin/conda clean -ya
+     /opt/conda/bin/conda init bash &&\
+     /opt/conda/bin/conda clean -ya

ENV CONDA=/opt/conda
ENV PATH=$CONDA/bin:$PATH
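With `conda init bash` added, interactive shells inside the image can `conda activate` the prebuilt environments; sourcing conda's profile script does the same in non-interactive shells. A minimal sketch, assuming the image was built with the default TORCH_VERSION=1.10.0 (so the env is named torch1.10.0) and that the host has the NVIDIA container runtime for `--gpus all`:

# Activate the prebuilt torch env and sanity-check the pinned torch/torchvision wheels.
docker run --rm --gpus all openmmlab/mmdeploy:manylinux2014_x86_64-cuda11.3 bash -c '
  source /opt/conda/etc/profile.d/conda.sh
  conda activate torch1.10.0
  python -c "import torch, torchvision; print(torch.__version__, torchvision.__version__, torch.cuda.is_available())"
'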
New file (build/push script for the prebuilt image; filename not shown in this view):

+ #!/bin/sh
+
+ set -e
+
+ ip=${1}
+ port=${2:-8585}
+
+ date_today=`date +'%Y%m%d'`
+
+ # create http server
+ nohup python3 -m http.server --directory /data2/shared/mmdeploy-manylinux2014_x86_64-cuda11.3 $port > tmp.log 2>&1 &
+
+ export ip=10.1.52.36
+ export port=8585
+ export CUDA_URL=http://$ip:$port/cuda_11.3.0_465.19.01_linux.run
+ export CUDNN_URL=http://$ip:$port/cudnn-11.3-linux-x64-v8.2.1.32.tgz
+ export TENSORRT_URL=http://$ip:$port/TensorRT-8.2.3.0.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz
+ export TENSORRT_VERSION=8.2.3.0
+ export TAG=manylinux2014_x86_64-cuda11.3
+
+ # build docker image
+ docker build ./docker/prebuild/ -t openmmlab/mmdeploy:$TAG \
+     --build-arg CUDA_URL=$CUDA_URL \
+     --build-arg CUDNN_URL=$CUDNN_URL \
+     --build-arg TENSORRT_URL=${TENSORRT_URL}
+
+ # push to docker hub
+ docker login
+ docker push openmmlab/mmdeploy:$TAG
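A possible invocation, assuming the script is saved as build_prebuilt_image.sh (the filename is hypothetical; it is not shown in this diff). Note that the positional `ip`/`port` arguments are currently shadowed by the hard-coded `export ip=...` / `export port=...` lines, so the values below simply mirror those:

# Serve the CUDA/cuDNN/TensorRT archives over HTTP, then build and push the image.
sh build_prebuilt_image.sh 10.1.52.36 8585

`docker login` will prompt for Docker Hub credentials before the final push if none are cached.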