Commit c82bc44

update dependency
2 parents: ebd6334 + 52bf5af

15 files changed: 795 additions, 2 deletions

README.md

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@ Here we list the latest versions of our supported kernel images.
 
 | Deep-Learning Framework | Image Name                 | Version | Batch | Query | Input Hook | TTY | Runtime Impl.     |
 |-------------------------|----------------------------|---------|-------|-------|------------|-----|-------------------|
+| TensorFlow              | `lablup/python-tensorflow` | 1.13.0  | O     | O\*   | O          |     | Bundled w/Keras 2 |
 | TensorFlow              | `lablup/python-tensorflow` | 1.14.0  | O     | O\*   | O          |     | Bundled w/Keras 2 |
 | TensorFlow              | `lablup/python-tensorflow` | 1.15.4  | O     | O\*   | O          |     | Bundled w/Keras 2 |
 | TensorFlow              | `lablup/python-tensorflow` | 2.0.3   | O     | O\*   | O          |     | Bundled w/Keras 2 |

ffmpeg/Dockerfile

Lines changed: 4 additions & 0 deletions
@@ -161,6 +161,10 @@ RUN set -xe && \
     apt-get clean && \
     rm -rf /usr/local/ffmpeg_sources
 
+# Stream transcoding
+COPY ./scripts/bootstrap.sh /opt/container/bootstrap.sh
+RUN chmod +x /opt/container/bootstrap.sh
+
 # Backend.AI specifics
 COPY ./service-defs /etc/backend.ai/service-defs
 COPY ./policy.yml /etc/backend.ai/jail/policy.yml
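
The two added instructions bake the bootstrap script into the image at /opt/container/bootstrap.sh and mark it executable. A quick sanity check after rebuilding might look like the following; the lablup/ffmpeg:latest tag is only a placeholder for whatever tag the image is built with:

    docker run --rm lablup/ffmpeg:latest ls -l /opt/container/bootstrap.sh
    # expected: an executable file, e.g. -rwxr-xr-x ... /opt/container/bootstrap.sh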

ffmpeg/scripts/bootstrap.sh

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
#!/bin/sh

USER_ID=${LOCAL_USER_ID:-9001}
USER_NAME=work

export HOME="/home/$USER_NAME"

su - "$USER_NAME" -c "touch /home/work/ffmpeglog"

if [ -n "$INPUT_STREAM" ] && [ -n "$OUTPUT_STREAM" ]; then
    echo "nohup /usr/local/bin/ffmpeg -i $INPUT_STREAM -vcodec hevc_nvenc -f rtsp $OUTPUT_STREAM >> /home/work/ffmpeglog &" >> /home/work/ffmpeglog
    su - "$USER_NAME" -c "nohup /usr/local/bin/ffmpeg -i $INPUT_STREAM -vcodec hevc_nvenc -f rtsp $OUTPUT_STREAM >> /home/work/ffmpeglog &"
else
    echo "INPUT_STREAM: $INPUT_STREAM" >> /home/work/ffmpeglog
    echo "OUTPUT_STREAM: $OUTPUT_STREAM" >> /home/work/ffmpeglog
fi
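
The script starts a background hevc_nvenc RTSP transcode from INPUT_STREAM to OUTPUT_STREAM as the work user and appends what it does to /home/work/ffmpeglog; if either variable is unset it only logs their values. A minimal sketch of exercising it outside Backend.AI follows; the image tag and stream URLs are placeholders, and --gpus all is assumed because hevc_nvenc needs an NVIDIA encoder:

    docker run --rm --gpus all \
        -e INPUT_STREAM=rtsp://camera.example:8554/in \
        -e OUTPUT_STREAM=rtsp://relay.example:8554/out \
        lablup/ffmpeg:latest /bin/sh /opt/container/bootstrap.sh

Checking /home/work/ffmpeglog inside the container then shows either the launched ffmpeg command line or the (missing) stream values.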

python-ff/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@ six==1.12.0
 syntax
 tensorboard==1.13.1
 tensorflow-estimator==1.13.0
-tensorflow-gpu==1.13.1+nv
+tensorflow-gpu==2.4.0
 termcolor==1.1.0
 terminado==0.8.2
 testpath==0.4.2
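
Since the pin jumps from a custom 1.13.1+nv wheel to stock tensorflow-gpu 2.4.0 (while tensorboard and tensorflow-estimator stay at their 1.13 pins, so pip's resolver may pull newer versions of them), a quick post-build import check is worth running; the image tag below is a placeholder for the rebuilt python-ff image:

    docker run --rm --gpus all lablup/python-ff:latest \
        python -c "import tensorflow as tf; print(tf.__version__, tf.test.is_built_with_cuda())"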

torchserve/Dockerfile

Lines changed: 122 additions & 0 deletions
@@ -0,0 +1,122 @@
# syntax = docker/dockerfile:experimental
#
# This file can build images for cpu and gpu env. By default it builds image for CPU.
# Use following option to build image for cuda/GPU: --build-arg BASE_IMAGE=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
# Here is complete command for GPU/cuda -
# $ DOCKER_BUILDKIT=1 docker build --file Dockerfile --build-arg BASE_IMAGE=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 -t torchserve:latest .
#
# Following comments have been shamelessly copied from https://github.com/pytorch/pytorch/blob/master/Dockerfile
#
# NOTE: To build this you will need a docker version > 18.06 with
#       experimental enabled and DOCKER_BUILDKIT=1
#
# If you do not use buildkit you are not going to have a good time
#
# For reference:
#   https://docs.docker.com/develop/develop-images/build_enhancements/

ARG BASE_IMAGE=ubuntu:18.04

FROM ${BASE_IMAGE} AS compile-image
ARG BASE_IMAGE=ubuntu:18.04
ENV PYTHONUNBUFFERED TRUE

RUN --mount=type=cache,id=apt-dev,target=/var/cache/apt \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    ca-certificates \
    g++ \
    python3-dev \
    python3-distutils \
    python3-venv \
    openjdk-11-jre-headless \
    curl \
    && rm -rf /var/lib/apt/lists/* \
    && cd /tmp \
    && curl -O https://bootstrap.pypa.io/get-pip.py \
    && python3 get-pip.py

RUN python3 -m venv /home/venv
ENV PATH="/home/venv/bin:$PATH"

RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 \
    && update-alternatives --install /usr/local/bin/pip pip /usr/local/bin/pip3 1

# This is only useful for cuda env
RUN export USE_CUDA=1

ARG CUDA_VERSION=""

RUN TORCH_VER=$(curl --silent --location https://pypi.org/pypi/torch/json | python -c "import sys, json, pkg_resources; releases = json.load(sys.stdin)['releases']; print(sorted(releases, key=pkg_resources.parse_version)[-1])") && \
    TORCH_VISION_VER=$(curl --silent --location https://pypi.org/pypi/torchvision/json | python -c "import sys, json, pkg_resources; releases = json.load(sys.stdin)['releases']; print(sorted(releases, key=pkg_resources.parse_version)[-1])") && \
    if echo "$BASE_IMAGE" | grep -q "cuda:"; then \
        # Install CUDA version specific binary when CUDA version is specified as a build arg
        if [ "$CUDA_VERSION" ]; then \
            pip install --no-cache-dir torch==$TORCH_VER+$CUDA_VERSION torchvision==$TORCH_VISION_VER+$CUDA_VERSION -f https://download.pytorch.org/whl/torch_stable.html; \
        # Install the binary with the latest CUDA version support
        else \
            pip install --no-cache-dir torch torchvision; \
        fi \
    # Install the CPU binary
    else \
        pip install --no-cache-dir torch==$TORCH_VER+cpu torchvision==$TORCH_VISION_VER+cpu -f https://download.pytorch.org/whl/torch_stable.html; \
    fi
RUN pip install --no-cache-dir captum torchtext torchserve torch-model-archiver

# Final image for production
FROM ${BASE_IMAGE} AS runtime-image

ENV PYTHONUNBUFFERED TRUE

RUN --mount=type=cache,id=apt-dev,target=/var/cache/apt \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    python3 \
    python3-distutils \
    python3-dev \
    openjdk-11-jre-headless \
    build-essential \
    && rm -rf /var/lib/apt/lists/* \
    && cd /tmp

#RUN useradd -m model-server \
#    && mkdir -p /home/model-server/tmp
RUN mkdir -p /home/model-server/tmp

#COPY --chown=model-server --from=compile-image
COPY --from=compile-image /home/venv /home/venv

ENV PATH="/home/venv/bin:$PATH"

COPY dockerd-entrypoint.sh /usr/local/bin/dockerd-entrypoint.sh

RUN chmod +x /usr/local/bin/dockerd-entrypoint.sh
#RUN chown -R /home/model-server

COPY config.properties /home/model-server/config.properties
RUN mkdir /home/model-server/model-store

#&& chown -R model-server /home/work/model-server/model-store

EXPOSE 8080 8081 8082 7070 7071

#USER model-server
#WORKDIR /home/model-server
#ENV TEMP=/home/model-server/tmp
#ENTRYPOINT ["/usr/local/bin/dockerd-entrypoint.sh"]
#CMD ["serve"]


# Backend.AI specifics
COPY service-defs /etc/backend.ai/service-defs
LABEL ai.backend.kernelspec="1" \
      ai.backend.envs.corecount="OPENBLAS_NUM_THREADS,OMP_NUM_THREADS,NPROC" \
      ai.backend.features="batch query uid-match user-input" \
      ai.backend.resource.min.cpu="1" \
      ai.backend.resource.min.mem="256m" \
      ai.backend.base-distro="ubuntu18.04" \
      ai.backend.runtime-type="python" \
      ai.backend.runtime-path="/bin/false" \
      ai.backend.service-ports="jupyter:http:8070"

COPY policy.yml /etc/backend.ai/jail/policy.yml
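
The header comment already shows the plain GPU build; the CUDA-specific branch additionally expects CUDA_VERSION in the PyTorch wheel-suffix form used on download.pytorch.org (e.g. cu101). A sketch of a GPU build that pins the wheel variant, with an arbitrary local tag, could be:

    DOCKER_BUILDKIT=1 docker build --file Dockerfile \
        --build-arg BASE_IMAGE=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 \
        --build-arg CUDA_VERSION=cu101 \
        -t torchserve:gpu .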

torchserve/Dockerfile.dev

Lines changed: 96 additions & 0 deletions
@@ -0,0 +1,96 @@
# syntax = docker/dockerfile:experimental
#
# Following comments have been shamelessly copied from https://github.com/pytorch/pytorch/blob/master/Dockerfile
#
# NOTE: To build this you will need a docker version > 18.06 with
#       experimental enabled and DOCKER_BUILDKIT=1
#
# If you do not use buildkit you are not going to have a good time
#
# For reference:
#   https://docs.docker.com/develop/develop-images/build_enhancements/

ARG BASE_IMAGE=ubuntu:18.04
ARG BUILD_TYPE=dev
FROM ${BASE_IMAGE} AS compile-image

ARG BASE_IMAGE
ARG BRANCH_NAME=master
ARG MACHINE_TYPE=cpu
ARG CUDA_VERSION

ENV PYTHONUNBUFFERED TRUE

RUN --mount=type=cache,id=apt-dev,target=/var/cache/apt \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    fakeroot \
    ca-certificates \
    dpkg-dev \
    sudo \
    g++ \
    git \
    python3-dev \
    build-essential \
    openjdk-11-jdk \
    curl \
    vim \
    && rm -rf /var/lib/apt/lists/* \
    && cd /tmp \
    && curl -O https://bootstrap.pypa.io/get-pip.py \
    && python3 get-pip.py

RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 \
    && update-alternatives --install /usr/local/bin/pip pip /usr/local/bin/pip3 1

RUN pip install -U pip setuptools

# Build Dev Image
FROM compile-image AS dev-image
ARG MACHINE_TYPE=cpu
ARG CUDA_VERSION
RUN if [ "$MACHINE_TYPE" = "gpu" ]; then export USE_CUDA=1; fi \
    && git clone https://github.com/pytorch/serve.git \
    && cd serve \
    && git checkout ${BRANCH_NAME} \
    && if [ -z "$CUDA_VERSION" ]; then python ts_scripts/install_dependencies.py --environment=dev; else python ts_scripts/install_dependencies.py --environment=dev --cuda $CUDA_VERSION; fi \
    && python ts_scripts/install_from_src.py \
    && useradd -m model-server \
    && mkdir -p /home/model-server/tmp \
    && cp docker/dockerd-entrypoint.sh /usr/local/bin/dockerd-entrypoint.sh \
    && chmod +x /usr/local/bin/dockerd-entrypoint.sh \
    && chown -R model-server /home/model-server \
    && cp docker/config.properties /home/model-server/config.properties \
    && mkdir /home/model-server/model-store && chown -R model-server /home/model-server/model-store

EXPOSE 8080 8081 8082 7070 7071
USER model-server
WORKDIR /home/model-server
ENV TEMP=/home/model-server/tmp
ENTRYPOINT ["/usr/local/bin/dockerd-entrypoint.sh"]
CMD ["serve"]

# Build CodeBuild Image
FROM compile-image AS codebuild-image
ENV JAVA_VERSION=11 \
    JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64" \
    JDK_HOME="/usr/lib/jvm/java-11-openjdk-amd64" \
    JRE_HOME="/usr/lib/jvm/java-11-openjdk-amd64" \
    ANT_VERSION=1.10.3 \
    MAVEN_HOME="/opt/maven" \
    MAVEN_VERSION=3.5.4 \
    MAVEN_CONFIG="/root/.m2" \
    MAVEN_DOWNLOAD_SHA1="22cac91b3557586bb1eba326f2f7727543ff15e3"

# Install Maven
RUN set -ex \
    && mkdir -p $MAVEN_HOME \
    && curl -LSso /var/tmp/apache-maven-$MAVEN_VERSION-bin.tar.gz https://apache.org/dist/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz \
    && echo "$MAVEN_DOWNLOAD_SHA1 /var/tmp/apache-maven-$MAVEN_VERSION-bin.tar.gz" | sha1sum -c - \
    && tar xzvf /var/tmp/apache-maven-$MAVEN_VERSION-bin.tar.gz -C $MAVEN_HOME --strip-components=1 \
    && update-alternatives --install /usr/bin/mvn mvn /opt/maven/bin/mvn 10000 \
    && mkdir -p $MAVEN_CONFIG

FROM ${BUILD_TYPE}-image AS final-image
ARG BUILD_TYPE
RUN echo "${BUILD_TYPE} image creation completed"
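
The final stage picks its parent via BUILD_TYPE, so the same file can emit either the dev image (the default) or the CodeBuild image. Roughly, and with placeholder tags (the cu101 value assumes the same wheel-suffix convention as the main Dockerfile):

    DOCKER_BUILDKIT=1 docker build -f Dockerfile.dev -t torchserve:dev .
    DOCKER_BUILDKIT=1 docker build -f Dockerfile.dev --build-arg BUILD_TYPE=codebuild -t torchserve:codebuild .
    # GPU dev build
    DOCKER_BUILDKIT=1 docker build -f Dockerfile.dev \
        --build-arg BASE_IMAGE=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 \
        --build-arg MACHINE_TYPE=gpu --build-arg CUDA_VERSION=cu101 \
        -t torchserve:dev-gpu .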
