-FROM centos:7.6.1810
+FROM python:3.7-buster as dependencies
+COPY api/src/main/resources/requirements.txt /
+RUN apt-get update && \
+    apt-get install -y libssl-dev libsasl2-dev libffi-dev cython3
+RUN pip3 install -r requirements.txt
 
-RUN yum install -y epel-release
-RUN yum install -y python36-devel python36-pip cyrus-sasl-devel cyrus-sasl-gssapi cyrus-sasl-plain libffi-devel openssl-devel gcc gcc-c++
 
-COPY api/src/main/resources /deployment-manager/
+FROM python:3.7-slim-buster
 
-WORKDIR /deployment-manager
+LABEL maintainer="[email protected]" \
+      organization="gradiant.org"
 
-RUN pip3 install -r requirements.txt
+ENV VERSION=2.1.2
+# Create app directory
+WORKDIR /deployment-manager
+# Adding Python runtime dependencies
+COPY --from=dependencies /requirements.txt ./
+COPY --from=dependencies /root/.cache /root/.cache
+# Adding OS runtime dependencies
+RUN apt-get update && \
+    apt-get install -y libssl1.1 libsasl2-2 libsasl2-modules-gssapi-mit libffi6 && \
+    rm -rf /var/lib/apt/lists/* && \
+    pip3 install -r requirements.txt
 
 # GIT http server to serve notebooks to jupyterhub
-RUN yum install -y git nginx fcgiwrap && \
+RUN apt-get update && \
+    apt-get install -y git nginx fcgiwrap && \
+    rm -rf /var/lib/apt/lists/* && \
     git config --global user.email "[email protected]" && \
     git config --global user.name "pnda" && \
     mkdir -p /data/git-repos/ && \
-    mkdir -p /data/stage/
+    mkdir -p /data/stage/
+
+# Spark distribution to submit Spark jobs.
+# openjdk-8 is installed because Spark 2.4.4 does not yet support Java 11;
+# it has to come from the unstable Debian repo, since buster no longer ships it.
+ENV JAVA_HOME=/usr/lib/jvm/default-jvm/ \
+    SPARK_VERSION=2.4.4 \
+    SPARK_HOME=/opt/spark
+ENV PATH="$PATH:$SPARK_HOME/sbin:$SPARK_HOME/bin" \
+    SPARK_URL="local[*]" \
+    PYTHONPATH="${SPARK_HOME}/python/lib/pyspark.zip:${SPARK_HOME}/python/lib/py4j-src.zip:$PYTHONPATH" \
+    SPARK_OPTS="" \
+    PYSPARK_PYTHON=/usr/bin/python3
+RUN mkdir -p /usr/share/man/man1 && \
+    echo "deb http://deb.debian.org/debian unstable main" > /etc/apt/sources.list.d/91-unstable.list && \
+    apt-get update && apt-get install -y openjdk-8-jre-headless wget && rm /etc/apt/sources.list.d/91-unstable.list && rm -rf /var/lib/apt/lists/* && \
+    cd /usr/lib/jvm && ln -s java-8-openjdk-amd64 default-jvm && \
+    wget -qO- https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop2.7.tgz | tar xvz -C /opt && \
+    ln -s /opt/spark-$SPARK_VERSION-bin-hadoop2.7 /opt/spark && \
+    cd /opt/spark/python/lib && ln -s py4j-*-src.zip py4j-src.zip
+
+# Bundle app source
+COPY api/src/main/resources ./
 COPY docker/nginx.conf /etc/nginx/nginx.conf
-COPY docker/entrypoint.sh /entrypoint.sh
+COPY docker/entrypoint.sh .
+
 # PNDA platform users must transition from Linux OS users to cloud-native users. For now we add a pnda user to container images.
 RUN useradd pnda
 
-ENTRYPOINT "/entrypoint.sh"
-
+ENTRYPOINT ["./entrypoint.sh"]
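
Why the two COPY --from=dependencies lines above work: pip populates /root/.cache/pip with the wheels it builds in the first stage, so re-running pip3 install in the slim stage should resolve everything from that warm cache, and no compilers or -dev headers are needed in the final image. A minimal standalone sketch of the same pattern (the requirements.txt location here is illustrative, not this repo's path):

    FROM python:3.7-buster as dependencies
    COPY requirements.txt /
    # Building here populates the wheel cache under /root/.cache/pip
    RUN pip3 install -r /requirements.txt

    FROM python:3.7-slim-buster
    COPY --from=dependencies /requirements.txt /
    COPY --from=dependencies /root/.cache /root/.cache
    # This install hits the warm cache, so no gcc or -dev packages are required;
    # the cache can then be dropped to keep the image small
    RUN pip3 install -r /requirements.txt && rm -rf /root/.cache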
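
To try the result locally, something like the following should work; the image tag and Dockerfile path are assumptions, not taken from this change:

    docker build -t deployment-manager:2.1.2 -f docker/Dockerfile .
    # ENTRYPOINT is ./entrypoint.sh, so override it to smoke-test the runtime pieces
    docker run --rm --entrypoint java deployment-manager:2.1.2 -version
    docker run --rm --entrypoint spark-submit deployment-manager:2.1.2 --version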