@@ -26,32 +26,32 @@ RUN curl -sL https://deb.nodesource.com/setup_0.12 | bash - && \
     npm install -g bower

 # for Apache Spark demos
-ARG APACHE_SPARK_VERSION=3.3.2
+ARG APACHE_SPARK_VERSION=3.5.2
 ARG SCALA_VERSION=2.13

 RUN apt-get -y update && \
     apt-get -y install software-properties-common

-RUN \
-  echo "===> add webupd8 repository..." && \
-  echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main" | tee /etc/apt/sources.list.d/webupd8team-java.list && \
-  echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main" | tee -a /etc/apt/sources.list.d/webupd8team-java.list && \
-  apt-key adv --keyserver keyserver.ubuntu.com --recv-keys EEA14886 && \
-  apt-get update
-
-RUN echo "===> install Java" && \
-  echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \
-  echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections && \
-  DEBIAN_FRONTEND=noninteractive apt-get install -y --force-yes oracle-java8-installer oracle-java8-set-default && \
-  apt-get clean && \
-  update-java-alternatives -s java-8-oracle
-
-RUN cd /tmp && \
-    if [ "$SCALA_VERSION" = "2.13" ]; then APACHE_SPARK_CUSTOM_NAME=hadoop3-scala2.13; else APACHE_SPARK_CUSTOM_NAME=hadoop3; fi && \
-    wget -q https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-${APACHE_SPARK_CUSTOM_NAME}.tgz && \
-    tar xzf spark-${APACHE_SPARK_VERSION}-bin-${APACHE_SPARK_CUSTOM_NAME}.tgz -C /usr/local && \
-    rm spark-${APACHE_SPARK_VERSION}-bin-${APACHE_SPARK_CUSTOM_NAME}.tgz && \
-    ln -snf /usr/local/spark-${APACHE_SPARK_VERSION}-bin-${APACHE_SPARK_CUSTOM_NAME} /usr/local/spark
+RUN JAVA_8=`update-alternatives --list java | grep java-8-openjdk` || echo $JAVA_8 && \
+    if [ "x$JAVA_8" = "x" ]; then \
+        apt-get -y update ; \
+        apt-get install -y --no-install-recommends openjdk-8-jdk ca-certificates-java ; \
+        apt-get clean ; \
+        rm -rf /var/lib/apt/lists/* ; \
+        update-ca-certificates -f ; \
+        JAVA_8=`update-alternatives --list java | grep java-8-openjdk` ; \
+        update-alternatives --set java $JAVA_8 ; \
+    fi
+
+RUN if [ "$SCALA_VERSION" = "2.13" ]; then APACHE_SPARK_CUSTOM_NAME=hadoop3-scala2.13; else APACHE_SPARK_CUSTOM_NAME=hadoop3; fi && \
+    SPARK_TGZ_NAME=spark-${APACHE_SPARK_VERSION}-bin-${APACHE_SPARK_CUSTOM_NAME} && \
+    if [ ! -d "/usr/local/$SPARK_TGZ_NAME" ]; then \
+        cd /tmp ; \
+        wget -q https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/${SPARK_TGZ_NAME}.tgz ; \
+        tar -xzf ${SPARK_TGZ_NAME}.tgz -C /usr/local ; \
+        rm ${SPARK_TGZ_NAME}.tgz ; \
+        ln -snf /usr/local/$SPARK_TGZ_NAME /usr/local/spark ; \
+    fi

 # R support
 RUN apt-get update && \
@@ -62,7 +62,7 @@ RUN apt-get update && \
     rm -rf /var/lib/apt/lists/*

 ENV SPARK_HOME /usr/local/spark
-ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9.5-src.zip
+ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9.7-src.zip
 ENV PYSPARK_PYTHON /home/main/anaconda2/envs/python3/bin/python
 ENV R_LIBS_USER $SPARK_HOME/R/lib
