Commit 5f7aef7

Preparing environment for Python 3.8
1 parent 3dead1f commit 5f7aef7

1 file changed: +7 -6 lines changed

testing/Dockerfile

Lines changed: 7 additions & 6 deletions
@@ -1,6 +1,6 @@
 FROM openjdk:8-jre-stretch
 
-ARG SPARK_VERSION=2.4.4
+ARG SPARK_VERSION=2.4.5
 
 RUN apt-get update -y
 RUN apt-get install -y jq make build-essential libssl-dev zlib1g-dev libbz2-dev \
@@ -9,17 +9,18 @@ xz-utils tk-dev libffi-dev liblzma-dev python-openssl git gcc curl postgresql li
 RUN curl https://pyenv.run | bash
 RUN echo 'eval "$(pyenv init -)"' >> /root/.bashrc
 ENV PATH="/root/.pyenv/bin:$PATH"
-RUN pyenv install 3.6.8
-RUN pyenv install 3.7.4
-RUN pyenv global 3.7.4 3.6.8
+RUN pyenv install 3.6.9
+RUN pyenv install 3.7.5
+RUN pyenv install 3.8.0
+RUN pyenv global 3.8.0 3.7.5 3.6.9
 ENV PYSPARK_PYTHON=python
 ENV PIP=/root/.pyenv/shims/pip
 RUN $PIP install --upgrade pip
 RUN $PIP install --no-cache pyspark==${SPARK_VERSION}
 RUN eval "$(pyenv init -)" && \
 export SPARK_HOME=$(python -c "import sys; print([x for x in sys.path if x.endswith('site-packages')][0])")/pyspark && \
-curl --url "http://central.maven.org/maven2/com/amazonaws/aws-java-sdk/1.7.4/aws-java-sdk-1.7.4.jar" --output ${SPARK_HOME}/jars/aws-java-sdk-1.7.4.jar && \
-curl --url "http://central.maven.org/maven2/org/apache/hadoop/hadoop-aws/2.7.3/hadoop-aws-2.7.3.jar" --output ${SPARK_HOME}/jars/hadoop-aws-2.7.3.jar && \
+curl --url "http://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk/1.11.717/aws-java-sdk-1.11.717.jar" --output ${SPARK_HOME}/jars/aws-java-sdk-1.11.717.jar && \
+curl --url "http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/2.10.0/hadoop-aws-2.10.0.jar" --output ${SPARK_HOME}/jars/hadoop-aws-2.10.0.jar && \
 mkdir -p ${SPARK_HOME}/conf && \
 echo spark.hadoop.fs.s3.impl=org.apache.hadoop.fs.s3a.S3AFileSystem >> ${SPARK_HOME}/conf/spark-defaults.conf
 ADD requirements.txt /root/
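
A quick way to sanity-check the rebuilt image, as a hedged sketch: the `spark-testing` tag is a placeholder, not something this commit defines. It builds the image, lists the three pyenv-pinned interpreters (with `pyenv global 3.8.0 3.7.5 3.6.9`, plain `python` should now resolve to 3.8.0), and confirms the repinned AWS jars landed in the pip-installed PySpark's jars directory.

# Build the testing image from the updated Dockerfile
docker build -t spark-testing testing/

# List the pyenv-managed interpreters; `python` should report 3.8.0
docker run --rm spark-testing bash -c 'eval "$(pyenv init -)" && pyenv versions && python --version'

# Confirm the aws-java-sdk and hadoop-aws jars were fetched into ${SPARK_HOME}/jars
docker run --rm spark-testing bash -c 'eval "$(pyenv init -)" && ls "$(python -c "import os, pyspark; print(os.path.dirname(pyspark.__file__))")/jars" | grep -E "aws-java-sdk|hadoop-aws"'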
