
Commit 3b51e19

[SPARK-53834][INFRA] Add a separate docker file for Python 3.14 daily build
### What changes were proposed in this pull request?

This PR aims to add a separate docker file for `Python 3.14` daily build.

### Why are the changes needed?

To prepare Python 3.14 test coverage for Apache Spark 4.1.0. Note that

1. SPARK-53835 is filed to handle `pyarrow/mlflow/torch/torchvision` package installation later when they are ready.
2. This PR will expose two kinds of Python UT failures in order to help us be ready during Apache Spark 4.1.0 preparation.
    - Python 3.14 related failures
    - Python Package (PyArrow/MLFlow/Torch/TorchVision) related failures
    - Both `Classic` and `Connect` mode-related failures

### Does this PR introduce _any_ user-facing change?

No, this is a new test infra.

### How was this patch tested?

Manual review.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #52544 from dongjoon-hyun/SPARK-53834.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 22d9709 commit 3b51e19

File tree: 3 files changed, +140 -0 lines changed

.github/workflows/build_infra_images_cache.yml

Lines changed: 14 additions & 0 deletions
@@ -39,6 +39,7 @@ on:
       - 'dev/spark-test-image/python-312/Dockerfile'
       - 'dev/spark-test-image/python-313/Dockerfile'
       - 'dev/spark-test-image/python-313-nogil/Dockerfile'
+      - 'dev/spark-test-image/python-314/Dockerfile'
       - 'dev/spark-test-image/numpy-213/Dockerfile'
       - '.github/workflows/build_infra_images_cache.yml'
 # Create infra image when cutting down branches/tags
@@ -230,6 +231,19 @@ jobs:
     - name: Image digest (PySpark with Python 3.13 no GIL)
       if: hashFiles('dev/spark-test-image/python-313-nogil/Dockerfile') != ''
       run: echo ${{ steps.docker_build_pyspark_python_313_nogil.outputs.digest }}
+    - name: Build and push (PySpark with Python 3.14)
+      if: hashFiles('dev/spark-test-image/python-314/Dockerfile') != ''
+      id: docker_build_pyspark_python_314
+      uses: docker/build-push-action@v6
+      with:
+        context: ./dev/spark-test-image/python-314/
+        push: true
+        tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }}-static
+        cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }}
+        cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }},mode=max
+    - name: Image digest (PySpark with Python 3.14)
+      if: hashFiles('dev/spark-test-image/python-314/Dockerfile') != ''
+      run: echo ${{ steps.docker_build_pyspark_python_314.outputs.digest }}
     - name: Build and push (PySpark with Numpy 2.1.3)
       if: hashFiles('dev/spark-test-image/numpy-213/Dockerfile') != ''
       id: docker_build_pyspark_numpy_213
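Note the registry-backed layer cache in the new step: `cache-from` reads previously pushed layers from GHCR and `cache-to` writes them back with `mode=max`, so only layers whose inputs changed get rebuilt, while the `-static` tag is the image the test jobs actually consume. A minimal smoke test of the pushed image, assuming the build ran on the `master` branch (so `${{ github.ref_name }}` resolves to `master`) and that you have pull access to ghcr.io:

  # Pull the branch image and confirm the interpreter it ships.
  docker pull ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:master-static
  docker run --rm ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:master-static python3.14 --version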
.github/workflows/build_python_3.14.yml

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: "Build / Python-only (master, Python 3.14)"
+
+on:
+  schedule:
+    - cron: '0 21 * * *'
+  workflow_dispatch:
+
+jobs:
+  run-build:
+    permissions:
+      packages: write
+    name: Run
+    uses: ./.github/workflows/build_and_test.yml
+    if: github.repository == 'apache/spark'
+    with:
+      java: 17
+      branch: master
+      hadoop: hadoop3
+      envs: >-
+        {
+          "PYSPARK_IMAGE_TO_TEST": "python-314",
+          "PYTHON_TO_TEST": "python3.14"
+        }
+      jobs: >-
+        {
+          "pyspark": "true",
+          "pyspark-pandas": "true"
+        }
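Besides the nightly cron at 21:00 UTC, the `workflow_dispatch` trigger allows manual runs. A sketch using the GitHub CLI, assuming you are authenticated with sufficient permissions on the repository (or run it against your own fork):

  # Kick off the daily Python 3.14 build by its workflow name, on master.
  gh workflow run "Build / Python-only (master, Python 3.14)" --repo apache/spark --ref master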
dev/spark-test-image/python-314/Dockerfile

Lines changed: 79 additions & 0 deletions

@@ -0,0 +1,79 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Image for building and testing Spark branches. Based on Ubuntu 22.04.
+# See also in https://hub.docker.com/_/ubuntu
+FROM ubuntu:jammy-20240911.1
+LABEL org.opencontainers.image.authors="Apache Spark project <[email protected]>"
+LABEL org.opencontainers.image.licenses="Apache-2.0"
+LABEL org.opencontainers.image.ref.name="Apache Spark Infra Image For PySpark with Python 3.14"
+# Overwrite this label to avoid exposing the underlying Ubuntu OS version label
+LABEL org.opencontainers.image.version=""
+
+ENV FULL_REFRESH_DATE=20251007
+
+ENV DEBIAN_FRONTEND=noninteractive
+ENV DEBCONF_NONINTERACTIVE_SEEN=true
+
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    ca-certificates \
+    curl \
+    gfortran \
+    git \
+    gnupg \
+    libcurl4-openssl-dev \
+    libfontconfig1-dev \
+    libfreetype6-dev \
+    libfribidi-dev \
+    libgit2-dev \
+    libharfbuzz-dev \
+    libjpeg-dev \
+    liblapack-dev \
+    libopenblas-dev \
+    libpng-dev \
+    libpython3-dev \
+    libssl-dev \
+    libtiff5-dev \
+    libxml2-dev \
+    openjdk-17-jdk-headless \
+    pkg-config \
+    qpdf \
+    tzdata \
+    software-properties-common \
+    wget \
+    zlib1g-dev
+
+# Install Python 3.14
+RUN add-apt-repository ppa:deadsnakes/ppa
+RUN apt-get update && apt-get install -y \
+    python3.14 \
+    && apt-get autoremove --purge -y \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+
+ARG BASIC_PIP_PKGS="numpy six==1.16.0 pandas==2.3.3 scipy plotly<6.0.0 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
+# Python deps for Spark Connect
+ARG CONNECT_PIP_PKGS="grpcio==1.75.1 grpcio-status==1.71.2 protobuf==5.29.5 googleapis-common-protos==1.65.0 graphviz==0.20.3"
+
+# Install Python 3.14 packages
+RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.14
+RUN python3.14 -m pip install --ignore-installed blinker>=1.6.2 # mlflow needs this
+RUN python3.14 -m pip install $BASIC_PIP_PKGS unittest-xml-reporting $CONNECT_PIP_PKGS lxml && \
+    python3.14 -m pip install torcheval && \
+    python3.14 -m pip cache purge
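To reproduce the daily-build environment locally, the image can be built straight from this directory and sanity-checked; `pyspark-python-314` below is just an illustrative local tag:

  # Build from the repo root, then verify the Spark Connect deps import under Python 3.14.
  docker build -t pyspark-python-314 dev/spark-test-image/python-314/
  docker run --rm pyspark-python-314 python3.14 -c "import pandas, grpc, google.protobuf; print('deps ok')"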
