
Commit bcc00ca

Upgrade and align Spark versions across all workflows (#339)
1 parent 96111d5 commit bcc00ca

14 files changed: 171 additions, 30 deletions

.github/actions/build-whl/action.yml

Lines changed: 9 additions & 1 deletion
@@ -37,6 +37,13 @@ runs:
         git diff
       shell: bash
 
+    - name: Make this work with PySpark preview versions
+      if: contains(inputs.spark-version, 'preview')
+      run: |
+        sed -i -e 's/f"\(pyspark~=.*\)"/f"\1.dev1"/' -e 's/f"\({spark_compat_version}.0\)"/"${{ inputs.spark-version }}"/g' python/setup.py
+        git diff python/setup.py
+      shell: bash
+
     - name: Restore Maven packages cache
       if: github.event_name != 'schedule'
       uses: actions/cache/restore@v4
@@ -87,7 +94,8 @@ runs:
         # Test whl
         echo "::group::test-release.py"
         twine check python/dist/*
-        pip install python/dist/*.whl "pyspark~=${{ inputs.spark-compat-version }}.0"
+        # .dev1 allows this to work with preview versions
+        pip install python/dist/*.whl "pyspark~=${{ inputs.spark-compat-version }}.0.dev1"
        python test-release.py
         echo "::endgroup::"
       shell: bash
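
A note on the .dev1 suffix used in both added lines: PySpark preview builds appear on PyPI as PEP 440 .dev pre-releases, and a pre-release such as 4.2.0.dev1 sorts before 4.2.0, so it falls outside a plain pyspark~=4.2.0 range. Lowering the floor of the compatible-release range to .dev1 lets previews match while final releases keep matching too. A minimal sketch with the packaging library, where 4.2.0.dev1 stands in for an assumed 4.2.0-preview1 upload:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

preview = Version("4.2.0.dev1")  # assumed PyPI version of a 4.2.0-preview1 build

# 4.2.0.dev1 sorts before 4.2.0, so the plain range rejects it
print(SpecifierSet("~=4.2.0").contains(preview))                # False
# a .dev1 floor admits the preview (and opts in to pre-releases) ...
print(SpecifierSet("~=4.2.0.dev1").contains(preview))           # True
# ... while final releases in the same series still match
print(SpecifierSet("~=4.2.0.dev1").contains(Version("4.2.1")))  # True

The preview-only sed step above goes one step further: judging by its second pattern, it also swaps the f"{spark_compat_version}.0" placeholder in python/setup.py for the exact spark-version input, so the wheel is built against the requested preview.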

.github/actions/test-python/action.yml

Lines changed: 9 additions & 1 deletion
@@ -52,6 +52,13 @@ runs:
         echo "SPARK_EXTENSION_VERSION=$SPARK_EXTENSION_VERSION" | tee -a "$GITHUB_ENV"
       shell: bash
 
+    - name: Make this work with PySpark preview versions
+      if: contains(inputs.spark-version, 'preview')
+      run: |
+        sed -i -e 's/\({spark_compat_version}.0\)"/\1.dev1"/' python/setup.py
+        git diff python/setup.py
+      shell: bash
+
     - name: Restore Spark Binaries cache
       if: github.event_name != 'schedule' && ( startsWith(inputs.spark-version, '3.') && inputs.scala-compat-version == '2.12' || startsWith(inputs.spark-version, '4.') ) && ! contains(inputs.spark-version, '-SNAPSHOT')
       uses: actions/cache/restore@v4
@@ -195,7 +202,8 @@ runs:
         # Python Unit Tests (Spark Connect)
 
         echo "::group::pip install"
-        .pytest-venv/bin/pip install "pyspark[connect]~=${{ inputs.spark-compat-version }}.0"
+        # .dev1 allows this to work with preview versions
+        .pytest-venv/bin/pip install "pyspark[connect]~=${{ inputs.spark-compat-version }}.0.dev1"
         echo "::endgroup::"
 
         .pytest-venv/bin/python -m pytest python/test --junit-xml test-results-connect/pytest-$(date +%s.%N)-$RANDOM.xml
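
The sed step added here applies the same .dev1 idea directly to python/setup.py, so that installs driven by that file also accept preview builds. Its effect, inferred from the regex alone (the exact setup.py content is an assumption), is roughly:

spark_compat_version = "4.2"  # hypothetical value for illustration
# assumed shape of the requirement string in python/setup.py, before the step:
print(f"pyspark~={spark_compat_version}.0")        # pyspark~=4.2.0
# the same string after the sed appends the pre-release floor:
print(f"pyspark~={spark_compat_version}.0.dev1")   # pyspark~=4.2.0.dev1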

.github/show-spark-versions.sh

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+base=$(cd "$(dirname "$0")"; pwd)
+
+grep -- "-version" "$base"/workflows/prime-caches.yml | sed -e "s/ -//g" -e "s/ //g" -e "s/'//g" | grep -v -e "matrix" -e "]" | while read line
+do
+  IFS=":" read var compat_version <<< "$line"
+  if [[ "$var" == "spark-compat-version" ]]
+  then
+    while read line
+    do
+      IFS=":" read var patch_version <<< "$line"
+      if [[ "$var" == "spark-patch-version" ]]
+      then
+        echo -n "spark-version: $compat_version.$patch_version"
+        read line
+        if [[ "$line" == "spark-snapshot-version:true" ]]
+        then
+          echo "-SNAPSHOT"
+        else
+          echo
+        fi
+        break
+      fi
+    done
+  fi
+done > "$base"/workflows/prime-caches.yml.tmp
+
+grep spark-version "$base"/workflows/*.yml "$base"/workflows/prime-caches.yml.tmp | cut -d : -f 2- | sed -e "s/^[ -]*//" -e "s/'//g" | grep "^spark-version" | grep -v "matrix" | sort | uniq
+
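
In short, the new helper synthesizes full spark-version values from prime-caches.yml by joining each spark-compat-version with the spark-patch-version that follows it (appending -SNAPSHOT when spark-snapshot-version is true), writes them to workflows/prime-caches.yml.tmp, and then prints the sorted, de-duplicated spark-version lines (e.g. spark-version: 3.5.8) found across all workflow files plus that temp file, so any workflow that drifted from the aligned versions stands out.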

.github/workflows/build-jvm.yml

Lines changed: 14 additions & 2 deletions
@@ -30,7 +30,7 @@ jobs:
           scala-version: '2.12.17'
           java-compat-version: '8'
           hadoop-version: '3'
-        - spark-version: '3.5.7'
+        - spark-version: '3.5.8'
           spark-compat-version: '3.5'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
@@ -55,7 +55,7 @@ jobs:
           scala-version: '2.13.8'
           java-compat-version: '8'
           hadoop-version: '3'
-        - spark-version: '3.5.7'
+        - spark-version: '3.5.8'
           spark-compat-version: '3.5'
           scala-compat-version: '2.13'
           scala-version: '2.13.8'
@@ -67,6 +67,18 @@ jobs:
           scala-version: '2.13.16'
           java-compat-version: '17'
           hadoop-version: '3'
+        - spark-version: '4.1.1'
+          spark-compat-version: '4.1'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.17'
+          java-compat-version: '17'
+          hadoop-version: '3'
+        - spark-version: '4.2.0-preview1'
+          spark-compat-version: '4.2'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.18'
+          java-compat-version: '17'
+          hadoop-version: '3'
 
     steps:
       - name: Checkout

.github/workflows/build-python.yml

Lines changed: 22 additions & 3 deletions
@@ -18,26 +18,45 @@ jobs:
           scala-compat-version: '2.12'
           scala-version: '2.12.15'
           java-compat-version: '8'
+          python-version: '3.9'
         - spark-compat-version: '3.3'
           spark-version: '3.3.4'
           scala-compat-version: '2.12'
           scala-version: '2.12.15'
           java-compat-version: '8'
+          python-version: '3.9'
         - spark-compat-version: '3.4'
           spark-version: '3.4.4'
           scala-compat-version: '2.12'
           scala-version: '2.12.17'
           java-compat-version: '8'
+          python-version: '3.9'
         - spark-compat-version: '3.5'
-          spark-version: '3.5.6'
+          spark-version: '3.5.8'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
           java-compat-version: '8'
+          python-version: '3.9'
         - spark-compat-version: '4.0'
-          spark-version: '4.0.0'
+          spark-version: '4.0.1'
           scala-compat-version: '2.13'
           scala-version: '2.13.16'
           java-compat-version: '17'
+          python-version: '3.9'
+        - spark-version: '4.1.1'
+          spark-compat-version: '4.1'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.17'
+          java-compat-version: '17'
+          hadoop-version: '3'
+          python-version: '3.10'
+        - spark-version: '4.2.0-preview1'
+          spark-compat-version: '4.2'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.18'
+          java-compat-version: '17'
+          hadoop-version: '3'
+          python-version: '3.10'
 
     steps:
       - name: Checkout
@@ -51,4 +70,4 @@ jobs:
           spark-compat-version: ${{ matrix.spark-compat-version }}
           scala-compat-version: ${{ matrix.scala-compat-version }}
           java-compat-version: ${{ matrix.java-compat-version }}
-          python-version: "3.9"
+          python-version: ${{ matrix.python-version }}

.github/workflows/build-snapshots.yml

Lines changed: 4 additions & 4 deletions
@@ -28,7 +28,7 @@ jobs:
           scala-version: '2.12.17'
           java-compat-version: '8'
         - spark-compat-version: '3.5'
-          spark-version: '3.5.8-SNAPSHOT'
+          spark-version: '3.5.9-SNAPSHOT'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
           java-compat-version: '8'
@@ -49,7 +49,7 @@ jobs:
           scala-version: '2.13.8'
           java-compat-version: '8'
         - spark-compat-version: '3.5'
-          spark-version: '3.5.8-SNAPSHOT'
+          spark-version: '3.5.9-SNAPSHOT'
           scala-compat-version: '2.13'
           scala-version: '2.13.8'
           java-compat-version: '8'
@@ -59,14 +59,14 @@ jobs:
           scala-version: '2.13.16'
           java-compat-version: '17'
         - spark-compat-version: '4.1'
-          spark-version: '4.1.0-SNAPSHOT'
+          spark-version: '4.1.2-SNAPSHOT'
           scala-compat-version: '2.13'
           scala-version: '2.13.17'
           java-compat-version: '17'
         - spark-compat-version: '4.2'
           spark-version: '4.2.0-SNAPSHOT'
           scala-compat-version: '2.13'
-          scala-version: '2.13.17'
+          scala-version: '2.13.18'
           java-compat-version: '17'
 
     steps:

.github/workflows/check.yml

Lines changed: 8 additions & 4 deletions
@@ -77,21 +77,25 @@ jobs:
           scala-compat-version: '2.12'
           scala-version: '2.12.15'
         - spark-compat-version: '3.3'
-          spark-version: '3.3.3'
+          spark-version: '3.3.4'
           scala-compat-version: '2.12'
           scala-version: '2.12.15'
         - spark-compat-version: '3.4'
           scala-compat-version: '2.12'
           scala-version: '2.12.17'
-          spark-version: '3.4.2'
+          spark-version: '3.4.4'
         - spark-compat-version: '3.5'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
-          spark-version: '3.5.0'
+          spark-version: '3.5.8'
         - spark-compat-version: '4.0'
           scala-compat-version: '2.13'
           scala-version: '2.13.16'
-          spark-version: '4.0.0'
+          spark-version: '4.0.1'
+        - spark-compat-version: '4.1'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.17'
+          spark-version: '4.1.1'
 
     steps:
      - name: Checkout

.github/workflows/prime-caches.yml

Lines changed: 23 additions & 5 deletions
@@ -31,7 +31,7 @@ jobs:
         - spark-compat-version: '3.5'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
-          spark-patch-version: '6'
+          spark-patch-version: '8'
           hadoop-version: '3'
 
         - spark-compat-version: '3.2'
@@ -52,14 +52,26 @@ jobs:
         - spark-compat-version: '3.5'
           scala-compat-version: '2.13'
           scala-version: '2.13.8'
-          spark-patch-version: '6'
+          spark-patch-version: '8'
           hadoop-version: '3'
         - spark-compat-version: '4.0'
           scala-compat-version: '2.13'
           scala-version: '2.13.16'
           spark-patch-version: '1'
           java-compat-version: '17'
           hadoop-version: '3'
+        - spark-compat-version: '4.1'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.17'
+          spark-patch-version: '1'
+          java-compat-version: '17'
+          hadoop-version: '3'
+        - spark-compat-version: '4.2'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.18'
+          spark-patch-version: '0-preview1'
+          java-compat-version: '17'
+          hadoop-version: '3'
 
         - spark-compat-version: '3.2'
           scala-compat-version: '2.12'
@@ -82,7 +94,7 @@ jobs:
         - spark-compat-version: '3.5'
           scala-compat-version: '2.12'
           scala-version: '2.12.18'
-          spark-patch-version: '8'
+          spark-patch-version: '9'
           spark-snapshot-version: true
           hadoop-version: '3'
 
@@ -107,7 +119,7 @@ jobs:
         - spark-compat-version: '3.5'
           scala-compat-version: '2.13'
           scala-version: '2.13.8'
-          spark-patch-version: '8'
+          spark-patch-version: '9'
           spark-snapshot-version: true
           hadoop-version: '3'
         - spark-compat-version: '4.0'
@@ -118,7 +130,13 @@ jobs:
           hadoop-version: '3'
         - spark-compat-version: '4.1'
           scala-compat-version: '2.13'
-          scala-version: '2.13.16'
+          scala-version: '2.13.17'
+          spark-patch-version: '2'
+          spark-snapshot-version: true
+          hadoop-version: '3'
+        - spark-compat-version: '4.2'
+          scala-compat-version: '2.13'
+          scala-version: '2.13.18'
           spark-patch-version: '0'
           spark-snapshot-version: true
           hadoop-version: '3'

.github/workflows/publish-release.yml

Lines changed: 3 additions & 2 deletions
@@ -17,8 +17,9 @@ on:
           {"params": {"spark-version": "3.2.4", "scala-version": "2.13.5", "java-compat-version": "8"}},
           {"params": {"spark-version": "3.3.4", "scala-version": "2.13.8", "java-compat-version": "8"}},
           {"params": {"spark-version": "3.4.4", "scala-version": "2.13.8", "java-compat-version": "8"}},
-          {"params": {"spark-version": "3.5.6", "scala-version": "2.13.8", "java-compat-version": "8"}},
-          {"params": {"spark-version": "4.0.0", "scala-version": "2.13.16", "java-compat-version": "17"}}
+          {"params": {"spark-version": "3.5.8", "scala-version": "2.13.8", "java-compat-version": "8"}},
+          {"params": {"spark-version": "4.0.1", "scala-version": "2.13.16", "java-compat-version": "17"}}
+          {"params": {"spark-version": "4.1.1", "scala-version": "2.13.17", "java-compat-version": "17"}}
         ]
       }
 

.github/workflows/publish-snapshot.yml

Lines changed: 2 additions & 1 deletion
@@ -58,8 +58,9 @@ jobs:
         - params: {"spark-version": "3.2.4", "scala-version": "2.13.5", "scala-compat-version": "2.13", "java-compat-version": "8"}
         - params: {"spark-version": "3.3.4", "scala-version": "2.13.8", "scala-compat-version": "2.13", "java-compat-version": "8"}
         - params: {"spark-version": "3.4.4", "scala-version": "2.13.8", "scala-compat-version": "2.13", "java-compat-version": "8"}
-        - params: {"spark-version": "3.5.6", "scala-version": "2.13.8", "scala-compat-version": "2.13", "java-compat-version": "8"}
+        - params: {"spark-version": "3.5.8", "scala-version": "2.13.8", "scala-compat-version": "2.13", "java-compat-version": "8"}
         - params: {"spark-version": "4.0.1", "scala-version": "2.13.16", "scala-compat-version": "2.13", "java-compat-version": "17"}
+        - params: {"spark-version": "4.1.1", "scala-version": "2.13.17", "scala-compat-version": "2.13", "java-compat-version": "17"}
 
     steps:
       - name: Checkout code
