
Commit 3c1fdf8

Merge branch 'apache:master' into asautins/explode-lazy
2 parents 174d29a + 1124b09 commit 3c1fdf8

File tree: 5 files changed, +162 -202 lines

.github/workflows/build_and_test.yml

Lines changed: 4 additions & 6 deletions
@@ -522,9 +522,9 @@ jobs:
           - >-
             pyspark-pandas-slow
           - >-
-            pyspark-pandas-connect-part0, pyspark-pandas-connect-part3
+            pyspark-pandas-connect
           - >-
-            pyspark-pandas-connect-part1, pyspark-pandas-connect-part2
+            pyspark-pandas-slow-connect
         exclude:
           # Always run if pyspark == 'true', even infra-image is skip (such as non-master job)
           # In practice, the build will run in individual PR, but not against the individual commit
@@ -538,10 +538,8 @@ jobs:
           # in Apache Spark repository.
           - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas' }}
           - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-slow' }}
-          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect-part0' }}
-          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect-part1' }}
-          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect-part2' }}
-          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect-part3' }}
+          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect' }}
+          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-slow-connect' }}
     env:
       MODULES_TO_TEST: ${{ matrix.modules }}
       HADOOP_PROFILE: ${{ inputs.hadoop }}
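
For context, a minimal sketch of how the consolidated entries behave, assuming the usual strategy/matrix wrapper keys (which are not shown in the diff) and only the two new module groups. The exclude expressions are copied from the hunk above; per GitHub Actions expression semantics, `a && b` yields `b` when `a` is truthy, so the entry resolves to the module name (and excludes that matrix row) whenever the precondition did not request pyspark-pandas tests, and resolves to `false` (matching nothing) when it did.

    strategy:
      matrix:
        modules:
          # The former part0..part3 groups are folded into these two modules.
          - pyspark-pandas-connect
          - pyspark-pandas-slow-connect
        exclude:
          # Drops the entry above when the precondition output is not 'true';
          # evaluates to false (and so keeps the entry) when it is 'true'.
          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-connect' }}
          - modules: ${{ fromJson(needs.precondition.outputs.required).pyspark-pandas != 'true' && 'pyspark-pandas-slow-connect' }}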

.github/workflows/build_python_connect.yml

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ jobs:
           # Several tests related to catalog requires to run them sequencially, e.g., writing a table in a listener.
           ./python/run-tests --parallelism=1 --python-executables=python3 --modules pyspark-connect,pyspark-ml-connect
           # None of tests are dependent on each other in Pandas API on Spark so run them in parallel
-          ./python/run-tests --parallelism=1 --python-executables=python3 --modules pyspark-pandas-connect-part0,pyspark-pandas-connect-part1,pyspark-pandas-connect-part2,pyspark-pandas-connect-part3
+          ./python/run-tests --parallelism=1 --python-executables=python3 --modules pyspark-pandas-connect,pyspark-pandas-slow-connect

           # Stop Spark Connect server.
           ./sbin/stop-connect-server.sh
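
A hedged sketch of how the updated invocation could sit as a standalone workflow step; the step name and the `run:` wrapper are illustrative, while the two shell commands are taken directly from the new diff lines.

      - name: Run Pandas API on Spark tests over Spark Connect   # illustrative step name
        run: |
          # Module groups are comma-separated, exactly as in the updated workflow line.
          ./python/run-tests --parallelism=1 --python-executables=python3 \
            --modules pyspark-pandas-connect,pyspark-pandas-slow-connect
          # Shut the Connect server down afterwards, as the workflow does.
          ./sbin/stop-connect-server.sh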

.github/workflows/python_hosted_runner_test.yml

Lines changed: 2 additions & 6 deletions
@@ -82,13 +82,9 @@ jobs:
           - >-
             pyspark-pandas-slow
           - >-
-            pyspark-pandas-connect-part0
+            pyspark-pandas-connect
           - >-
-            pyspark-pandas-connect-part1
-          - >-
-            pyspark-pandas-connect-part2
-          - >-
-            pyspark-pandas-connect-part3
+            pyspark-pandas-slow-connect
     env:
       MODULES_TO_TEST: ${{ matrix.modules }}
       PYTHON_TO_TEST: python${{inputs.python}}
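
As in the first file, each remaining matrix entry fans out into its own runner job, and the entry's value reaches the job through `MODULES_TO_TEST`. A minimal sketch under that assumption; the `strategy:` wrapper and the `echo` step are illustrative, only the module names and the env mapping come from the diff.

    strategy:
      matrix:
        modules:
          - pyspark-pandas-slow
          - pyspark-pandas-connect
          - pyspark-pandas-slow-connect
    env:
      MODULES_TO_TEST: ${{ matrix.modules }}
    steps:
      - run: echo "Testing $MODULES_TO_TEST"   # each job sees exactly one module group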

0 commit comments
