From d71ad2de3519d7ba0ec6d14248ecfd2548e6633c Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Tue, 23 Sep 2025 15:33:22 -0700
Subject: [PATCH] [SPARK-53683] Use `Spark 3.5.7` for Spark 3 integration tests

---
 .github/workflows/build_and_test.yml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 9b2fbf5..7c35d83 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -217,11 +217,11 @@ jobs:
         run: swift test --filter NOTHING -c release
       - name: Test
         run: |
-          curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-3.5.6/spark-3.5.6-bin-hadoop3.tgz?action=download
-          tar xvfz spark-3.5.6-bin-hadoop3.tgz && rm spark-3.5.6-bin-hadoop3.tgz
-          mv spark-3.5.6-bin-hadoop3 /tmp/spark
+          curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-3.5.7/spark-3.5.7-bin-hadoop3.tgz?action=download
+          tar xvfz spark-3.5.7-bin-hadoop3.tgz && rm spark-3.5.7-bin-hadoop3.tgz
+          mv spark-3.5.7-bin-hadoop3 /tmp/spark
           cd /tmp/spark/sbin
-          ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.6
+          ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.7
           cd -
           swift test --no-parallel -c release
 
@@ -245,11 +245,11 @@ jobs:
         run: swift test --filter NOTHING -c release
       - name: Test
         run: |
-          curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-3.5.6/spark-3.5.6-bin-hadoop3.tgz?action=download
-          tar xvfz spark-3.5.6-bin-hadoop3.tgz && rm spark-3.5.6-bin-hadoop3.tgz
-          mv spark-3.5.6-bin-hadoop3 /tmp/spark
+          curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-3.5.7/spark-3.5.7-bin-hadoop3.tgz?action=download
+          tar xvfz spark-3.5.7-bin-hadoop3.tgz && rm spark-3.5.7-bin-hadoop3.tgz
+          mv spark-3.5.7-bin-hadoop3 /tmp/spark
           cd /tmp/spark/sbin
-          ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.6,org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0 -c spark.sql.catalog.local=org.apache.iceberg.spark.SparkCatalog -c spark.sql.catalog.local.type=hadoop -c spark.sql.catalog.local.warehouse=/tmp/spark/warehouse -c spark.sql.defaultCatalog=local
+          ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.7,org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.0 -c spark.sql.catalog.local=org.apache.iceberg.spark.SparkCatalog -c spark.sql.catalog.local.type=hadoop -c spark.sql.catalog.local.warehouse=/tmp/spark/warehouse -c spark.sql.defaultCatalog=local
           cd -
           swift test --filter DataFrameWriterV2Tests -c release
           swift test --filter IcebergTest -c release