@@ -25,15 +25,28 @@ jobs:
2525
2626 - name: Install Spark
2727 run: |
28- curl -fL https://dlcdn.apache.org/spark/spark-3.5.8/spark-3.5.8-bin-hadoop3.tgz -o spark.tgz
28+ curl -fL https://dlcdn.apache.org/spark/spark-3.5.3/spark-3.5.3-bin-hadoop3.tgz -o spark.tgz
2929 tar -xzf spark.tgz
30- mv spark-3.5.8-bin-hadoop3 $HOME/spark
30+ mv spark-3.5.3-bin-hadoop3 $HOME/spark
3131
3232 - name: Start Spark Connect Server
3333 run: |
3434 $HOME/spark/sbin/start-connect-server.sh \
35- --packages org.apache.spark:spark-connect_2.12:3.5.1
36- sleep 10
35+ --packages "org.apache.spark:spark-connect_2.12:3.5.3,io.delta:delta-spark_2.12:3.2.0,io.graphframes:graphframes-spark3_2.12:0.10.0,io.graphframes:graphframes-connect-spark3_2.12:0.10.0" \
36+ --conf "spark.connect.extensions.relation.classes=org.apache.spark.sql.graphframes.GraphFramesConnect" \
37+ --conf "spark.driver.extraJavaOptions=-Divy.cache.dir=/tmp -Divy.home=/tmp -XX:+UseG1GC -XX:+HeapDumpOnOutOfMemoryError" \
38+ --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \
39+ --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog" \
40+ --conf "spark.driver.memory=4g" \
41+ --conf "spark.executor.memory=4g" \
42+ --conf "spark.memory.fraction=0.8" \
43+ --conf "spark.memory.storageFraction=0.3" \
44+ --conf "spark.sql.shuffle.partitions=8" \
45+ --conf "spark.default.parallelism=8" \
46+ --conf "spark.driver.maxResultSize=2g"
47+
48+ # wait for server
49+ sleep 15
3750
3851 - name: Install gcovr
3952 run: |
5164
5265 - name: Run Tests
5366 run: |
54- export SPARK_REMOTE=sc://localhost
67+ export SPARK_REMOTE=sc://localhost:15002
5568 cd build
5669 ctest -LE dbrx --output-on-failure
5770
0 commit comments