Skip to content

Commit 16a38d5

Browse files
committed
enable with env var
1 parent 3ed034d commit 16a38d5

File tree

5 files changed

+6
-6
lines changed

5 files changed

+6
-6
lines changed

.github/workflows/spark_sql_test.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -81,7 +81,7 @@ jobs:
8181
run: |
8282
cd apache-spark
8383
rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
84-
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
84+
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_SPARK_TO_COLUMNAR_ENABLED=true ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
8585
build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
8686
if [ "${{ github.event.inputs.collect-fallback-logs }}" = "true" ]; then
8787
find . -type f -name "unit-tests.log" -print0 | xargs -0 grep -h "Comet cannot accelerate" | sed 's/.*Comet cannot accelerate/Comet cannot accelerate/' | sort -u > fallback.log

.github/workflows/spark_sql_test_ansi.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -73,7 +73,7 @@ jobs:
7373
run: |
7474
cd apache-spark
7575
rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
76-
RUST_BACKTRACE=1 ENABLE_COMET=true ENABLE_COMET_ONHEAP=true ENABLE_COMET_ANSI_MODE=true build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
76+
RUST_BACKTRACE=1 ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_SPARK_TO_COLUMNAR_ENABLED=true ENABLE_COMET_ANSI_MODE=true build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
7777
env:
7878
LC_ALL: "C.UTF-8"
7979

.github/workflows/spark_sql_test_native_datafusion.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -65,7 +65,7 @@ jobs:
6565
run: |
6666
cd apache-spark
6767
rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
68-
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_PARQUET_SCAN_IMPL=native_datafusion build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
68+
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_SPARK_TO_COLUMNAR_ENABLED=true COMET_PARQUET_SCAN_IMPL=native_datafusion build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
6969
env:
7070
LC_ALL: "C.UTF-8"
7171

.github/workflows/spark_sql_test_native_iceberg_compat.yml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -65,7 +65,7 @@ jobs:
6565
run: |
6666
cd apache-spark
6767
rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
68-
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_PARQUET_SCAN_IMPL=native_iceberg_compat build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
68+
ENABLE_COMET=true ENABLE_COMET_ONHEAP=true COMET_SPARK_TO_COLUMNAR_ENABLED=true COMET_PARQUET_SCAN_IMPL=native_iceberg_compat build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
6969
env:
7070
LC_ALL: "C.UTF-8"
7171

common/src/main/scala/org/apache/comet/CometConf.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -630,7 +630,7 @@ object CometConf extends ShimCometConf {
630630
"`spark.comet.sparkToColumnar.supportedOperatorList` into Arrow columnar format before " +
631631
"processing.")
632632
.booleanConf
633-
.createWithDefault(true)
633+
.createWithEnvVarOrDefault("COMET_SPARK_TO_COLUMNAR_ENABLED", false)
634634

635635
val COMET_SPARK_TO_ARROW_SUPPORTED_OPERATOR_LIST: ConfigEntry[Seq[String]] =
636636
conf("spark.comet.sparkToColumnar.supportedOperatorList")
@@ -639,7 +639,7 @@ object CometConf extends ShimCometConf {
639639
"format when `spark.comet.sparkToColumnar.enabled` is true")
640640
.stringConf
641641
.toSequence
642-
.createWithDefault(Seq("Range,InMemoryTableScan,RDDScan,ExternalRDDScanExec"))
642+
.createWithDefault(Seq("Range,LocalTableScan,InMemoryTableScan,RDDScan,ExternalRDDScan"))
643643

644644
val COMET_CASE_CONVERSION_ENABLED: ConfigEntry[Boolean] =
645645
conf("spark.comet.caseConversion.enabled")

0 commit comments

Comments (0)