@@ -53,7 +53,7 @@ index b386d135da1..46449e3f3f1 100644
   <!--
     This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
 diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
-index c595b50950b..6b60213e775 100644
+index c595b50950b..3abb6cb9441 100644
 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
 +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
 @@ -102,7 +102,7 @@ class SparkSession private(
@@ -79,7 +79,7 @@ index c595b50950b..6b60213e775 100644
   }
 
 +  private def loadCometExtension(sparkContext: SparkContext): Seq[String] = {
-+    if (sparkContext.getConf.getBoolean("spark.comet.enabled", false)) {
++    if (sparkContext.getConf.getBoolean("spark.comet.enabled", isCometEnabled)) {
 +      Seq("org.apache.comet.CometSparkSessionExtensions")
 +    } else {
 +      Seq.empty
@@ -100,6 +100,19 @@ index c595b50950b..6b60213e775 100644
       try {
         val extensionConfClass = Utils.classForName(extensionConfClassName)
         val extensionConf = extensionConfClass.getConstructor().newInstance()
+@@ -1323,4 +1333,12 @@ object SparkSession extends Logging {
+       }
+     }
+   }
++
++  /**
++   * Whether Comet extension is enabled
++   */
++  def isCometEnabled: Boolean = {
++    val v = System.getenv("ENABLE_COMET")
++    v == null || v.toBoolean
++  }
+ }
 diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
 index db587dd9868..aac7295a53d 100644
 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
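For orientation: the hunks above make Comet opt-out rather than opt-in. `SparkSession.isCometEnabled` now treats an unset `ENABLE_COMET` as true, and `loadCometExtension` uses it as the default for `spark.comet.enabled`. A minimal standalone sketch of that toggle semantics (the object name is ours, not part of the patch):

```scala
// Sketch of the ENABLE_COMET toggle semantics introduced above.
// Standalone illustration; the object name is ours, not part of the patch.
object CometToggle {
  // Unset or "true" enables Comet; only an explicit "false" disables it.
  // Note: as in the patch, a malformed value such as "yes" would throw
  // IllegalArgumentException from String.toBoolean.
  def isCometEnabled: Boolean = {
    val v = System.getenv("ENABLE_COMET")
    v == null || v.toBoolean
  }

  def main(args: Array[String]): Unit =
    println(s"Comet enabled: $isCometEnabled")
}
```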
@@ -957,6 +970,37 @@ index 525d97e4998..8a3e7457618 100644
     AccumulatorSuite.verifyPeakExecutionMemorySet(sparkContext, "external sort") {
       sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC").collect()
     }
+diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+index 48ad10992c5..51d1ee65422 100644
+--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
++++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+@@ -221,6 +221,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+     withSession(extensions) { session =>
+       session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, true)
+       session.conf.set(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
++      // https://github.com/apache/datafusion-comet/issues/1197
++      session.conf.set("spark.comet.enabled", false)
+       assert(session.sessionState.columnarRules.contains(
+         MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+       import session.sqlContext.implicits._
+@@ -279,6 +281,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+     }
+     withSession(extensions) { session =>
+       session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, enableAQE)
++      // https://github.com/apache/datafusion-comet/issues/1197
++      session.conf.set("spark.comet.enabled", false)
+       assert(session.sessionState.columnarRules.contains(
+         MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+       import session.sqlContext.implicits._
+@@ -317,6 +321,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+     val session = SparkSession.builder()
+       .master("local[1]")
+       .config(COLUMN_BATCH_SIZE.key, 2)
++      // https://github.com/apache/datafusion-comet/issues/1197
++      .config("spark.comet.enabled", false)
+       .withExtensions { extensions =>
+         extensions.injectColumnar(session =>
+           MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())) }
 diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
 index 75eabcb96f2..36e3318ad7e 100644
 --- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
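The three `SparkSessionExtensionSuite` hunks above all apply the same opt-out pattern: with Comet now on by default, a test that asserts on its own columnar rules must explicitly disable Comet so `CometSparkSessionExtensions` does not rewrite the plan first. A condensed sketch of that pattern (assumed builder usage, not the suite's exact code):

```scala
// Condensed sketch of the opt-out pattern used in the test hunks above
// (assumed builder usage, not the suite's exact code).
import org.apache.spark.sql.SparkSession

object CometOptOutExample {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local[1]")
      // Disable Comet so the test's own columnar rules stay in effect; see
      // https://github.com/apache/datafusion-comet/issues/1197
      .config("spark.comet.enabled", "false")
      .getOrCreate()
    println(session.conf.get("spark.comet.enabled")) // prints "false"
    session.stop()
  }
}
```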
@@ -2746,7 +2790,7 @@ index abe606ad9c1..2d930b64cca 100644
     val tblTargetName = "tbl_target"
     val tblSourceQualified = s"default.$tblSourceName"
 diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
-index dd55fcfe42c..aa9b0be8e68 100644
+index dd55fcfe42c..2702f87c1f1 100644
 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
 +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
 @@ -41,6 +41,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -2770,17 +2814,14 @@ index dd55fcfe42c..aa9b0be8e68 100644
     }
   }
 
-@@ -242,6 +247,41 @@ private[sql] trait SQLTestUtilsBase
+@@ -242,6 +247,38 @@ private[sql] trait SQLTestUtilsBase
    protected override def _sqlContext: SQLContext = self.spark.sqlContext
  }
 
 +  /**
 +   * Whether Comet extension is enabled
 +   */
-+  protected def isCometEnabled: Boolean = {
-+    val v = System.getenv("ENABLE_COMET")
-+    v != null && v.toBoolean
-+  }
++  protected def isCometEnabled: Boolean = SparkSession.isCometEnabled
 +
 +  /**
 +   * Whether to enable ansi mode This is only effective when
@@ -2812,7 +2853,7 @@ index dd55fcfe42c..aa9b0be8e68 100644
   protected override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
     SparkSession.setActiveSession(spark)
     super.withSQLConf(pairs: _*)(f)
-@@ -434,6 +474,8 @@ private[sql] trait SQLTestUtilsBase
+@@ -434,6 +471,8 @@ private[sql] trait SQLTestUtilsBase
   val schema = df.schema
   val withoutFilters = df.queryExecution.executedPlan.transform {
     case FilterExec(_, child) => child
@@ -2910,10 +2951,10 @@ index 1966e1e64fd..cde97a0aafe 100644
     spark.sql(
       """
 diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-index 07361cfdce9..6673c141c9a 100644
+index 07361cfdce9..e40c59a4207 100644
 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
 +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-@@ -55,25 +55,53 @@ object TestHive
+@@ -55,25 +55,52 @@ object TestHive
     new SparkContext(
       System.getProperty("spark.sql.test.master", "local[1]"),
       "TestSQLContext",
@@ -2955,8 +2996,7 @@ index 07361cfdce9..6673c141c9a 100644
 +      // ConstantPropagation etc.
 +      .set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)
 +
-+    val v = System.getenv("ENABLE_COMET")
-+    if (v != null && v.toBoolean) {
++    if (SparkSession.isCometEnabled) {
 +      conf
 +        .set("spark.sql.extensions", "org.apache.comet.CometSparkSessionExtensions")
 +        .set("spark.comet.enabled", "true")
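Like the `SQLTestUtils` hunk, the `TestHive` hunk above now delegates to the single `SparkSession.isCometEnabled` source of truth instead of re-reading `ENABLE_COMET`. A sketch of the conditional conf wiring it performs (the helper name is ours; the conf keys and extension class are as in the patch):

```scala
// Sketch of the conditional TestHive wiring shown above. The helper name is
// ours; the conf keys and extension class name are as in the patch.
import org.apache.spark.SparkConf

object CometConfWiring {
  // When Comet is enabled, register its session extension and turn it on;
  // otherwise leave the conf untouched.
  def withCometIfEnabled(conf: SparkConf, cometEnabled: Boolean): SparkConf =
    if (cometEnabled) {
      conf
        .set("spark.sql.extensions", "org.apache.comet.CometSparkSessionExtensions")
        .set("spark.comet.enabled", "true")
    } else {
      conf
    }
}
```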