@@ -1189,7 +1189,7 @@ index 9e9d717db3b..c1a7caf56e0 100644
  package org.apache.spark.sql.execution
 
 -import org.apache.spark.sql.{DataFrame, QueryTest, Row}
-+import org.apache.spark.sql.{DataFrame, IgnoreComet, QueryTest, Row}
++import org.apache.spark.sql.{DataFrame, QueryTest, Row}
 +import org.apache.spark.sql.comet.CometProjectExec
  import org.apache.spark.sql.connector.SimpleWritableDataSource
  import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanHelper, DisableAdaptiveExecutionSuite, EnableAdaptiveExecutionSuite}
@@ -1206,13 +1206,12 @@ index 9e9d717db3b..c1a7caf56e0 100644
        assert(actual == expected)
      }
    }
-@@ -112,7 +116,8 @@ abstract class RemoveRedundantProjectsSuiteBase
+@@ -112,7 +116,7 @@ abstract class RemoveRedundantProjectsSuiteBase
      assertProjectExec(query, 1, 3)
    }
 
 -  test("join with ordering requirement") {
-+  test("join with ordering requirement",
-+    IgnoreComet("TODO: Support SubqueryBroadcastExec in Comet: #242")) {
++  test("join with ordering requirement") {
      val query = "select * from (select key, a, c, b from testView) as t1 join " +
        "(select key, a, b, c from testView) as t2 on t1.key = t2.key where t2.a > 50"
      assertProjectExec(query, 2, 2)
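For context on the tag this change drops: judging from its use as an extra argument to `test(...)`, `IgnoreComet` is a ScalaTest tag that the patched Spark suites use to skip tests Comet cannot run yet, carrying the reason as a string; removing the tag (and its now-unused import) re-enables the "join with ordering requirement" test, presumably because the `SubqueryBroadcastExec` support tracked in #242 has landed. A minimal sketch of how such a tag-based skip works, assuming ScalaTest's `Tag`/`AnyFunSuite` API — the tag's name string, the suite, and the sbt invocation below are illustrative, not the repo's exact definitions:

```scala
import org.scalatest.Tag
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical stand-in for the tag the diff removes: a ScalaTest Tag
// subclass that records why a test is skipped under Comet. ScalaTest
// matches tags by their name string, not by the class itself.
case class IgnoreComet(reason: String)
    extends Tag("org.apache.spark.sql.IgnoreComet")

class TaggedSuite extends AnyFunSuite {
  // Extra arguments to test() are tags. A runner can exclude them, e.g.:
  //   sbt "testOnly *TaggedSuite -- -l org.apache.spark.sql.IgnoreComet"
  // An untagged run executes the test normally.
  test(
    "join with ordering requirement",
    IgnoreComet("TODO: Support SubqueryBroadcastExec in Comet: #242")) {
    assert(1 + 1 == 2)
  }
}
```

Keeping the skip reason in the tag rather than in a comment means the excluded test still shows up, with its rationale, in tooling that lists tagged tests.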