Skip to content

Commit 086e9fb

Browse files
committed
Merge remote-tracking branch 'apache/main' into test-spark-sql-coverage
2 parents db3b39d + 31a2160 commit 086e9fb

File tree

3 files changed

+12
-3
lines changed

3 files changed

+12
-3
lines changed

spark/src/main/scala/org/apache/comet/rules/CometExecRule.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -887,7 +887,7 @@ case class CometExecRule(session: SparkSession) extends Rule[SparkPlan] {
887887
var supported = true
888888
for (o <- orderings) {
889889
if (QueryPlanSerde.exprToProto(o, inputs).isEmpty) {
890-
withInfo(s, s"unsupported range partitioning sort order: $o")
890+
withInfo(s, s"unsupported range partitioning sort order: $o", o)
891891
supported = false
892892
// We don't short-circuit in case there is more than one unsupported expression
893893
// to provide info for.

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -74,9 +74,11 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
7474
df.createOrReplaceTempView("tbl")
7575

7676
withSQLConf(CometConf.getExprAllowIncompatConfigKey("SortOrder") -> "false") {
77-
checkSparkAnswerAndFallbackReason(
77+
checkSparkAnswerAndFallbackReasons(
7878
"select * from tbl order by 1, 2",
79-
"unsupported range partitioning sort order")
79+
Set(
80+
"unsupported range partitioning sort order",
81+
"Sorting on floating-point is not 100% compatible with Spark"))
8082
}
8183
}
8284

spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -271,6 +271,13 @@ abstract class CometTestBase
271271
checkSparkAnswerAndFallbackReasons(df, Set(fallbackReason))
272272
}
273273

274+
/** Check for the correct results as well as the expected fallback reasons */
275+
protected def checkSparkAnswerAndFallbackReasons(
276+
query: String,
277+
fallbackReasons: Set[String]): (SparkPlan, SparkPlan) = {
278+
checkSparkAnswerAndFallbackReasons(sql(query), fallbackReasons)
279+
}
280+
274281
/** Check for the correct results as well as the expected fallback reasons */
275282
protected def checkSparkAnswerAndFallbackReasons(
276283
df: => DataFrame,

0 commit comments

Comments (0)