Skip to content

Commit d3ee468 — "rebase_main" (1 parent: 5c75603)

Browse files

File tree

1 file changed: +14 additions, −15 deletions

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 14 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -115,7 +115,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
115115

116116
test("Integral Division Overflow Handling Matches Spark Behavior") {
117117
withTable("t1") {
118-
withSQLConf(CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
118+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
119119
val value = Long.MinValue
120120
sql("create table t1(c1 long, c2 short) using parquet")
121121
sql(s"insert into t1 values($value, -1)")
@@ -409,7 +409,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
409409
checkSparkMaybeThrows(res) match {
410410
case (Some(sparkExc), Some(cometExc)) =>
411411
val cometErrorPattern =
412-
"""org.apache.comet.CometNativeException: [ARITHMETIC_OVERFLOW] Integer/Float overflow. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
412+
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
413413
assert(cometExc.getMessage.contains(cometErrorPattern))
414414
assert(sparkExc.getMessage.contains("overflow"))
415415
case _ => fail("Exception should be thrown")
@@ -427,11 +427,10 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
427427
| _1 - _2
428428
| from tbl
429429
| """.stripMargin)
430-
431430
checkSparkMaybeThrows(res) match {
432431
case (Some(sparkExc), Some(cometExc)) =>
433432
val cometErrorPattern =
434-
"""org.apache.comet.CometNativeException: [ARITHMETIC_OVERFLOW] Integer/Float overflow. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
433+
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error."""
435434
assert(cometExc.getMessage.contains(cometErrorPattern))
436435
assert(sparkExc.getMessage.contains("overflow"))
437436
case _ => fail("Exception should be thrown")
@@ -453,7 +452,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
453452
checkSparkMaybeThrows(res) match {
454453
case (Some(sparkExc), Some(cometExc)) =>
455454
val cometErrorPattern =
456-
"""org.apache.comet.CometNativeException: [ARITHMETIC_OVERFLOW] Integer/Float overflow. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
455+
"""org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error"""
457456
assert(cometExc.getMessage.contains(cometErrorPattern))
458457
assert(sparkExc.getMessage.contains("overflow"))
459458
case _ => fail("Exception should be thrown")
@@ -476,7 +475,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
476475
checkSparkMaybeThrows(res) match {
477476
case (Some(sparkExc), Some(cometExc)) =>
478477
val cometErrorPattern =
479-
"""org.apache.comet.CometNativeException: [DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead"""
478+
"""org.apache.spark.SparkArithmeticException: [DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead"""
480479
assert(cometExc.getMessage.contains(cometErrorPattern))
481480
assert(sparkExc.getMessage.contains("Division by zero"))
482481
case _ => fail("Exception should be thrown")
@@ -639,7 +638,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
639638
test("cast timestamp and timestamp_ntz") {
640639
withSQLConf(
641640
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
642-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
641+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
643642
Seq(true, false).foreach { dictionaryEnabled =>
644643
withTempDir { dir =>
645644
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -661,7 +660,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
661660
test("cast timestamp and timestamp_ntz to string") {
662661
withSQLConf(
663662
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
664-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
663+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
665664
Seq(true, false).foreach { dictionaryEnabled =>
666665
withTempDir { dir =>
667666
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -683,7 +682,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
683682
test("cast timestamp and timestamp_ntz to long, date") {
684683
withSQLConf(
685684
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
686-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
685+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
687686
Seq(true, false).foreach { dictionaryEnabled =>
688687
withTempDir { dir =>
689688
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -771,7 +770,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
771770
}
772771

773772
test("date_trunc with timestamp_ntz") {
774-
withSQLConf(CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
773+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
775774
Seq(true, false).foreach { dictionaryEnabled =>
776775
withTempDir { dir =>
777776
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -806,7 +805,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
806805
}
807806

808807
test("date_trunc with format array") {
809-
withSQLConf(CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
808+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
810809
val numRows = 1000
811810
Seq(true, false).foreach { dictionaryEnabled =>
812811
withTempDir { dir =>
@@ -1473,7 +1472,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
14731472
Seq("true", "false").foreach { dictionary =>
14741473
withSQLConf(
14751474
"parquet.enable.dictionary" -> dictionary,
1476-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
1475+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
14771476
withParquetTable(
14781477
(-5 until 5).map(i => (i.toDouble + 0.3, i.toDouble + 0.8)),
14791478
"tbl",
@@ -2024,7 +2023,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
20242023
Seq(true, false).foreach { dictionary =>
20252024
withSQLConf(
20262025
"parquet.enable.dictionary" -> dictionary.toString,
2027-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
2026+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
20282027
val table = "test"
20292028
withTable(table) {
20302029
sql(s"create table $table(col string, a int, b float) using parquet")
@@ -2130,7 +2129,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
21302129
Seq(true, false).foreach { dictionary =>
21312130
withSQLConf(
21322131
"parquet.enable.dictionary" -> dictionary.toString,
2133-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
2132+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
21342133
val table = "test"
21352134
withTable(table) {
21362135
sql(s"create table $table(col string, a int, b float) using parquet")
@@ -2857,7 +2856,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
28572856
// this test requires native_comet scan due to unsigned u8/u16 issue
28582857
withSQLConf(
28592858
CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET,
2860-
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
2859+
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
28612860
Seq(true, false).foreach { dictionaryEnabled =>
28622861
withTempDir { dir =>
28632862
val path1 = new Path(dir.toURI.toString, "test1.parquet")

0 commit comments

Comments (0)