diff --git a/docs/source/user-guide/compatibility.md b/docs/source/user-guide/compatibility.md
index 23e31d0bc5..a54ad91b00 100644
--- a/docs/source/user-guide/compatibility.md
+++ b/docs/source/user-guide/compatibility.md
@@ -166,6 +166,7 @@ The following cast operations are generally compatible with Spark except for the
 | long | integer | |
 | long | float | |
 | long | double | |
+| long | decimal | |
 | long | string | |
 | float | boolean | |
 | float | byte | |
@@ -212,7 +213,6 @@ The following cast operations are not compatible with Spark for all inputs and a
 | From Type | To Type | Notes |
 |-|-|-|
 | integer | decimal | No overflow check |
-| long | decimal | No overflow check |
 | float | decimal | There can be rounding differences |
 | double | decimal | There can be rounding differences |
 | string | float | Does not support inputs ending with 'd' or 'f'. Does not support 'inf'. Does not support ANSI mode. |
diff --git a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
index 337eae11db..18ac0fd812 100644
--- a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
+++ b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
@@ -253,10 +253,12 @@ object CometCast {
         Compatible()
       case DataTypes.ByteType | DataTypes.ShortType | DataTypes.IntegerType =>
         Compatible()
-      case DataTypes.FloatType | DataTypes.DoubleType =>
+      case d: DecimalType if (d.scale == 0 && d.precision == 19) =>
         Compatible()
       case _: DecimalType =>
         Incompatible(Some("No overflow check"))
+      case DataTypes.FloatType | DataTypes.DoubleType =>
+        Compatible()
       case _ => Unsupported
     }

diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index 42058036e4..866aef8d62 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -115,7 +115,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   test("Integral Division Overflow Handling Matches Spark Behavior") {
     withTable("t1") {
-      withSQLConf(CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
+      withSQLConf(CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "false") {
        val value = Long.MinValue
        sql("create table t1(c1 long, c2 short) using parquet")
        sql(s"insert into t1 values($value, -1)")