@@ -29,6 +29,7 @@ import org.apache.spark.sql.types._
 
 import org.apache.comet.DataTypeSupport.isComplexType
 import org.apache.comet.testing.{DataGenOptions, ParquetGenerator, SchemaGenOptions}
+import org.apache.comet.testing.FuzzDataGenerator.{doubleNaNLiteral, floatNaNLiteral}
 
 class CometFuzzTestSuite extends CometFuzzTestBase {
 
@@ -71,8 +72,20 @@ class CometFuzzTestSuite extends CometFuzzTestBase {
     // Construct the string for the default value based on the column type.
     val defaultValueString = defaultValueType match {
       // These explicit type definitions for TINYINT, SMALLINT, FLOAT, DOUBLE, and DATE are only needed for 3.4.
-      case "TINYINT" | "SMALLINT" | "FLOAT" | "DOUBLE" =>
+      case "TINYINT" | "SMALLINT" =>
         s"$defaultValueType(${defaultValueRow.get(0)})"
+      case "FLOAT" =>
+        if (Float.NaN.equals(defaultValueRow.get(0))) {
+          floatNaNLiteral
+        } else {
+          s"$defaultValueType(${defaultValueRow.get(0)})"
+        }
+      case "DOUBLE" =>
+        if (Double.NaN.equals(defaultValueRow.get(0))) {
+          doubleNaNLiteral
+        } else {
+          s"$defaultValueType(${defaultValueRow.get(0)})"
+        }
       case "DATE" => s"$defaultValueType('${defaultValueRow.get(0)}')"
       case "STRING" => s"'${defaultValueRow.get(0)}'"
       case "TIMESTAMP" | "TIMESTAMP_NTZ" => s"TIMESTAMP '${defaultValueRow.get(0)}'"
@@ -101,7 +114,7 @@ class CometFuzzTestSuite extends CometFuzzTestBase {
           .asInstanceOf[Array[Byte]]
           .sameElements(spark.sql(sql).collect()(0).get(0).asInstanceOf[Array[Byte]]))
       } else {
-        assert(defaultValueRow.get(0) == spark.sql(sql).collect()(0).get(0))
+        assert(defaultValueRow.get(0).equals(spark.sql(sql).collect()(0).get(0)))
       }
     }
   }
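
A note on the literal helpers: interpolating a Scala NaN into the `FLOAT(...)`/`DOUBLE(...)` syntax yields text like `FLOAT(NaN)`, which Spark resolves as a reference to a column named `NaN` rather than a value, hence the dedicated `floatNaNLiteral`/`doubleNaNLiteral` constants. A minimal sketch, assuming a local SparkSession, showing one valid way to spell a NaN literal in Spark SQL (the `NaNLiteralSketch` object is illustrative; the exact strings behind the helpers are defined in `FuzzDataGenerator` and may differ):

```scala
// Minimal sketch, assuming a local SparkSession. Shows one valid way to
// express a NaN literal in Spark SQL; the actual strings behind
// floatNaNLiteral and doubleNaNLiteral live in FuzzDataGenerator.
import org.apache.spark.sql.SparkSession

object NaNLiteralSketch extends App {
  val spark = SparkSession.builder().master("local[1]").appName("nan-literal").getOrCreate()

  // FLOAT(NaN) fails analysis (NaN is treated as a column name), but casting
  // the string 'NaN' parses fine and evaluates to an IEEE-754 NaN.
  val f = spark.sql("SELECT float('NaN')").collect()(0).getFloat(0)
  val d = spark.sql("SELECT double('NaN')").collect()(0).getDouble(0)
  println(f.isNaN && d.isNaN) // true

  spark.stop()
}
```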
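The assertion change in the last hunk matters for the same reason: `Row.get(0)` returns boxed values, and Scala's `==` on floating-point values follows IEEE-754, where NaN is never equal to itself, while the boxed `java.lang.Float`/`java.lang.Double` `equals` compares bit patterns and therefore treats NaN as equal to NaN. A standalone sketch (the `NaNEqualitySketch` object is illustrative):

```scala
// Plain-Scala sketch (no Comet or Spark dependencies) of the ==/equals
// distinction the patch relies on.
object NaNEqualitySketch extends App {
  // Primitive comparison follows IEEE-754: NaN is never equal to itself.
  println(Double.NaN == Double.NaN) // false

  // Boxed java.lang.Double.equals compares bit patterns (doubleToLongBits),
  // so a boxed NaN equals a boxed NaN.
  println(Double.NaN.equals(Double.NaN)) // true

  // Row.get(0) returns a boxed value typed as Any, which is why the match
  // cases use Float.NaN.equals(defaultValueRow.get(0)) instead of ==.
  val fromRow: Any = Float.NaN
  println(Float.NaN.equals(fromRow)) // true
  println(Float.NaN == fromRow) // false: numeric equality still sees NaN != NaN
}
```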