Skip to content

Commit ebd83a5

Browse files
srowen authored and cloud-fan committed
[SPARK-30009][CORE][SQL][FOLLOWUP] Remove OrderingUtil and Utils.nanSafeCompare{Doubles,Floats} and use java.lang.{Double,Float}.compare directly
### What changes were proposed in this pull request?

Follow up on apache#26654 (comment). Instead of OrderingUtil or Utils.nanSafeCompare{Doubles,Floats}, just use java.lang.{Double,Float}.compare directly. All work identically w.r.t. NaN when used to `compare`.

### Why are the changes needed?

Simplification of the previous change, which existed to support the Scala 2.13 migration.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

Existing tests.

Closes apache#26761 from srowen/SPARK-30009.2.

Authored-by: Sean Owen <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent c5f312a commit ebd83a5

File tree

9 files changed

+7
-134
lines changed

9 files changed

+7
-134
lines changed

core/src/main/scala-2.12/org/apache/spark/util/OrderingUtil.scala

Lines changed: 0 additions & 33 deletions
This file was deleted.

core/src/main/scala-2.13/org/apache/spark/util/OrderingUtil.scala

Lines changed: 0 additions & 34 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 0 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -1744,34 +1744,6 @@ private[spark] object Utils extends Logging {
17441744
hashAbs
17451745
}
17461746

1747-
/**
1748-
* NaN-safe version of `java.lang.Double.compare()` which allows NaN values to be compared
1749-
* according to semantics where NaN == NaN and NaN is greater than any non-NaN double.
1750-
*/
1751-
def nanSafeCompareDoubles(x: Double, y: Double): Int = {
1752-
val xIsNan: Boolean = java.lang.Double.isNaN(x)
1753-
val yIsNan: Boolean = java.lang.Double.isNaN(y)
1754-
if ((xIsNan && yIsNan) || (x == y)) 0
1755-
else if (xIsNan) 1
1756-
else if (yIsNan) -1
1757-
else if (x > y) 1
1758-
else -1
1759-
}
1760-
1761-
/**
1762-
* NaN-safe version of `java.lang.Float.compare()` which allows NaN values to be compared
1763-
* according to semantics where NaN == NaN and NaN is greater than any non-NaN float.
1764-
*/
1765-
def nanSafeCompareFloats(x: Float, y: Float): Int = {
1766-
val xIsNan: Boolean = java.lang.Float.isNaN(x)
1767-
val yIsNan: Boolean = java.lang.Float.isNaN(y)
1768-
if ((xIsNan && yIsNan) || (x == y)) 0
1769-
else if (xIsNan) 1
1770-
else if (yIsNan) -1
1771-
else if (x > y) 1
1772-
else -1
1773-
}
1774-
17751747
/**
17761748
* Returns the system properties map that is thread-safe to iterator over. It gets the
17771749
* properties which have been set explicitly, as well as those for which only a default value

core/src/test/scala/org/apache/spark/util/UtilsSuite.scala

Lines changed: 0 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -849,36 +849,6 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
849849
assert(buffer.toString === "st circular test circular")
850850
}
851851

852-
test("nanSafeCompareDoubles") {
853-
def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
854-
assert(Utils.nanSafeCompareDoubles(a, b) === JDouble.compare(a, b))
855-
assert(Utils.nanSafeCompareDoubles(b, a) === JDouble.compare(b, a))
856-
}
857-
shouldMatchDefaultOrder(0d, 0d)
858-
shouldMatchDefaultOrder(0d, 1d)
859-
shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
860-
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) === 0)
861-
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.PositiveInfinity) === 1)
862-
assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NegativeInfinity) === 1)
863-
assert(Utils.nanSafeCompareDoubles(Double.PositiveInfinity, Double.NaN) === -1)
864-
assert(Utils.nanSafeCompareDoubles(Double.NegativeInfinity, Double.NaN) === -1)
865-
}
866-
867-
test("nanSafeCompareFloats") {
868-
def shouldMatchDefaultOrder(a: Float, b: Float): Unit = {
869-
assert(Utils.nanSafeCompareFloats(a, b) === JFloat.compare(a, b))
870-
assert(Utils.nanSafeCompareFloats(b, a) === JFloat.compare(b, a))
871-
}
872-
shouldMatchDefaultOrder(0f, 0f)
873-
shouldMatchDefaultOrder(1f, 1f)
874-
shouldMatchDefaultOrder(Float.MinValue, Float.MaxValue)
875-
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NaN) === 0)
876-
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.PositiveInfinity) === 1)
877-
assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NegativeInfinity) === 1)
878-
assert(Utils.nanSafeCompareFloats(Float.PositiveInfinity, Float.NaN) === -1)
879-
assert(Utils.nanSafeCompareFloats(Float.NegativeInfinity, Float.NaN) === -1)
880-
}
881-
882852
test("isDynamicAllocationEnabled") {
883853
val conf = new SparkConf()
884854
conf.set("spark.master", "yarn")

core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ import java.util.concurrent.TimeUnit
2323

2424
import org.apache.spark.SparkFunSuite
2525
import org.apache.spark.internal.Logging
26-
import org.apache.spark.util.OrderingUtil
2726
import org.apache.spark.util.Utils.timeIt
2827
import org.apache.spark.util.random.XORShiftRandom
2928

@@ -60,7 +59,7 @@ class SorterSuite extends SparkFunSuite with Logging {
6059

6160
Arrays.sort(keys)
6261
new Sorter(new KVArraySortDataFormat[Double, Number])
63-
.sort(keyValueArray, 0, keys.length, OrderingUtil.compareDouble)
62+
.sort(keyValueArray, 0, keys.length, (x, y) => java.lang.Double.compare(x, y))
6463

6564
keys.zipWithIndex.foreach { case (k, i) =>
6665
assert(k === keyValueArray(2 * i))

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -625,8 +625,8 @@ class CodegenContext extends Logging {
625625
def genComp(dataType: DataType, c1: String, c2: String): String = dataType match {
626626
// java boolean doesn't support > or < operator
627627
case BooleanType => s"($c1 == $c2 ? 0 : ($c1 ? 1 : -1))"
628-
case DoubleType => s"org.apache.spark.util.Utils.nanSafeCompareDoubles($c1, $c2)"
629-
case FloatType => s"org.apache.spark.util.Utils.nanSafeCompareFloats($c1, $c2)"
628+
case DoubleType => s"java.lang.Double.compare($c1, $c2)"
629+
case FloatType => s"java.lang.Float.compare($c1, $c2)"
630630
// use c1 - c2 may overflow
631631
case dt: DataType if isPrimitiveType(dt) => s"($c1 > $c2 ? 1 : $c1 < $c2 ? -1 : 0)"
632632
case BinaryType => s"org.apache.spark.sql.catalyst.util.TypeUtils.compareBinary($c1, $c2)"

sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ class DoubleType private() extends FractionalType {
3939
private[sql] val numeric = implicitly[Numeric[Double]]
4040
private[sql] val fractional = implicitly[Fractional[Double]]
4141
private[sql] val ordering =
42-
(x: Double, y: Double) => Utils.nanSafeCompareDoubles(x, y)
42+
(x: Double, y: Double) => java.lang.Double.compare(x, y)
4343
private[sql] val asIntegral = DoubleAsIfIntegral
4444

4545
override private[sql] def exactNumeric = DoubleExactNumeric

sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ class FloatType private() extends FractionalType {
3939
private[sql] val numeric = implicitly[Numeric[Float]]
4040
private[sql] val fractional = implicitly[Fractional[Float]]
4141
private[sql] val ordering =
42-
(x: Float, y: Float) => Utils.nanSafeCompareFloats(x, y)
42+
(x: Float, y: Float) => java.lang.Float.compare(x, y)
4343
private[sql] val asIntegral = FloatAsIfIntegral
4444

4545
override private[sql] def exactNumeric = FloatExactNumeric

sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ import scala.math.Numeric._
2121
import scala.math.Ordering
2222

2323
import org.apache.spark.sql.types.Decimal.DecimalIsConflicted
24-
import org.apache.spark.util.OrderingUtil
2524

2625
object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOrdering {
2726
private def checkOverflow(res: Int, x: Byte, y: Byte, op: String): Unit = {
@@ -149,7 +148,7 @@ object FloatExactNumeric extends FloatIsFractional {
149148
}
150149
}
151150

152-
override def compare(x: Float, y: Float): Int = OrderingUtil.compareFloat(x, y)
151+
override def compare(x: Float, y: Float): Int = java.lang.Float.compare(x, y)
153152
}
154153

155154
object DoubleExactNumeric extends DoubleIsFractional {
@@ -177,7 +176,7 @@ object DoubleExactNumeric extends DoubleIsFractional {
177176
}
178177
}
179178

180-
override def compare(x: Double, y: Double): Int = OrderingUtil.compareDouble(x, y)
179+
override def compare(x: Double, y: Double): Int = java.lang.Double.compare(x, y)
181180
}
182181

183182
object DecimalExactNumeric extends DecimalIsConflicted {

0 commit comments

Comments (0)