18 changes: 1 addition & 17 deletions spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -138,7 +138,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
     classOf[Sqrt] -> CometScalarFunction("sqrt"),
     classOf[Subtract] -> CometSubtract,
     classOf[Tan] -> CometScalarFunction("tan"),
-    // TODO UnaryMinus
+    classOf[UnaryMinus] -> CometUnaryMinus,
     classOf[Unhex] -> CometUnhex)
 
   private val mapExpressions: Map[Class[_ <: Expression], CometExpressionSerde[_]] = Map(
@@ -942,22 +942,6 @@ object QueryPlanSerde extends Logging with CometExprShim {
           None
         }
 
-      case UnaryMinus(child, failOnError) =>
-        val childExpr = exprToProtoInternal(child, inputs, binding)
-        if (childExpr.isDefined) {
-          val builder = ExprOuterClass.UnaryMinus.newBuilder()
-          builder.setChild(childExpr.get)
-          builder.setFailOnError(failOnError)
-          Some(
-            ExprOuterClass.Expr
-              .newBuilder()
-              .setUnaryMinus(builder)
-              .build())
-        } else {
-          withInfo(expr, child)
-          None
-        }
-
       // With Spark 3.4, CharVarcharCodegenUtils.readSidePadding gets called to pad spaces for
       // char types.
       // See https://github.com/apache/spark/pull/38151
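Note (illustrative, not part of the diff): the hand-written case UnaryMinus(...) arm above becomes redundant because expressions registered in the serde map are dispatched by class. The sketch below shows the shape of that lookup; the map name exprSerdeMap and the helper name are placeholders, since the real dispatch code in QueryPlanSerde is not visible in these hunks.

// Illustrative sketch only: class-keyed lookup that replaces per-expression match arms.
// `exprSerdeMap` is a placeholder name for the map extended above.
def convertViaSerdeMap(
    expr: Expression,
    inputs: Seq[Attribute],
    binding: Boolean): Option[ExprOuterClass.Expr] =
  exprSerdeMap.get(expr.getClass) match {
    case Some(serde) =>
      // The map is typed CometExpressionSerde[_], so an unchecked cast is needed to call convert.
      serde.asInstanceOf[CometExpressionSerde[Expression]].convert(expr, inputs, binding)
    case None =>
      None // fall back to the remaining hand-written cases
  }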
24 changes: 23 additions & 1 deletion spark/src/main/scala/org/apache/comet/serde/arithmetic.scala
@@ -21,7 +21,7 @@ package org.apache.comet.serde
 
 import scala.math.min
 
-import org.apache.spark.sql.catalyst.expressions.{Add, Attribute, Cast, Divide, EmptyRow, EqualTo, EvalMode, Expression, If, IntegralDivide, Literal, Multiply, Remainder, Round, Subtract}
+import org.apache.spark.sql.catalyst.expressions.{Add, Attribute, Cast, Divide, EmptyRow, EqualTo, EvalMode, Expression, If, IntegralDivide, Literal, Multiply, Remainder, Round, Subtract, UnaryMinus}
 import org.apache.spark.sql.types.{ByteType, DataType, DecimalType, DoubleType, FloatType, IntegerType, LongType, ShortType}
 
 import org.apache.comet.CometSparkSessionExtensions.withInfo
@@ -370,3 +370,25 @@ object CometRound extends CometExpressionSerde[Round] {
 
   }
 }
+object CometUnaryMinus extends CometExpressionSerde[UnaryMinus] {
+
+  override def convert(
+      expr: UnaryMinus,
+      inputs: Seq[Attribute],
+      binding: Boolean): Option[ExprOuterClass.Expr] = {
+    val childExpr = exprToProtoInternal(expr.child, inputs, binding)
+    if (childExpr.isDefined) {
+      val builder = ExprOuterClass.UnaryMinus.newBuilder()
+      builder.setChild(childExpr.get)
+      builder.setFailOnError(expr.failOnError)
+      Some(
+        ExprOuterClass.Expr
+          .newBuilder()
+          .setUnaryMinus(builder)
+          .build())
+    } else {
+      withInfo(expr, expr.child)
+      None
+    }
+  }
+}
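For reviewers who want to exercise the moved code path, a plain negation query is enough to build a Spark UnaryMinus expression and reach CometUnaryMinus through the serde map. The test below is a hypothetical sketch, not code from this PR: the withParquetTable and checkSparkAnswerAndOperator helpers from Comet's test base, the table name, and the column names are assumptions.

// Hypothetical test sketch (assumed test-base helpers; not included in this PR).
test("unary minus is serialized by CometUnaryMinus") {
  // Two numeric columns so both integer and long negation go through the new serde.
  withParquetTable((-5 until 5).map(i => (i, i.toLong)), "tbl") {
    // failOnError is forwarded to the proto, so ANSI overflow behaviour should match Spark.
    checkSparkAnswerAndOperator("SELECT -_1, -_2 FROM tbl")
  }
}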