Skip to content

Commit dd9547a

Browse files
committed
cleanup
1 parent ac1eee8 commit dd9547a

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -333,15 +333,17 @@ case class OneRowRelationExec() extends LeafExecNode
333333

334334
override val output: Seq[Attribute] = Nil
335335

336-
val rdd = session.sparkContext.parallelize(Seq(InternalRow()), 1)
336+
private val emptyRow: InternalRow = InternalRow.empty
337+
338+
private val rdd = session.sparkContext.parallelize(Seq(emptyRow), 1)
337339

338340
override lazy val metrics = Map(
339341
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
340342

341343
protected override def doExecute(): RDD[InternalRow] = {
342344
val numOutputRows = longMetric("numOutputRows")
343345
val proj = UnsafeProjection.create(schema)
344-
val outputRow = proj(InternalRow.empty)
346+
val outputRow = proj(emptyRow)
345347
rdd.mapPartitionsWithIndexInternal { (index, iter) =>
346348
iter.map { r =>
347349
numOutputRows += 1
@@ -360,6 +362,4 @@ case class OneRowRelationExec() extends LeafExecNode
360362
override protected def doCanonicalize(): SparkPlan = {
361363
super.doCanonicalize().asInstanceOf[OneRowRelationExec].copy()
362364
}
363-
364-
// override def getStream: Option[SparkDataStream] = None
365365
}

0 commit comments

Comments (0)