
Commit c993f51

Commit message: comments
1 parent 597c7f5 commit c993f51


sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala

Lines changed: 9 additions & 9 deletions
@@ -333,30 +333,30 @@ case class OneRowRelationExec() extends LeafExecNode
 
   override val output: Seq[Attribute] = Nil
 
-  private val emptyRow: InternalRow = InternalRow.empty
-
-  private val rdd = session.sparkContext.parallelize(Seq(emptyRow), 1)
+  private val rdd: RDD[UnsafeRow] = {
+    val proj = UnsafeProjection.create(schema)
+    val emptyRow = proj(InternalRow.empty)
+    session.sparkContext.parallelize(Seq(emptyRow), 1)
+  }
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
 
   protected override def doExecute(): RDD[InternalRow] = {
     val numOutputRows = longMetric("numOutputRows")
-    val proj = UnsafeProjection.create(schema)
-    val outputRow = proj(emptyRow)
-    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
+    rdd.mapPartitionsWithIndexInternal { (_, iter) =>
       iter.map { r =>
         numOutputRows += 1
-        outputRow
+        r
       }
     }
   }
 
   override def simpleString(maxFields: Int): String = s"$nodeName[]"
 
-  override def inputRDD: RDD[InternalRow] = rdd
+  override def inputRDD: RDD[InternalRow] = rdd.asInstanceOf[RDD[InternalRow]]
 
-  override protected val createUnsafeProjection: Boolean = true
+  override protected val createUnsafeProjection: Boolean = false
 
   override protected def doCanonicalize(): SparkPlan = {
     super.doCanonicalize().asInstanceOf[OneRowRelationExec].copy()
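
For reference, the sketch below reproduces the pattern this change adopts, as a standalone program: project the single empty InternalRow into an UnsafeRow once, when the RDD is built, so rows can be passed through unchanged at execution time. This is only an illustration, not Spark source; the object name OneRowRddSketch, the local[1] master, and the defensive .copy() call are assumptions of the sketch.

// Standalone illustration of pre-projecting the empty row into an UnsafeRow
// at RDD construction time, instead of projecting it on every execution.
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.types.StructType

object OneRowRddSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("one-row-sketch").getOrCreate()

    // A relation with no output columns has an empty schema.
    val schema = StructType(Nil)

    // Project InternalRow.empty into an UnsafeRow exactly once, up front.
    val proj = UnsafeProjection.create(schema)
    val emptyRow: UnsafeRow = proj(InternalRow.empty).copy() // copy: the projection reuses its row buffer

    // One-partition RDD that already holds unsafe rows; downstream code can
    // emit the rows as-is, with no per-row projection.
    val rdd: RDD[UnsafeRow] = spark.sparkContext.parallelize(Seq(emptyRow), 1)

    println(rdd.count()) // prints 1
    spark.stop()
  }
}

This mirrors the new private val rdd: RDD[UnsafeRow] block in the diff: because the RDD already contains unsafe rows, the exec node can return them directly in doExecute and set createUnsafeProjection to false.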
