@@ -333,30 +333,30 @@ case class OneRowRelationExec() extends LeafExecNode
 
   override val output: Seq[Attribute] = Nil
 
-  private val emptyRow: InternalRow = InternalRow.empty
-
-  private val rdd = session.sparkContext.parallelize(Seq(emptyRow), 1)
+  private val rdd: RDD[UnsafeRow] = {
+    val proj = UnsafeProjection.create(schema)
+    val emptyRow = proj(InternalRow.empty)
+    session.sparkContext.parallelize(Seq(emptyRow), 1)
+  }
 
   override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
 
   protected override def doExecute(): RDD[InternalRow] = {
     val numOutputRows = longMetric("numOutputRows")
-    val proj = UnsafeProjection.create(schema)
-    val outputRow = proj(emptyRow)
-    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
+    rdd.mapPartitionsWithIndexInternal { (_, iter) =>
       iter.map { r =>
         numOutputRows += 1
-        outputRow
+        r
       }
     }
   }
 
   override def simpleString(maxFields: Int): String = s"$nodeName[]"
 
-  override def inputRDD: RDD[InternalRow] = rdd
+  override def inputRDD: RDD[InternalRow] = rdd.asInstanceOf[RDD[InternalRow]]
 
-  override protected val createUnsafeProjection: Boolean = true
+  override protected val createUnsafeProjection: Boolean = false
 
   override protected def doCanonicalize(): SparkPlan = {
     super.doCanonicalize().asInstanceOf[OneRowRelationExec].copy()
0 commit comments