Skip to content

Commit 0b3db01

Browse files
committed
comments
1 parent 1b3bdf8 commit 0b3db01

File tree

1 file changed

+14
-11
lines changed

1 file changed

+14
-11
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala

Lines changed: 14 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -333,21 +333,24 @@ case class OneRowRelationExec() extends LeafExecNode
333333

334334
override val output: Seq[Attribute] = Nil
335335

336-
private val rdd: RDD[InternalRow] = session.sparkContext.parallelize(Seq(InternalRow.empty), 1)
336+
private val rdd: RDD[InternalRow] = {
337+
val numOutputRows = longMetric("numOutputRows")
338+
session
339+
.sparkContext
340+
.parallelize(Seq(InternalRow()), 1)
341+
.mapPartitionsInternal { _ =>
342+
val proj = UnsafeProjection.create(Seq.empty[Expression])
343+
Iterator(proj.apply(InternalRow.empty)).map { r =>
344+
numOutputRows += 1
345+
r
346+
}
347+
}
348+
}
337349

338350
override lazy val metrics = Map(
339351
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
340352

341-
protected override def doExecute(): RDD[InternalRow] = {
342-
val numOutputRows = longMetric("numOutputRows")
343-
rdd.mapPartitionsWithIndexInternal { (_, iter) =>
344-
val proj = UnsafeProjection.create(schema)
345-
iter.map { r =>
346-
numOutputRows += 1
347-
proj(r)
348-
}
349-
}
350-
}
353+
protected override def doExecute(): RDD[InternalRow] = rdd
351354

352355
override def simpleString(maxFields: Int): String = s"$nodeName[]"
353356

0 commit comments

Comments
 (0)