@@ -22,7 +22,7 @@ package org.apache.spark.sql.comet
 import org.apache.spark.TaskContext
 import org.apache.spark.rdd.{ParallelCollectionRDD, RDD}
 import org.apache.spark.serializer.Serializer
-import org.apache.spark.sql.catalyst.expressions.{Attribute, NamedExpression, SortOrder}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, NamedExpression, SortOrder}
 import org.apache.spark.sql.catalyst.util.truncatedString
 import org.apache.spark.sql.comet.execution.shuffle.{CometShuffledBatchRDD, CometShuffleExchangeExec}
 import org.apache.spark.sql.execution.{SparkPlan, TakeOrderedAndProjectExec, UnaryExecNode, UnsafeRowSerializer}
@@ -98,10 +98,15 @@ case class CometTakeOrderedAndProjectExec(
     child: SparkPlan)
   extends CometExec
   with UnaryExecNode {
+
+  override def producedAttributes: AttributeSet = outputSet ++ AttributeSet(projectList)
+
   private lazy val writeMetrics =
     SQLShuffleWriteMetricsReporter.createShuffleWriteMetrics(sparkContext)
+
   private lazy val readMetrics =
     SQLShuffleReadMetricsReporter.createShuffleReadMetrics(sparkContext)
+
   override lazy val metrics: Map[String, SQLMetric] = Map(
     "dataSize" -> SQLMetrics.createSizeMetric(sparkContext, "data size"),
     "numPartitions" -> SQLMetrics.createMetric(