 package org.apache.spark.sql.execution.adaptive
 
+import java.util.Properties
+
 import scala.concurrent.{ExecutionContext, Future}
 import scala.concurrent.duration.Duration
 
-import org.apache.spark.MapOutputStatistics
-import org.apache.spark.broadcast
+import org.apache.spark.{broadcast, MapOutputStatistics, SparkContext}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
@@ -48,20 +49,30 @@ abstract class QueryStage extends UnaryExecNode {
 
   override def outputOrdering: Seq[SortOrder] = child.outputOrdering
 
+  def withLocalProperties[T](sc: SparkContext, properties: Properties)(body: => T): T = {
+    val oldProperties = sc.getLocalProperties
+    try {
+      sc.setLocalProperties(properties)
+      body
+    } finally {
+      sc.setLocalProperties(oldProperties)
+    }
+  }
+
   /**
    * Execute childStages and wait until all stages are completed. Use a thread pool to avoid
    * blocking on one child stage.
    */
   def executeChildStages(): Unit = {
-    val executionId = sqlContext.sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
+    val localProperties = sqlContext.sparkContext.getLocalProperties
 
     // Handle broadcast stages
     val broadcastQueryStages: Seq[BroadcastQueryStage] = child.collect {
       case bqs: BroadcastQueryStageInput => bqs.childStage
     }
     val broadcastFutures = broadcastQueryStages.map { queryStage =>
       Future {
-        SQLExecution.withExecutionId(sqlContext.sparkSession, executionId) {
+        withLocalProperties(sqlContext.sparkContext, localProperties) {
           queryStage.prepareBroadcast()
         }
       }(QueryStage.executionContext)
@@ -73,7 +84,7 @@ abstract class QueryStage extends UnaryExecNode {
     }
     val shuffleStageFutures = shuffleQueryStages.map { queryStage =>
       Future {
-        SQLExecution.withExecutionId(sqlContext.sparkSession, executionId) {
+        withLocalProperties(sqlContext.sparkContext, localProperties) {
           queryStage.execute()
         }
       }(QueryStage.executionContext)
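For context, here is a minimal standalone sketch (not part of the patch) of the problem this change addresses: SparkContext local properties are stored in a thread-local, so a Future running on a pool thread created earlier does not see properties set on the caller thread afterwards. The object name, the key "demo.key", and the single-thread executor below are illustrative only; the patch's withLocalProperties does the same capture-and-reapply in bulk via the package-private getLocalProperties/setLocalProperties, while this sketch sticks to SparkContext's public per-key API.

import java.util.concurrent.Executors
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration.Duration
import org.apache.spark.sql.SparkSession

object LocalPropertiesDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("demo").getOrCreate()
    val sc = spark.sparkContext

    // Single-thread pool so the same worker serves every Future below.
    implicit val ec: ExecutionContext =
      ExecutionContext.fromExecutor(Executors.newSingleThreadExecutor())

    // Create the worker *before* the property is set; it inherits a
    // snapshot of the caller's local properties at creation time.
    Await.ready(Future(()), Duration.Inf)

    sc.setLocalProperty("demo.key", "42")

    // The worker still holds its stale snapshot, not the caller's update.
    val stale = Future(sc.getLocalProperty("demo.key"))
    println(Await.result(stale, Duration.Inf)) // null

    // Fix: capture on the caller thread, reapply on the worker, which is
    // what withLocalProperties does for the whole Properties object.
    val captured = sc.getLocalProperty("demo.key")
    val carried = Future {
      sc.setLocalProperty("demo.key", captured)
      sc.getLocalProperty("demo.key")
    }
    println(Await.result(carried, Duration.Inf)) // 42

    spark.stop()
  }
}

The try/finally restore in the patch's withLocalProperties matters because QueryStage.executionContext threads are cached and reused across queries; without restoring the previous snapshot, one query's execution properties would leak into stages scheduled later on the same thread.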