Skip to content

Commit 7add4e9

Browse files
mgaido91 authored and gatorsmile committed
[SPARK-21738] Thriftserver doesn't cancel jobs when session is closed
## What changes were proposed in this pull request? When a session is closed the Thriftserver doesn't cancel the jobs which may still be running. This is a huge waste of resources. This PR address the problem canceling the pending jobs when a session is closed. ## How was this patch tested? The patch was tested manually. Author: Marco Gaido <[email protected]> Closes apache#18951 from mgaido91/SPARK-21738.
1 parent 1cce1a3 commit 7add4e9

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -71,9 +71,9 @@ private[hive] class SparkExecuteStatementOperation(
7171

7272
def close(): Unit = {
7373
// RDDs will be cleaned automatically upon garbage collection.
74-
sqlContext.sparkContext.clearJobGroup()
7574
logDebug(s"CLOSING $statementId")
7675
cleanup(OperationState.CLOSED)
76+
sqlContext.sparkContext.clearJobGroup()
7777
}
7878

7979
def addNonNullColumnValue(from: SparkRow, to: ArrayBuffer[Any], ordinal: Int) {
@@ -273,9 +273,6 @@ private[hive] class SparkExecuteStatementOperation(
273273

274274
override def cancel(): Unit = {
275275
logInfo(s"Cancel '$statement' with $statementId")
276-
if (statementId != null) {
277-
sqlContext.sparkContext.cancelJobGroup(statementId)
278-
}
279276
cleanup(OperationState.CANCELED)
280277
}
281278

@@ -287,6 +284,9 @@ private[hive] class SparkExecuteStatementOperation(
287284
backgroundHandle.cancel(true)
288285
}
289286
}
287+
if (statementId != null) {
288+
sqlContext.sparkContext.cancelJobGroup(statementId)
289+
}
290290
}
291291
}
292292

0 commit comments

Comments (0)