Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 18 additions & 7 deletions core/src/main/scala/org/apache/spark/executor/Executor.scala
Original file line number Diff line number Diff line change
Expand Up @@ -930,21 +930,17 @@ private[spark] class Executor(
}

/**
 * Installs task-identifying entries into the SLF4J MDC (Mapped Diagnostic
 * Context) for the current thread, so log lines emitted while this task runs
 * carry its properties.
 *
 * @param taskName human-readable task name, stored under `taskNameMDCKey`
 * @param mdc      user-supplied (key, value) pairs to put into the MDC
 */
private def setMDCForTask(taskName: String, mdc: Seq[(String, String)]): Unit = {
  // No-op when the logging backend does not support MDC (probed once via
  // Executor.mdcIsSupported instead of catching NoSuchFieldError per call).
  if (Executor.mdcIsSupported) {
    mdc.foreach { case (key, value) => MDC.put(key, value) }
    // Put the task name last to avoid the user overriding the taskName key.
    MDC.put(taskNameMDCKey, taskName)
  }
}

/**
 * Removes the MDC entries installed by `setMDCForTask`, so they do not leak
 * into log lines of subsequent tasks reusing this thread.
 *
 * @param taskName task name (unused here; kept for symmetry with
 *                 `setMDCForTask`)
 * @param mdc      the same (key, value) pairs that were installed
 */
private def cleanMDCForTask(taskName: String, mdc: Seq[(String, String)]): Unit = {
  // Mirror of setMDCForTask: skip entirely when MDC is unsupported.
  if (Executor.mdcIsSupported) {
    mdc.foreach { case (key, _) => MDC.remove(key) }
    MDC.remove(taskNameMDCKey)
  }
}

Expand Down Expand Up @@ -1299,7 +1295,7 @@ private[spark] class Executor(
}
}

private[spark] object Executor {
private[spark] object Executor extends Logging {
// This is reserved for internal use by components that need to read task properties before a
// task is fully deserialized. When possible, the TaskContext.getLocalProperty call should be
// used instead.
Expand All @@ -1308,6 +1304,21 @@ private[spark] object Executor {
// Used to store executorSource, for local mode only.
// NOTE(review): mutable and nullable by design — presumably assigned during
// local-mode executor setup and read elsewhere; confirm readers null-check.
var executorSourceLocalModeOnly: ExecutorSource = null

// Probes once (lazily, thread-safely via `lazy val`) whether the logging
// backend supports SLF4J's MDC. Catching `Throwable` rather than NonFatal is
// deliberate: an unsupported backend can fail with linkage/class
// initialization errors (e.g. NoClassDefFoundError), which are fatal Errors.
lazy val mdcIsSupported: Boolean = {
  try {
    // This tests if any class initialization error is thrown. A nanoTime-based
    // key is used to avoid colliding with (and removing) a real MDC entry.
    val testKey = System.nanoTime().toString
    MDC.put(testKey, "testValue")
    MDC.remove(testKey)

    true
  } catch {
    case t: Throwable =>
      logInfo("MDC is not supported.", t)
      false
  }
}

/**
* Whether a `Throwable` thrown from a task is a fatal error. We will use this to decide whether
* to kill the executor.
Expand Down