diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index a940bd90d718f..bdfe227797a66 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -930,21 +930,17 @@ private[spark] class Executor(
   }
 
   private def setMDCForTask(taskName: String, mdc: Seq[(String, String)]): Unit = {
-    try {
+    if (Executor.mdcIsSupported) {
       mdc.foreach { case (key, value) => MDC.put(key, value) }
       // avoid overriding the takName by the user
       MDC.put(taskNameMDCKey, taskName)
-    } catch {
-      case _: NoSuchFieldError => logInfo("MDC is not supported.")
     }
   }
 
   private def cleanMDCForTask(taskName: String, mdc: Seq[(String, String)]): Unit = {
-    try {
+    if (Executor.mdcIsSupported) {
       mdc.foreach { case (key, _) => MDC.remove(key) }
       MDC.remove(taskNameMDCKey)
-    } catch {
-      case _: NoSuchFieldError => logInfo("MDC is not supported.")
     }
   }
 
@@ -1299,7 +1295,7 @@ private[spark] class Executor(
   }
 }
 
-private[spark] object Executor {
+private[spark] object Executor extends Logging {
   // This is reserved for internal use by components that need to read task properties before a
   // task is fully deserialized. When possible, the TaskContext.getLocalProperty call should be
   // used instead.
@@ -1308,6 +1304,21 @@ private[spark] object Executor {
   // Used to store executorSource, for local mode only
   var executorSourceLocalModeOnly: ExecutorSource = null
 
+  lazy val mdcIsSupported: Boolean = {
+    try {
+      // This tests if any class initialization error is thrown
+      val testKey = System.nanoTime().toString
+      MDC.put(testKey, "testValue")
+      MDC.remove(testKey)
+
+      true
+    } catch {
+      case t: Throwable =>
+        logInfo("MDC is not supported.", t)
+        false
+    }
+  }
+
   /**
    * Whether a `Throwable` thrown from a task is a fatal error. We will use this to decide whether
    * to kill the executor.