Skip to content

Commit 4fe86f0

Browse files
authored
Merge pull request #1 from kimoonkim/pr-414
Fix executor env to include simple authn
2 parents 6052a13 + 91e364c commit 4fe86f0

File tree

3 files changed

+10
-20
lines changed

3 files changed

+10
-20
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,6 @@ package object constants {
   private[spark] val ENV_DRIVER_URL = "SPARK_DRIVER_URL"
   private[spark] val ENV_EXECUTOR_CORES = "SPARK_EXECUTOR_CORES"
   private[spark] val ENV_EXECUTOR_MEMORY = "SPARK_EXECUTOR_MEMORY"
-  private[spark] val ENV_EXECUTOR_JAVA_OPTS = "SPARK_EXECUTOR_JAVA_OPTS"
   private[spark] val ENV_APPLICATION_ID = "SPARK_APPLICATION_ID"
   private[spark] val ENV_EXECUTOR_ID = "SPARK_EXECUTOR_ID"
   private[spark] val ENV_EXECUTOR_POD_IP = "SPARK_EXECUTOR_POD_IP"

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterSchedulerBackend.scala

Lines changed: 9 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -72,14 +72,12 @@ private[spark] class KubernetesClusterSchedulerBackend(
   private val executorsToRemove = Collections.newSetFromMap[String](
     new ConcurrentHashMap[String, java.lang.Boolean]()).asScala

-  private val executorExtraJavaOpts = conf.get(
-    org.apache.spark.internal.config.EXECUTOR_JAVA_OPTIONS)
   private val executorExtraClasspath = conf.get(
     org.apache.spark.internal.config.EXECUTOR_CLASS_PATH)
   private val executorJarsDownloadDir = conf.get(INIT_CONTAINER_JARS_DOWNLOAD_LOCATION)
   private val isKerberosEnabled = conf.get(KUBERNETES_KERBEROS_SUPPORT)
   private val maybeSimpleAuthentication =
-    if (isKerberosEnabled) s" -D$HADOOP_SECURITY_AUTHENTICATION=simple" else ""
+    if (isKerberosEnabled) Some(s"-D$HADOOP_SECURITY_AUTHENTICATION=simple") else None
   private val executorLabels = ConfigurationUtils.combinePrefixedKeyValuePairsWithDeprecatedConf(
     conf,
     KUBERNETES_EXECUTOR_LABEL_PREFIX,
@@ -455,27 +453,21 @@ private[spark] class KubernetesClusterSchedulerBackend(
     val executorCpuQuantity = new QuantityBuilder(false)
       .withAmount(executorCores.toString)
       .build()
-    val executorJavaOpts = executorExtraJavaOpts.getOrElse("") + maybeSimpleAuthentication
-    val executorJavaOptsEnv = if (executorJavaOpts.nonEmpty) {
-      Some(new EnvVarBuilder()
-        .withName(ENV_EXECUTOR_JAVA_OPTS)
-        .withValue(executorJavaOpts)
-        .build()) } else None
     val executorExtraClasspathEnv = executorExtraClasspath.map { cp =>
       new EnvVarBuilder()
         .withName(ENV_EXECUTOR_EXTRA_CLASSPATH)
         .withValue(cp)
         .build()
     }
     val executorExtraJavaOptionsEnv = conf
-      .get(org.apache.spark.internal.config.EXECUTOR_JAVA_OPTIONS)
-      .map { opts =>
-        val delimitedOpts = Utils.splitCommandString(opts)
-        delimitedOpts.zipWithIndex.map {
-          case (opt, index) =>
-            new EnvVarBuilder().withName(s"$ENV_JAVA_OPT_PREFIX$index").withValue(opt).build()
-        }
-      }.getOrElse(Seq.empty[EnvVar])
+      .get(org.apache.spark.internal.config.EXECUTOR_JAVA_OPTIONS)
+      .map { opts =>
+        val delimitedOpts = Utils.splitCommandString(opts) ++ maybeSimpleAuthentication
+        delimitedOpts.zipWithIndex.map {
+          case (opt, index) =>
+            new EnvVarBuilder().withName(s"$ENV_JAVA_OPT_PREFIX$index").withValue(opt).build()
+        }
+      }.getOrElse(Seq.empty[EnvVar])
     val executorEnv = (Seq(
       (ENV_EXECUTOR_PORT, executorPort.toString),
       (ENV_DRIVER_URL, driverUrl),
@@ -516,8 +508,6 @@ private[spark] class KubernetesClusterSchedulerBackend(
         .addToLimits("memory", executorMemoryLimitQuantity)
         .addToRequests("cpu", executorCpuQuantity)
         .endResources()
-      .addToEnv(executorExtraClasspathEnv.toSeq: _*)
-      .addToEnv(executorJavaOptsEnv.toSeq: _*)
       .addAllToEnv(executorEnv.asJava)
       .withPorts(requiredPorts.asJava)
       .build()

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,5 +104,6 @@ private[spark] class SparkDockerImageBuilder
       name,
       dockerFile,
       new LoggingBuildHandler())
+    logInfo(s"Built docker image for $name")
   }
 }

0 commit comments

Comments (0)