Skip to content

Commit 88e2b0e

Browse files
committed
Revert "Use a list of environment variables for JVM options."
This reverts commit 34d7af2.
1 parent ec8e9fc commit 88e2b0e

File tree

8 files changed

+30
-150
lines changed

8 files changed

+30
-150
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,6 @@ package object constants {
6969
private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR"
7070
private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES"
7171
private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY"
72-
private[spark] val ENV_JAVA_OPT_PREFIX = "SPARK_JAVA_OPT_"
7372

7473
// Bootstrapping dependencies with the init-container
7574
private[spark] val INIT_CONTAINER_ANNOTATION = "pod.beta.kubernetes.io/init-containers"

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/Client.scala

Lines changed: 12 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,9 @@ package org.apache.spark.deploy.kubernetes.submit
1818

1919
import java.util.{Collections, UUID}
2020

21-
import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVar, EnvVarBuilder, OwnerReferenceBuilder, PodBuilder}
21+
import io.fabric8.kubernetes.api.model.{ContainerBuilder, OwnerReferenceBuilder, PodBuilder}
2222
import io.fabric8.kubernetes.client.KubernetesClient
2323
import scala.collection.mutable
24-
import scala.collection.JavaConverters._
2524

2625
import org.apache.spark.SparkConf
2726
import org.apache.spark.deploy.kubernetes.config._
@@ -93,21 +92,18 @@ private[spark] class Client(
9392
currentDriverSpec = nextStep.configureDriver(currentDriverSpec)
9493
}
9594
val resolvedDriverJavaOpts = currentDriverSpec
96-
.driverSparkConf
97-
// We don't need this anymore since we just set the JVM options on the environment
98-
.remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
99-
.getAll
100-
.map {
101-
case (confKey, confValue) => s"-D$confKey=$confValue"
102-
} ++ driverJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
103-
val driverJavaOptsEnvs: Seq[EnvVar] = resolvedDriverJavaOpts.zipWithIndex.map {
104-
case (option, index) => new EnvVarBuilder()
105-
.withName(s"$ENV_JAVA_OPT_PREFIX$index")
106-
.withValue(option)
107-
.build()
108-
}
95+
.driverSparkConf
96+
// We don't need this anymore since we just set the JVM options on the environment
97+
.remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
98+
.getAll
99+
.map {
100+
case (confKey, confValue) => s"-D$confKey=$confValue"
101+
}.mkString(" ") + driverJavaOptions.map(" " + _).getOrElse("")
109102
val resolvedDriverContainer = new ContainerBuilder(currentDriverSpec.driverContainer)
110-
.addAllToEnv(driverJavaOptsEnvs.asJava)
103+
.addNewEnv()
104+
.withName(ENV_DRIVER_JAVA_OPTS)
105+
.withValue(resolvedDriverJavaOpts)
106+
.endEnv()
111107
.build()
112108
val resolvedDriverPod = new PodBuilder(currentDriverSpec.driverPod)
113109
.editSpec()

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala

Lines changed: 11 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
135135
.set("spark.logConf", "true")
136136
.set(
137137
org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS,
138-
"-XX:+HeapDumpOnOutOfMemoryError -XX:+PrintGCDetails")
138+
"-XX:+|-HeapDumpOnOutOfMemoryError")
139139
val submissionClient = new Client(
140140
submissionSteps,
141141
sparkConf,
@@ -147,22 +147,15 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
147147
val createdPod = createdPodArgumentCaptor.getValue
148148
val driverContainer = Iterables.getOnlyElement(createdPod.getSpec.getContainers)
149149
assert(driverContainer.getName === SecondTestConfigurationStep.containerName)
150-
val driverJvmOptsEnvs = driverContainer.getEnv.asScala.filter { env =>
151-
env.getName.startsWith(ENV_JAVA_OPT_PREFIX)
152-
}.sortBy(_.getName)
153-
assert(driverJvmOptsEnvs.size === 4)
154-
155-
val expectedJvmOptsValues = Seq(
156-
"-Dspark.logConf=true",
150+
val driverJvmOptsEnv = Iterables.getOnlyElement(driverContainer.getEnv)
151+
assert(driverJvmOptsEnv.getName === ENV_DRIVER_JAVA_OPTS)
152+
val driverJvmOpts = driverJvmOptsEnv.getValue.split(" ").toSet
153+
assert(driverJvmOpts.contains("-Dspark.logConf=true"))
154+
assert(driverJvmOpts.contains(
157155
s"-D${SecondTestConfigurationStep.sparkConfKey}=" +
158-
s"${SecondTestConfigurationStep.sparkConfValue}",
159-
s"-XX:+HeapDumpOnOutOfMemoryError",
160-
s"-XX:+PrintGCDetails")
161-
driverJvmOptsEnvs.zip(expectedJvmOptsValues).zipWithIndex.foreach {
162-
case ((resolvedEnv, expectedJvmOpt), index) =>
163-
assert(resolvedEnv.getName === s"$ENV_JAVA_OPT_PREFIX$index")
164-
assert(resolvedEnv.getValue === expectedJvmOpt)
165-
}
156+
SecondTestConfigurationStep.sparkConfValue))
157+
assert(driverJvmOpts.contains(
158+
"-XX:+|-HeapDumpOnOutOfMemoryError"))
166159
}
167160

168161
test("Waiting for app completion should stall on the watcher") {
@@ -218,8 +211,8 @@ private object SecondTestConfigurationStep extends DriverConfigurationStep {
218211
override def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
219212
val modifiedPod = new PodBuilder(driverSpec.driverPod)
220213
.editMetadata()
221-
.addToAnnotations(annotationKey, annotationValue)
222-
.endMetadata()
214+
.addToAnnotations(annotationKey, annotationValue)
215+
.endMetadata()
223216
.build()
224217
val resolvedSparkConf = driverSpec.driverSparkConf.clone().set(sparkConfKey, sparkConfValue)
225218
val modifiedContainer = new ContainerBuilder(driverSpec.driverContainer)

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,10 +39,10 @@ ENV PYSPARK_DRIVER_PYTHON python
3939
ENV PYTHONPATH ${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.4-src.zip:${PYTHONPATH}
4040

4141
CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
42-
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
43-
readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
4442
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
4543
if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
4644
if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
4745
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
48-
${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
46+
${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \
47+
-Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \
48+
$SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,8 @@ FROM spark-base
2424
COPY examples /opt/spark/examples
2525

2626
CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
27-
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
28-
readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
2927
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
3028
if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
3129
if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
32-
if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
33-
${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
30+
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
31+
${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/spark-base/Dockerfile

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,6 @@ RUN apk upgrade --no-cache && \
2424
apk add --no-cache bash tini && \
2525
mkdir -p /opt/spark && \
2626
touch /opt/spark/RELEASE && \
27-
rm /bin/sh && \
28-
ln -sv /bin/bash /bin/sh && \
2927
chgrp root /etc/passwd && chmod ug+rw /etc/passwd
3028

3129
COPY jars /opt/spark/jars

resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala

Lines changed: 0 additions & 68 deletions
This file was deleted.

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 2 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,9 @@
1616
*/
1717
package org.apache.spark.deploy.kubernetes.integrationtest
1818

19-
import java.io.{File, FileOutputStream}
19+
import java.io.File
2020
import java.nio.file.Paths
21-
import java.util.{Properties, UUID}
21+
import java.util.UUID
2222

2323
import com.google.common.base.Charsets
2424
import com.google.common.io.Files
@@ -226,26 +226,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
226226
Seq.empty[String])
227227
}
228228

229-
test("Setting JVM options on the driver and executors with spaces.") {
230-
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
231-
launchStagingServer(SSLOptions(), None)
232-
val driverJvmOptionsFile = storeJvmOptionsInTempFile(
233-
Map("simpleDriverConf" -> "simpleDriverConfValue",
234-
"driverconfwithspaces" -> "driver conf with spaces value"),
235-
"driver-jvm-options.properties",
236-
"JVM options that should be set on the driver.")
237-
sparkConf.set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
238-
"-DsimpleDriverConf=simpleDriverConfValue" +
239-
" -Ddriverconfwithspaces='driver conf with spaces value'")
240-
sparkConf.set("spark.files", driverJvmOptionsFile.getAbsolutePath)
241-
runSparkApplicationAndVerifyCompletion(
242-
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
243-
JAVA_OPTIONS_MAIN_CLASS,
244-
Seq(s"All expected JVM options were present on the driver and executors."),
245-
Array(driverJvmOptionsFile.getName),
246-
Seq.empty[String])
247-
}
248-
249229
test("Use a very long application name.") {
250230
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
251231

@@ -359,20 +339,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
359339
}
360340
}
361341
}
362-
363-
private def storeJvmOptionsInTempFile(
364-
options: Map[String, String],
365-
propertiesFileName: String,
366-
comments: String): File = {
367-
val tempDir = Utils.createTempDir()
368-
val propertiesFile = new File(tempDir, propertiesFileName)
369-
val properties = new Properties()
370-
options.foreach { case (propKey, propValue) => properties.setProperty(propKey, propValue) }
371-
Utils.tryWithResource(new FileOutputStream(propertiesFile)) { os =>
372-
properties.store(os, comments)
373-
}
374-
propertiesFile
375-
}
376342
}
377343

378344
private[spark] object KubernetesSuite {
@@ -402,8 +368,6 @@ private[spark] object KubernetesSuite {
402368
".integrationtest.jobs.FileExistenceTest"
403369
val GROUP_BY_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" +
404370
".integrationtest.jobs.GroupByTest"
405-
val JAVA_OPTIONS_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" +
406-
".integrationtest.jobs.JavaOptionsTest"
407371
val TEST_EXISTENCE_FILE_CONTENTS = "contents"
408372

409373
case object ShuffleNotReadyException extends Exception

0 commit comments

Comments (0)