Skip to content

Commit ec8e9fc

Browse files
committed
Revert "Support spark.executor.extraJavaOptions."
This reverts commit 50c690d.
1 parent 701bd2a commit ec8e9fc

File tree

5 files changed

+16
-59
lines changed

5 files changed

+16
-59
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterSchedulerBackend.scala

Lines changed: 4 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -457,16 +457,7 @@ private[spark] class KubernetesClusterSchedulerBackend(
457457
.withValue(cp)
458458
.build()
459459
}
460-
val executorExtraJavaOptionsEnv = conf
461-
.get(org.apache.spark.internal.config.EXECUTOR_JAVA_OPTIONS)
462-
.map { opts =>
463-
val delimitedOpts = Utils.splitCommandString(opts)
464-
delimitedOpts.zipWithIndex.map {
465-
case (opt, index) =>
466-
new EnvVarBuilder().withName(s"$ENV_JAVA_OPT_PREFIX$index").withValue(opt).build()
467-
}
468-
}.getOrElse(Seq.empty[EnvVar])
469-
val executorEnv = (Seq(
460+
val requiredEnv = (Seq(
470461
(ENV_EXECUTOR_PORT, executorPort.toString),
471462
(ENV_DRIVER_URL, driverUrl),
472463
// Executor backend expects integral value for executor cores, so round it up to an int.
@@ -486,7 +477,7 @@ private[spark] class KubernetesClusterSchedulerBackend(
486477
.withNewFieldRef("v1", "status.podIP")
487478
.build())
488479
.build()
489-
) ++ executorExtraJavaOptionsEnv ++ executorExtraClasspathEnv.toSeq
480+
)
490481
val requiredPorts = Seq(
491482
(EXECUTOR_PORT_NAME, executorPort),
492483
(BLOCK_MANAGER_PORT_NAME, blockmanagerPort))
@@ -506,7 +497,8 @@ private[spark] class KubernetesClusterSchedulerBackend(
506497
.addToLimits("memory", executorMemoryLimitQuantity)
507498
.addToRequests("cpu", executorCpuQuantity)
508499
.endResources()
509-
.addAllToEnv(executorEnv.asJava)
500+
.addAllToEnv(requiredEnv.asJava)
501+
.addToEnv(executorExtraClasspathEnv.toSeq: _*)
510502
.withPorts(requiredPorts.asJava)
511503
.build()
512504

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/executor-py/Dockerfile

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -38,10 +38,9 @@ ENV PYSPARK_PYTHON python
3838
ENV PYSPARK_DRIVER_PYTHON python
3939
ENV PYTHONPATH ${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.4-src.zip:${PYTHONPATH}
4040

41+
# TODO support spark.executor.extraClassPath
4142
CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
42-
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
43-
readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt && \
4443
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
4544
if ! [ -z ${SPARK_EXECUTOR_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXECUTOR_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
4645
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
47-
${JAVA_HOME}/bin/java "${SPARK_EXECUTOR_JAVA_OPTS[@]}" -Dspark.executor.port=$SPARK_EXECUTOR_PORT -Xms$SPARK_EXECUTOR_MEMORY -Xmx$SPARK_EXECUTOR_MEMORY -cp $SPARK_CLASSPATH org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url $SPARK_DRIVER_URL --executor-id $SPARK_EXECUTOR_ID --cores $SPARK_EXECUTOR_CORES --app-id $SPARK_APPLICATION_ID --hostname $SPARK_EXECUTOR_POD_IP
46+
${JAVA_HOME}/bin/java -Dspark.executor.port=$SPARK_EXECUTOR_PORT -Xms$SPARK_EXECUTOR_MEMORY -Xmx$SPARK_EXECUTOR_MEMORY -cp $SPARK_CLASSPATH org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url $SPARK_DRIVER_URL --executor-id $SPARK_EXECUTOR_ID --cores $SPARK_EXECUTOR_CORES --app-id $SPARK_APPLICATION_ID --hostname $SPARK_EXECUTOR_POD_IP

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/executor/Dockerfile

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,11 +23,10 @@ FROM spark-base
2323

2424
COPY examples /opt/spark/examples
2525

26+
# TODO support spark.executor.extraClassPath
2627
CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
27-
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
28-
readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt && \
2928
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
3029
if ! [ -z ${SPARK_EXECUTOR_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXECUTOR_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
3130
if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
3231
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
33-
${JAVA_HOME}/bin/java "${SPARK_EXECUTOR_JAVA_OPTS[@]}" -Dspark.executor.port=$SPARK_EXECUTOR_PORT -Xms$SPARK_EXECUTOR_MEMORY -Xmx$SPARK_EXECUTOR_MEMORY -cp $SPARK_CLASSPATH org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url $SPARK_DRIVER_URL --executor-id $SPARK_EXECUTOR_ID --cores $SPARK_EXECUTOR_CORES --app-id $SPARK_APPLICATION_ID --hostname $SPARK_EXECUTOR_POD_IP
32+
${JAVA_HOME}/bin/java -Dspark.executor.port=$SPARK_EXECUTOR_PORT -Xms$SPARK_EXECUTOR_MEMORY -Xmx$SPARK_EXECUTOR_MEMORY -cp $SPARK_CLASSPATH org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url $SPARK_DRIVER_URL --executor-id $SPARK_EXECUTOR_ID --cores $SPARK_EXECUTOR_CORES --app-id $SPARK_APPLICATION_ID --hostname $SPARK_EXECUTOR_POD_IP

resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala

Lines changed: 5 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -29,51 +29,28 @@ private[spark] object JavaOptionsTest {
2929

3030
def main(args: Array[String]): Unit = {
3131
// scalastyle:off println
32-
if (args.length != 2) {
32+
if (args.length != 1) {
3333
println(s"Invalid arguments: ${args.mkString(",")}." +
34-
s"Usage: JavaOptionsTest <driver-java-options-list-file> <executor-java-options-list-file>")
34+
s"Usage: JavaOptionsTest <driver-java-options-list-file>")
3535
System.exit(1)
3636
}
3737
val expectedDriverJavaOptions = loadPropertiesFromFile(args(0))
38-
val expectedExecutorJavaOptions = loadPropertiesFromFile(args(1))
3938
val nonMatchingDriverOptions = expectedDriverJavaOptions.filter {
4039
case (optKey, optValue) => System.getProperty(optKey) != optValue
4140
}
4241
if (nonMatchingDriverOptions.nonEmpty) {
4342
println(s"The driver's JVM options did not match. Expected $expectedDriverJavaOptions." +
4443
s" But these options did not match: $nonMatchingDriverOptions.")
4544
val sysProps = Maps.fromProperties(System.getProperties).asScala
46-
println("Driver system properties are:")
45+
println("System properties are:")
4746
for (prop <- sysProps) {
4847
println(s"Key: ${prop._1}, Value: ${prop._2}")
4948
}
5049
System.exit(1)
5150
}
5251

53-
val spark = SparkSession.builder().getOrCreate().sparkContext
54-
try {
55-
val nonMatchingExecutorOptions = spark.parallelize(Seq(0)).flatMap { _ =>
56-
expectedExecutorJavaOptions.filter {
57-
case (optKey, optValue) => System.getProperty(optKey) != optValue
58-
}
59-
}.collectAsMap()
60-
if (nonMatchingExecutorOptions.nonEmpty) {
61-
val executorSysProps = spark.parallelize(Seq(0)).flatMap { _ =>
62-
Maps.fromProperties(System.getProperties).asScala
63-
}.collectAsMap()
64-
println(s"The executor's JVM options did not match. Expected" +
65-
s" $expectedExecutorJavaOptions. But these options did not" +
66-
s" match: $nonMatchingExecutorOptions.")
67-
println("Executor system properties are:")
68-
for (prop <- executorSysProps) {
69-
println(s"Key: ${prop._1}, Value: ${prop._2}")
70-
}
71-
} else {
72-
println("All expected JVM options were present on the driver and executors.")
73-
}
74-
} finally {
75-
spark.stop()
76-
}
52+
// TODO support spark.executor.extraJavaOptions and test here.
53+
println(s"All expected JVM options were present on the driver and executors.")
7754
// scalastyle:on println
7855
}
7956

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 3 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -231,28 +231,18 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
231231
launchStagingServer(SSLOptions(), None)
232232
val driverJvmOptionsFile = storeJvmOptionsInTempFile(
233233
Map("simpleDriverConf" -> "simpleDriverConfValue",
234-
"driverconfwithspaces" -> "driver conf with spaces value"),
234+
"driverconfwithspaces" -> "driver conf with spaces value"),
235235
"driver-jvm-options.properties",
236236
"JVM options that should be set on the driver.")
237-
val executorJvmOptionsFile = storeJvmOptionsInTempFile(
238-
Map("simpleExecutorConf" -> "simpleExecutorConfValue",
239-
"executor conf with spaces" -> "executor conf with spaces value"),
240-
"executor-jvm-options.properties",
241-
"JVM options that should be set on the executors.")
242237
sparkConf.set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
243238
"-DsimpleDriverConf=simpleDriverConfValue" +
244239
" -Ddriverconfwithspaces='driver conf with spaces value'")
245-
sparkConf.set(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS,
246-
"-DsimpleExecutorConf=simpleExecutorConfValue" +
247-
" -D\'executor conf with spaces\'=\'executor conf with spaces value\'")
248-
sparkConf.set("spark.files",
249-
Seq(driverJvmOptionsFile.getAbsolutePath, executorJvmOptionsFile.getAbsolutePath)
250-
.mkString(","))
240+
sparkConf.set("spark.files", driverJvmOptionsFile.getAbsolutePath)
251241
runSparkApplicationAndVerifyCompletion(
252242
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
253243
JAVA_OPTIONS_MAIN_CLASS,
254244
Seq(s"All expected JVM options were present on the driver and executors."),
255-
Array(driverJvmOptionsFile.getName, executorJvmOptionsFile.getName),
245+
Array(driverJvmOptionsFile.getName),
256246
Seq.empty[String])
257247
}
258248

0 commit comments

Comments
 (0)