This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 3ff2cbb

Spark Submit changes and test (#542)
* Spark Submit Unit tests
* Improvements
* Add missing options
* Added check for jar
1 parent 8f73508 commit 3ff2cbb

2 files changed: 31 additions (+), 5 deletions (-)


core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 5 additions & 5 deletions
@@ -483,20 +483,20 @@ object SparkSubmit extends CommandLineUtils {
         sysProp = "spark.kubernetes.namespace"),
 
       // Other options
-      OptionAssigner(args.executorCores, STANDALONE | YARN, ALL_DEPLOY_MODES,
+      OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         sysProp = "spark.executor.cores"),
-      OptionAssigner(args.executorMemory, STANDALONE | MESOS | YARN, ALL_DEPLOY_MODES,
+      OptionAssigner(args.executorMemory, STANDALONE | MESOS | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         sysProp = "spark.executor.memory"),
-      OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS, ALL_DEPLOY_MODES,
+      OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         sysProp = "spark.cores.max"),
       OptionAssigner(args.files, LOCAL | STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         sysProp = "spark.files"),
       OptionAssigner(args.jars, LOCAL, CLIENT, sysProp = "spark.jars"),
       OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         sysProp = "spark.jars"),
-      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN, CLUSTER,
+      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
         sysProp = "spark.driver.memory"),
-      OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN, CLUSTER,
+      OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
         sysProp = "spark.driver.cores"),
       OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
         sysProp = "spark.driver.supervise"),

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 26 additions & 0 deletions
@@ -381,6 +381,32 @@ class SparkSubmitSuite
     sysProps("spark.ui.enabled") should be ("false")
   }
 
+  test("handles k8s cluster mode") {
+    val clArgs = Seq(
+      "--deploy-mode", "cluster",
+      "--master", "k8s://h:p",
+      "--executor-memory", "5g",
+      "--class", "org.SomeClass",
+      "--kubernetes-namespace", "foo",
+      "--driver-memory", "4g",
+      "--conf", "spark.kubernetes.driver.docker.image=bar",
+      "/home/thejar.jar",
+      "arg1")
+    val appArgs = new SparkSubmitArguments(clArgs)
+    val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
+
+    val childArgsMap = childArgs.grouped(2).map(a => a(0) -> a(1)).toMap
+    childArgsMap.get("--primary-java-resource") should be (Some("file:/home/thejar.jar"))
+    childArgsMap.get("--main-class") should be (Some("org.SomeClass"))
+    childArgsMap.get("--arg") should be (Some("arg1"))
+    mainClass should be ("org.apache.spark.deploy.k8s.submit.Client")
+    classpath should have length (0)
+    sysProps("spark.executor.memory") should be ("5g")
+    sysProps("spark.driver.memory") should be ("4g")
+    sysProps("spark.kubernetes.namespace") should be ("foo")
+    sysProps("spark.kubernetes.driver.docker.image") should be ("bar")
+  }
+
   test("handles confs with flag equivalents") {
     val clArgs = Seq(
       "--deploy-mode", "cluster",
