
Commit 68abf77

hehuiyuan authored and HyukjinKwon committed
[SPARK-27184][CORE] Avoid hardcoded 'spark.jars', 'spark.files', 'spark.submit.pyFiles' and 'spark.submit.deployMode'
## What changes were proposed in this pull request?

For [SPARK-27184](https://issues.apache.org/jira/browse/SPARK-27184).

`org.apache.spark.internal.config` already defines the `JARS` and `FILES` config entries, so we can use them instead of the hardcoded strings "spark.jars" and "spark.files":

```scala
private[spark] val JARS = ConfigBuilder("spark.jars")
  .stringConf
  .toSequence
  .createWithDefault(Nil)
```

```scala
private[spark] val FILES = ConfigBuilder("spark.files")
  .stringConf
  .toSequence
  .createWithDefault(Nil)
```

Likewise, `org.apache.spark.internal.config` defines `SUBMIT_PYTHON_FILES` and `SUBMIT_DEPLOY_MODE`, which we can use instead of "spark.submit.pyFiles" and "spark.submit.deployMode":

```scala
private[spark] val SUBMIT_PYTHON_FILES = ConfigBuilder("spark.submit.pyFiles")
  .stringConf
  .toSequence
  .createWithDefault(Nil)
```

```scala
private[spark] val SUBMIT_DEPLOY_MODE = ConfigBuilder("spark.submit.deployMode")
  .stringConf
  .createWithDefault("client")
```

Closes apache#24123 from hehuiyuan/hehuiyuan-patch-6.

Authored-by: hehuiyuan <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
1 parent 8a9eb05 commit 68abf77
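
As background on the `ConfigBuilder` chain used in those definitions: `.stringConf` fixes the value type to `String`, `.toSequence` turns the entry into a comma-separated `Seq[String]`, and `createWithDefault` supplies the fallback when the key is unset. A minimal sketch of declaring and reading such an entry (`MY_FILES` is a hypothetical name; `ConfigBuilder` and the typed getter are `private[spark]`, so code like this only compiles inside Spark's own packages):

```scala
package org.apache.spark.internal.config

import org.apache.spark.SparkConf

object ConfigEntrySketch {
  // Hypothetical entry, mirroring the shape of FILES and JARS above.
  val MY_FILES = ConfigBuilder("spark.myFiles")
    .stringConf
    .toSequence              // parse "a,b" into Seq("a", "b")
    .createWithDefault(Nil)  // Seq() when the key is unset

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().set(MY_FILES.key, "a.txt,b.txt")
    assert(conf.get(MY_FILES) == Seq("a.txt", "b.txt"))
  }
}
```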

File tree

4 files changed, +9 −9 lines


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
```diff
@@ -394,7 +394,7 @@ class SparkContext(config: SparkConf) extends Logging {
     _conf.set(EXECUTOR_ID, SparkContext.DRIVER_IDENTIFIER)
 
     _jars = Utils.getUserJars(_conf)
-    _files = _conf.getOption("spark.files").map(_.split(",")).map(_.filter(_.nonEmpty))
+    _files = _conf.getOption(FILES.key).map(_.split(",")).map(_.filter(_.nonEmpty))
       .toSeq.flatten
 
     _eventLogDir =
```
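
Note that the behavior here is unchanged: `FILES.key` evaluates to the constant string `"spark.files"`, so the manual split-and-filter logic still runs; only the hardcoded literal is replaced by the named key. A minimal sketch of the equivalence (the conf value is illustrative; `FILES` is `private[spark]`, so this only compiles inside Spark's own source tree):

```scala
import org.apache.spark.SparkConf
import org.apache.spark.internal.config.FILES

val conf = new SparkConf().set(FILES.key, "a.txt,,b.txt")

// FILES.key is simply the constant "spark.files"...
assert(FILES.key == "spark.files")

// ...so the refactored read is identical to the old string-keyed one.
val oldStyle =
  conf.getOption("spark.files").map(_.split(",")).map(_.filter(_.nonEmpty)).toSeq.flatten
val newStyle =
  conf.getOption(FILES.key).map(_.split(",")).map(_.filter(_.nonEmpty)).toSeq.flatten
assert(oldStyle == newStyle && newStyle == Seq("a.txt", "b.txt"))
```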

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 3 additions & 3 deletions
```diff
@@ -542,10 +542,10 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = CORES_MAX.key),
       OptionAssigner(args.files, LOCAL | STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
-        confKey = "spark.files"),
-      OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
+        confKey = FILES.key),
+      OptionAssigner(args.jars, LOCAL, CLIENT, confKey = JARS.key),
       OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
-        confKey = "spark.jars"),
+        confKey = JARS.key),
       OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
         confKey = DRIVER_MEMORY.key),
       OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
```
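
For context on what `OptionAssigner` does here: each entry maps a `spark-submit` CLI argument onto a conf key for the cluster managers and deploy modes where it applies. A simplified, self-contained sketch of the idea (Spark's real `OptionAssigner` is private to `SparkSubmit`; the names and bitmask values below are illustrative):

```scala
object OptionAssignerSketch {
  // Illustrative bitmasks; Spark defines its own private constants.
  val LOCAL = 1; val STANDALONE = 2
  val CLIENT = 1; val CLUSTER = 2; val ALL_DEPLOY_MODES = CLIENT | CLUSTER

  case class OptionAssigner(
      value: String,        // CLI argument, possibly null
      clusterManager: Int,  // managers the option applies to
      deployMode: Int,      // deploy modes the option applies to
      confKey: String)      // target conf key, e.g. FILES.key

  // Copy each non-null value into the conf when manager and mode both match.
  def assign(opts: Seq[OptionAssigner], manager: Int, mode: Int,
      conf: scala.collection.mutable.Map[String, String]): Unit = {
    for (o <- opts
         if o.value != null &&
           (o.clusterManager & manager) != 0 && (o.deployMode & mode) != 0) {
      conf(o.confKey) = o.value
    }
  }
}
```

The payoff of the diff is that `confKey` now comes from `FILES.key` and `JARS.key`: a typo in the identifier becomes a compile error instead of a silently ignored conf entry.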

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 4 additions & 4 deletions
```diff
@@ -183,9 +183,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       .orElse(sparkProperties.get(config.CORES_MAX.key))
       .orNull
     name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
-    jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
-    files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
-    pyFiles = Option(pyFiles).orElse(sparkProperties.get("spark.submit.pyFiles")).orNull
+    jars = Option(jars).orElse(sparkProperties.get(config.JARS.key)).orNull
+    files = Option(files).orElse(sparkProperties.get(config.FILES.key)).orNull
+    pyFiles = Option(pyFiles).orElse(sparkProperties.get(config.SUBMIT_PYTHON_FILES.key)).orNull
     ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
     ivySettingsPath = sparkProperties.get("spark.jars.ivySettings")
     packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
@@ -194,7 +194,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     repositories = Option(repositories)
       .orElse(sparkProperties.get("spark.jars.repositories")).orNull
     deployMode = Option(deployMode)
-      .orElse(sparkProperties.get("spark.submit.deployMode"))
+      .orElse(sparkProperties.get(config.SUBMIT_DEPLOY_MODE.key))
       .orElse(env.get("DEPLOY_MODE"))
       .orNull
     numExecutors = Option(numExecutors)
```
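
The surrounding pattern resolves each setting with command-line-first precedence: keep the CLI value if present, else the `--properties-file` value, else (for deploy mode) an environment variable, else `null`. A standalone sketch of that chain with illustrative values:

```scala
// Illustrative precedence: CLI argument > properties file > environment.
val sparkProperties = Map("spark.submit.deployMode" -> "cluster")
val env = Map.empty[String, String] // pretend DEPLOY_MODE is unset

var deployMode: String = null // no --deploy-mode flag was parsed

deployMode = Option(deployMode)                            // 1. CLI value, if any
  .orElse(sparkProperties.get("spark.submit.deployMode"))  // 2. properties file
  .orElse(env.get("DEPLOY_MODE"))                          // 3. environment
  .orNull                                                  // null if all are empty

assert(deployMode == "cluster")
```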

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 1 addition & 1 deletion
```diff
@@ -681,7 +681,7 @@ class SparkSubmitSuite
     appArgs.jars should be(Utils.resolveURIs(jars))
     appArgs.files should be(Utils.resolveURIs(files))
     conf.get(JARS) should be(Utils.resolveURIs(jars + ",thejar.jar").split(",").toSeq)
-    conf.get("spark.files") should be(Utils.resolveURIs(files))
+    conf.get(FILES) should be(Utils.resolveURIs(files).split(",").toSeq)
 
     // Test files and archives (Yarn)
     val clArgs2 = Seq(
```
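
The assertion changes shape because the two getters have different types: the string overload `conf.get("spark.files")` returns the raw comma-separated `String`, while the typed `conf.get(FILES)` parses it into a `Seq[String]`, hence the added `.split(",").toSeq` on the expected value. A small sketch of that difference (values illustrative; the typed getter is `private[spark]`, as in the test suite's own package):

```scala
import org.apache.spark.SparkConf
import org.apache.spark.internal.config.FILES

val conf = new SparkConf().set(FILES.key, "/tmp/a.txt,/tmp/b.txt")

val raw: String = conf.get("spark.files")  // "/tmp/a.txt,/tmp/b.txt"
val typed: Seq[String] = conf.get(FILES)   // Seq("/tmp/a.txt", "/tmp/b.txt")

assert(typed == raw.split(",").toSeq)
```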
