Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit fb3636b

Browse files
foxish authored and Marcelo Vanzin committed
[SPARK-22807][SCHEDULER] Remove config that says docker and replace with container
## What changes were proposed in this pull request? Changes discussed in apache#19946 (comment) docker -> container, since with CRI, we are not limited to running only docker images. ## How was this patch tested? Manual testing Author: foxish <[email protected]> Closes apache#19995 from foxish/make-docker-container.
1 parent 7f6d10a commit fb3636b

File tree

8 files changed

+32
-33
lines changed

8 files changed

+32
-33
lines changed

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -396,7 +396,7 @@ class SparkSubmitSuite
396396
"--class", "org.SomeClass",
397397
"--driver-memory", "4g",
398398
"--conf", "spark.kubernetes.namespace=spark",
399-
"--conf", "spark.kubernetes.driver.docker.image=bar",
399+
"--conf", "spark.kubernetes.driver.container.image=bar",
400400
"/home/thejar.jar",
401401
"arg1")
402402
val appArgs = new SparkSubmitArguments(clArgs)
@@ -412,7 +412,7 @@ class SparkSubmitSuite
412412
conf.get("spark.executor.memory") should be ("5g")
413413
conf.get("spark.driver.memory") should be ("4g")
414414
conf.get("spark.kubernetes.namespace") should be ("spark")
415-
conf.get("spark.kubernetes.driver.docker.image") should be ("bar")
415+
conf.get("spark.kubernetes.driver.container.image") should be ("bar")
416416
}
417417

418418
test("handles confs with flag equivalents") {

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -30,21 +30,20 @@ private[spark] object Config extends Logging {
3030
.stringConf
3131
.createWithDefault("default")
3232

33-
val DRIVER_DOCKER_IMAGE =
34-
ConfigBuilder("spark.kubernetes.driver.docker.image")
35-
.doc("Docker image to use for the driver. Specify this using the standard Docker tag format.")
33+
val DRIVER_CONTAINER_IMAGE =
34+
ConfigBuilder("spark.kubernetes.driver.container.image")
35+
.doc("Container image to use for the driver.")
3636
.stringConf
3737
.createOptional
3838

39-
val EXECUTOR_DOCKER_IMAGE =
40-
ConfigBuilder("spark.kubernetes.executor.docker.image")
41-
.doc("Docker image to use for the executors. Specify this using the standard Docker tag " +
42-
"format.")
39+
val EXECUTOR_CONTAINER_IMAGE =
40+
ConfigBuilder("spark.kubernetes.executor.container.image")
41+
.doc("Container image to use for the executors.")
4342
.stringConf
4443
.createOptional
4544

46-
val DOCKER_IMAGE_PULL_POLICY =
47-
ConfigBuilder("spark.kubernetes.docker.image.pullPolicy")
45+
val CONTAINER_IMAGE_PULL_POLICY =
46+
ConfigBuilder("spark.kubernetes.container.image.pullPolicy")
4847
.doc("Kubernetes image pull policy. Valid values are Always, Never, and IfNotPresent.")
4948
.stringConf
5049
.checkValues(Set("Always", "Never", "IfNotPresent"))

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
4949
s"$appName-$uuid".toLowerCase.replaceAll("\\.", "-")
5050
}
5151

52-
private val dockerImagePullPolicy = submissionSparkConf.get(DOCKER_IMAGE_PULL_POLICY)
52+
private val imagePullPolicy = submissionSparkConf.get(CONTAINER_IMAGE_PULL_POLICY)
5353
private val jarsDownloadPath = submissionSparkConf.get(JARS_DOWNLOAD_LOCATION)
5454
private val filesDownloadPath = submissionSparkConf.get(FILES_DOWNLOAD_LOCATION)
5555

@@ -72,7 +72,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
7272
kubernetesAppId,
7373
kubernetesResourceNamePrefix,
7474
allDriverLabels,
75-
dockerImagePullPolicy,
75+
imagePullPolicy,
7676
appName,
7777
mainClass,
7878
appArgs,

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ private[spark] class BaseDriverConfigurationStep(
3434
kubernetesAppId: String,
3535
kubernetesResourceNamePrefix: String,
3636
driverLabels: Map[String, String],
37-
dockerImagePullPolicy: String,
37+
imagePullPolicy: String,
3838
appName: String,
3939
mainClass: String,
4040
appArgs: Array[String],
@@ -46,9 +46,9 @@ private[spark] class BaseDriverConfigurationStep(
4646
private val driverExtraClasspath = submissionSparkConf.get(
4747
DRIVER_CLASS_PATH)
4848

49-
private val driverDockerImage = submissionSparkConf
50-
.get(DRIVER_DOCKER_IMAGE)
51-
.getOrElse(throw new SparkException("Must specify the driver Docker image"))
49+
private val driverContainerImage = submissionSparkConf
50+
.get(DRIVER_CONTAINER_IMAGE)
51+
.getOrElse(throw new SparkException("Must specify the driver container image"))
5252

5353
// CPU settings
5454
private val driverCpuCores = submissionSparkConf.getOption("spark.driver.cores").getOrElse("1")
@@ -110,8 +110,8 @@ private[spark] class BaseDriverConfigurationStep(
110110

111111
val driverContainer = new ContainerBuilder(driverSpec.driverContainer)
112112
.withName(DRIVER_CONTAINER_NAME)
113-
.withImage(driverDockerImage)
114-
.withImagePullPolicy(dockerImagePullPolicy)
113+
.withImage(driverContainerImage)
114+
.withImagePullPolicy(imagePullPolicy)
115115
.addAllToEnv(driverCustomEnvs.asJava)
116116
.addToEnv(driverExtraClasspathEnv.toSeq: _*)
117117
.addNewEnv()

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,10 @@ private[spark] class ExecutorPodFactoryImpl(sparkConf: SparkConf)
7272
sparkConf,
7373
KUBERNETES_NODE_SELECTOR_PREFIX)
7474

75-
private val executorDockerImage = sparkConf
76-
.get(EXECUTOR_DOCKER_IMAGE)
77-
.getOrElse(throw new SparkException("Must specify the executor Docker image"))
78-
private val dockerImagePullPolicy = sparkConf.get(DOCKER_IMAGE_PULL_POLICY)
75+
private val executorContainerImage = sparkConf
76+
.get(EXECUTOR_CONTAINER_IMAGE)
77+
.getOrElse(throw new SparkException("Must specify the executor container image"))
78+
private val imagePullPolicy = sparkConf.get(CONTAINER_IMAGE_PULL_POLICY)
7979
private val blockManagerPort = sparkConf
8080
.getInt("spark.blockmanager.port", DEFAULT_BLOCKMANAGER_PORT)
8181

@@ -166,8 +166,8 @@ private[spark] class ExecutorPodFactoryImpl(sparkConf: SparkConf)
166166

167167
val executorContainer = new ContainerBuilder()
168168
.withName("executor")
169-
.withImage(executorDockerImage)
170-
.withImagePullPolicy(dockerImagePullPolicy)
169+
.withImage(executorContainerImage)
170+
.withImagePullPolicy(imagePullPolicy)
171171
.withNewResources()
172172
.addToRequests("memory", executorMemoryQuantity)
173173
.addToLimits("memory", executorMemoryLimitQuantity)

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
package org.apache.spark.deploy.k8s.submit
1818

1919
import org.apache.spark.{SparkConf, SparkFunSuite}
20-
import org.apache.spark.deploy.k8s.Config.DRIVER_DOCKER_IMAGE
20+
import org.apache.spark.deploy.k8s.Config.DRIVER_CONTAINER_IMAGE
2121
import org.apache.spark.deploy.k8s.submit.steps._
2222

2323
class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
@@ -32,7 +32,7 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
3232

3333
test("Base submission steps with a main app resource.") {
3434
val sparkConf = new SparkConf(false)
35-
.set(DRIVER_DOCKER_IMAGE, DRIVER_IMAGE)
35+
.set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
3636
val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
3737
val orchestrator = new DriverConfigurationStepsOrchestrator(
3838
NAMESPACE,
@@ -54,7 +54,7 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
5454

5555
test("Base submission steps without a main app resource.") {
5656
val sparkConf = new SparkConf(false)
57-
.set(DRIVER_DOCKER_IMAGE, DRIVER_IMAGE)
57+
.set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
5858
val orchestrator = new DriverConfigurationStepsOrchestrator(
5959
NAMESPACE,
6060
APP_ID,

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
3030
private val APP_ID = "spark-app-id"
3131
private val RESOURCE_NAME_PREFIX = "spark"
3232
private val DRIVER_LABELS = Map("labelkey" -> "labelvalue")
33-
private val DOCKER_IMAGE_PULL_POLICY = "IfNotPresent"
33+
private val CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"
3434
private val APP_NAME = "spark-test"
3535
private val MAIN_CLASS = "org.apache.spark.examples.SparkPi"
3636
private val APP_ARGS = Array("arg1", "arg2", "arg 3")
@@ -47,7 +47,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
4747
.set(KUBERNETES_DRIVER_LIMIT_CORES, "4")
4848
.set(org.apache.spark.internal.config.DRIVER_MEMORY.key, "256M")
4949
.set(org.apache.spark.internal.config.DRIVER_MEMORY_OVERHEAD, 200L)
50-
.set(DRIVER_DOCKER_IMAGE, "spark-driver:latest")
50+
.set(DRIVER_CONTAINER_IMAGE, "spark-driver:latest")
5151
.set(s"$KUBERNETES_DRIVER_ANNOTATION_PREFIX$CUSTOM_ANNOTATION_KEY", CUSTOM_ANNOTATION_VALUE)
5252
.set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY1", "customDriverEnv1")
5353
.set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY2", "customDriverEnv2")
@@ -56,7 +56,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
5656
APP_ID,
5757
RESOURCE_NAME_PREFIX,
5858
DRIVER_LABELS,
59-
DOCKER_IMAGE_PULL_POLICY,
59+
CONTAINER_IMAGE_PULL_POLICY,
6060
APP_NAME,
6161
MAIN_CLASS,
6262
APP_ARGS,
@@ -71,7 +71,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
7171

7272
assert(preparedDriverSpec.driverContainer.getName === DRIVER_CONTAINER_NAME)
7373
assert(preparedDriverSpec.driverContainer.getImage === "spark-driver:latest")
74-
assert(preparedDriverSpec.driverContainer.getImagePullPolicy === DOCKER_IMAGE_PULL_POLICY)
74+
assert(preparedDriverSpec.driverContainer.getImagePullPolicy === CONTAINER_IMAGE_PULL_POLICY)
7575

7676
assert(preparedDriverSpec.driverContainer.getEnv.size === 7)
7777
val envs = preparedDriverSpec.driverContainer

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ class ExecutorPodFactorySuite extends SparkFunSuite with BeforeAndAfter with Bef
5050
baseConf = new SparkConf()
5151
.set(KUBERNETES_DRIVER_POD_NAME, driverPodName)
5252
.set(KUBERNETES_EXECUTOR_POD_NAME_PREFIX, executorPrefix)
53-
.set(EXECUTOR_DOCKER_IMAGE, executorImage)
53+
.set(EXECUTOR_CONTAINER_IMAGE, executorImage)
5454
}
5555

5656
test("basic executor pod has reasonable defaults") {

0 commit comments

Comments (0)