
Commit 3ec521f

liyinan926 authored and foxish committed
Added some coverage for executors and test cases for secret mounting and init-containers (#15)
* Added some coverage for executors and test cases for secret mounting
* Addressed comments
* Added a test case of SparkPi with an argument
* Added test cases that trigger the use of the init-container
* Addressed comments
* Fixed the test using a master url without a scheme
* Updated e2e/runner.sh
1 parent 1e5b290 commit 3ec521f

5 files changed (+174 additions, -34 deletions)

e2e/runner.sh

Lines changed: 2 additions & 1 deletion
@@ -112,6 +112,7 @@ $root/spark/build/mvn clean -Ddownload.plugin.skip=true integration-test \
     -Dspark-distro-tgz=$root/spark/*.tgz \
     -DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
     -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$tag \
-    -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag" || :
+    -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag \
+    -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$tag" || :
 
 echo "TEST SUITE FINISHED"

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 162 additions & 28 deletions
@@ -24,7 +24,7 @@ import java.util.regex.Pattern
 import scala.collection.JavaConverters._
 
 import com.google.common.io.PatternFilenameFilter
-import io.fabric8.kubernetes.api.model.Pod
+import io.fabric8.kubernetes.api.model.{Container, Pod}
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
 import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
@@ -52,6 +52,7 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
   before {
     sparkAppConf = kubernetesTestComponents.newSparkAppConf()
       .set("spark.kubernetes.driver.label.spark-app-locator", APP_LOCATOR_LABEL)
+      .set("spark.kubernetes.executor.label.spark-app-locator", APP_LOCATOR_LABEL)
     kubernetesTestComponents.createNamespace()
   }
 
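Note: with the locator label now set on executors as well, both driver and executor pods of a test run can be found with label selectors, distinguished by the spark-role label that Spark on Kubernetes applies to each pod. A sketch of the lookup, mirroring the code added to runSparkApplicationAndVerifyCompletion below:

// Find this test run's executor pods via the locator and role labels.
val executorPods = kubernetesTestComponents.kubernetesClient
  .pods()
  .withLabel("spark-app-locator", APP_LOCATOR_LABEL)
  .withLabel("spark-role", "executor")
  .list()
  .getItems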
@@ -70,10 +71,25 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
 
   test("Run SparkPi with a master URL without a scheme.") {
     val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
-    sparkAppConf.set("spark.master", s"k8s://${url.getHost}:${url.getPort}")
+    val k8sMasterUrl = if (url.getPort < 0) {
+      s"k8s://${url.getHost}"
+    } else {
+      s"k8s://${url.getHost}:${url.getPort}"
+    }
+    sparkAppConf.set("spark.master", k8sMasterUrl)
     runSparkPiAndVerifyCompletion()
   }
 
+  test("Run SparkPi with an argument.") {
+    runSparkPiAndVerifyCompletion(appArgs = Array("5"))
+  }
+
+  test("Run SparkPi using the remote example jar.") {
+    sparkAppConf.set("spark.kubernetes.initContainer.image",
+      System.getProperty("spark.docker.test.initContainerImage", "spark-init:latest"))
+    runSparkPiAndVerifyCompletion(appResource = REMOTE_EXAMPLES_JAR_URI)
+  }
+
   test("Run SparkPi with custom driver pod name, labels, annotations, and environment variables.") {
     sparkAppConf
       .set("spark.kubernetes.driver.pod.name", "spark-integration-spark-pi")
@@ -83,56 +99,109 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
       .set("spark.kubernetes.driver.annotation.annotation2", "annotation2-value")
       .set("spark.kubernetes.driverEnv.ENV1", "VALUE1")
       .set("spark.kubernetes.driverEnv.ENV2", "VALUE2")
-    runSparkPiAndVerifyCompletion(driverPodChecker = (driverPod: Pod) => {
-      doBasicDriverPodCheck(driverPod)
-      assert(driverPod.getMetadata.getName === "spark-integration-spark-pi")
-
-      assert(driverPod.getMetadata.getLabels.get("label1") === "label1-value")
-      assert(driverPod.getMetadata.getLabels.get("label2") === "label2-value")
-      assert(driverPod.getMetadata.getAnnotations.get("annotation1") === "annotation1-value")
-      assert(driverPod.getMetadata.getAnnotations.get("annotation2") === "annotation2-value")
-
-      val driverContainer = driverPod.getSpec.getContainers.get(0)
-      val envVars = driverContainer
-        .getEnv
-        .asScala
-        .map { env =>
-          (env.getName, env.getValue)
-        }
-        .toMap
-      assert(envVars("ENV1") === "VALUE1")
-      assert(envVars("ENV2") === "VALUE2")
-    })
+      .set("spark.kubernetes.executor.label.label1", "label1-value")
+      .set("spark.kubernetes.executor.label.label2", "label2-value")
+      .set("spark.kubernetes.executor.annotation.annotation1", "annotation1-value")
+      .set("spark.kubernetes.executor.annotation.annotation2", "annotation2-value")
+      .set("spark.executorEnv.ENV1", "VALUE1")
+      .set("spark.executorEnv.ENV2", "VALUE2")
+
+    runSparkPiAndVerifyCompletion(
+      driverPodChecker = (driverPod: Pod) => {
+        doBasicDriverPodCheck(driverPod)
+        assert(driverPod.getMetadata.getName === "spark-integration-spark-pi")
+        checkCustomSettings(driverPod)
+      },
+      executorPodChecker = (executorPod: Pod) => {
+        doBasicExecutorPodCheck(executorPod)
+        checkCustomSettings(executorPod)
+      })
+  }
+
+  test("Run SparkPi with a test secret mounted into the driver and executor pods") {
+    createTestSecret()
+    sparkAppConf
+      .set(s"spark.kubernetes.driver.secrets.$TEST_SECRET_NAME", TEST_SECRET_MOUNT_PATH)
+      .set(s"spark.kubernetes.executor.secrets.$TEST_SECRET_NAME", TEST_SECRET_MOUNT_PATH)
+    runSparkPiAndVerifyCompletion(
+      driverPodChecker = (driverPod: Pod) => {
+        doBasicDriverPodCheck(driverPod)
+        checkTestSecret(driverPod)
+      },
+      executorPodChecker = (executorPod: Pod) => {
+        doBasicExecutorPodCheck(executorPod)
+        checkTestSecret(executorPod)
+      })
+  }
+
+  test("Run SparkPi using the remote example jar with a test secret mounted into the driver and " +
+    "executor pods") {
+    sparkAppConf
+      .set(s"spark.kubernetes.driver.secrets.$TEST_SECRET_NAME", TEST_SECRET_MOUNT_PATH)
+      .set(s"spark.kubernetes.executor.secrets.$TEST_SECRET_NAME", TEST_SECRET_MOUNT_PATH)
+    sparkAppConf.set("spark.kubernetes.initContainer.image",
+      System.getProperty("spark.docker.test.initContainerImage", "spark-init:latest"))
+
+    createTestSecret()
+
+    runSparkPiAndVerifyCompletion(
+      appResource = REMOTE_EXAMPLES_JAR_URI,
+      driverPodChecker = (driverPod: Pod) => {
+        doBasicDriverPodCheck(driverPod)
+        checkTestSecret(driverPod, withInitContainer = true)
+      },
+      executorPodChecker = (executorPod: Pod) => {
+        doBasicExecutorPodCheck(executorPod)
+        checkTestSecret(executorPod, withInitContainer = true)
+      })
   }
 
   private def runSparkPiAndVerifyCompletion(
       appResource: String = CONTAINER_LOCAL_SPARK_DISTRO_EXAMPLES_JAR,
-      driverPodChecker: Pod => Unit = doBasicDriverPodCheck): Unit = {
+      driverPodChecker: Pod => Unit = doBasicDriverPodCheck,
+      executorPodChecker: Pod => Unit = doBasicExecutorPodCheck,
+      appArgs: Array[String] = Array.empty[String]): Unit = {
     runSparkApplicationAndVerifyCompletion(
       appResource,
       SPARK_PI_MAIN_CLASS,
       Seq("Pi is roughly 3"),
-      Array.empty[String],
-      driverPodChecker)
+      appArgs,
+      driverPodChecker,
+      executorPodChecker)
   }
 
   private def runSparkApplicationAndVerifyCompletion(
       appResource: String,
       mainClass: String,
       expectedLogOnCompletion: Seq[String],
       appArgs: Array[String],
-      driverPodChecker: Pod => Unit): Unit = {
+      driverPodChecker: Pod => Unit,
+      executorPodChecker: Pod => Unit): Unit = {
     val appArguments = SparkAppArguments(
       mainAppResource = appResource,
-      mainClass = mainClass)
+      mainClass = mainClass,
+      appArgs = appArgs)
     SparkAppLauncher.launch(appArguments, sparkAppConf, TIMEOUT.value.toSeconds.toInt)
+
     val driverPod = kubernetesTestComponents.kubernetesClient
       .pods()
       .withLabel("spark-app-locator", APP_LOCATOR_LABEL)
+      .withLabel("spark-role", "driver")
       .list()
       .getItems
       .get(0)
     driverPodChecker(driverPod)
+
+    val executorPods = kubernetesTestComponents.kubernetesClient
+      .pods()
+      .withLabel("spark-app-locator", APP_LOCATOR_LABEL)
+      .withLabel("spark-role", "executor")
+      .list()
+      .getItems
+    executorPods.asScala.foreach { pod =>
+      executorPodChecker(pod)
+    }
+
     Eventually.eventually(TIMEOUT, INTERVAL) {
       expectedLogOnCompletion.foreach { e =>
         assert(kubernetesTestComponents.kubernetesClient
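For context on what the secret tests verify: a secret mounted as a volume exposes each key as a file under the mount path, so inside the driver or executor the test secret is readable as a file. A hypothetical check (not part of this commit), using the constants defined at the bottom of the suite:

import java.nio.file.{Files, Paths}

// TEST_SECRET_KEY ("test-key") becomes a file under TEST_SECRET_MOUNT_PATH
// ("/etc/secrets") holding TEST_SECRET_VALUE ("test-data").
val secretValue = new String(Files.readAllBytes(Paths.get("/etc/secrets/test-key")))
assert(secretValue == "test-data")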
@@ -145,7 +214,64 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
   }
 
   private def doBasicDriverPodCheck(driverPod: Pod): Unit = {
-    assert(driverPod.getMetadata.getLabels.get("spark-role") === "driver")
+    assert(driverPod.getSpec.getContainers.get(0).getImage === "spark-driver:latest")
+    assert(driverPod.getSpec.getContainers.get(0).getName === "spark-kubernetes-driver")
+  }
+
+  private def doBasicExecutorPodCheck(executorPod: Pod): Unit = {
+    assert(executorPod.getSpec.getContainers.get(0).getImage === "spark-executor:latest")
+    assert(executorPod.getSpec.getContainers.get(0).getName === "executor")
+  }
+
+  private def checkCustomSettings(pod: Pod): Unit = {
+    assert(pod.getMetadata.getLabels.get("label1") === "label1-value")
+    assert(pod.getMetadata.getLabels.get("label2") === "label2-value")
+    assert(pod.getMetadata.getAnnotations.get("annotation1") === "annotation1-value")
+    assert(pod.getMetadata.getAnnotations.get("annotation2") === "annotation2-value")
+
+    val container = pod.getSpec.getContainers.get(0)
+    val envVars = container
+      .getEnv
+      .asScala
+      .map { env =>
+        (env.getName, env.getValue)
+      }
+      .toMap
+    assert(envVars("ENV1") === "VALUE1")
+    assert(envVars("ENV2") === "VALUE2")
+  }
+
+  private def createTestSecret(): Unit = {
+    testBackend.getKubernetesClient.secrets
+      .createNew()
+      .editOrNewMetadata()
+        .withName(TEST_SECRET_NAME)
+        .withNamespace(kubernetesTestComponents.namespace)
+        .endMetadata()
+      .addToStringData(TEST_SECRET_KEY, TEST_SECRET_VALUE)
+      .done()
+  }
+
+  private def checkTestSecret(pod: Pod, withInitContainer: Boolean = false): Unit = {
+    val testSecretVolume = pod.getSpec.getVolumes.asScala.filter { volume =>
+      volume.getName == s"$TEST_SECRET_NAME-volume"
+    }
+    assert(testSecretVolume.size === 1)
+    assert(testSecretVolume.head.getSecret.getSecretName === TEST_SECRET_NAME)
+
+    checkTestSecretInContainer(pod.getSpec.getContainers.get(0))
+
+    if (withInitContainer) {
+      checkTestSecretInContainer(pod.getSpec.getInitContainers.get(0))
+    }
+  }
+
+  private def checkTestSecretInContainer(container: Container): Unit = {
+    val testSecret = container.getVolumeMounts.asScala.filter { mount =>
+      mount.getName == s"$TEST_SECRET_NAME-volume"
+    }
+    assert(testSecret.size === 1)
+    assert(testSecret.head.getMountPath === TEST_SECRET_MOUNT_PATH)
   }
 }
 
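A note on createTestSecret: addToStringData fills the Secret's stringData field, so the value is supplied as plain text and stored base64-encoded by the API server under data. For illustration only, a sketch of the equivalent object built directly with fabric8's SecretBuilder rather than the client's createNew() DSL (the namespace placeholder is an assumption):

import io.fabric8.kubernetes.api.model.SecretBuilder

// Hypothetical standalone construction of the same secret object.
val secret = new SecretBuilder()
  .withNewMetadata()
    .withName("test-secret")            // TEST_SECRET_NAME
    .withNamespace("<test-namespace>")  // kubernetesTestComponents.namespace
  .endMetadata()
  .addToStringData("test-key", "test-data")  // TEST_SECRET_KEY -> TEST_SECRET_VALUE
  .build()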
@@ -161,5 +287,13 @@ private[spark] object KubernetesSuite {
     s"${SPARK_DISTRO_EXAMPLES_JAR_FILE.getName}"
   val SPARK_PI_MAIN_CLASS: String = "org.apache.spark.examples.SparkPi"
 
+  val TEST_SECRET_NAME = "test-secret"
+  val TEST_SECRET_KEY = "test-key"
+  val TEST_SECRET_VALUE = "test-data"
+  val TEST_SECRET_MOUNT_PATH = "/etc/secrets"
+
+  val REMOTE_EXAMPLES_JAR_URI =
+    "https://storage.googleapis.com/spark-k8s-integration-tests/jars/spark-examples_2.11-2.3.0.jar"
+
   case object ShuffleNotReadyException extends Exception
 }

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala

Lines changed: 6 additions & 3 deletions
@@ -47,7 +47,7 @@ private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesCl
     val namespaceList = defaultClient
       .namespaces()
       .list()
-      .getItems()
+      .getItems
       .asScala
     require(!namespaceList.exists(_.getMetadata.getName == namespace))
   }
@@ -91,7 +91,8 @@ private[spark] class SparkAppConf {
 
 private[spark] case class SparkAppArguments(
     mainAppResource: String,
-    mainClass: String)
+    mainClass: String,
+    appArgs: Array[String])
 
 private[spark] object SparkAppLauncher extends Logging {
 
@@ -104,7 +105,9 @@ private[spark] object SparkAppLauncher extends Logging {
       "--deploy-mode", "cluster",
      "--class", appArguments.mainClass,
      "--master", appConf.get("spark.master")
-    ) ++ appConf.toStringArray :+ appArguments.mainAppResource
+    ) ++ appConf.toStringArray :+
+      appArguments.mainAppResource :+
+      appArguments.appArgs.mkString(" ")
     logInfo(s"Launching a spark app with command line: ${commandLine.mkString(" ")}")
     ProcessUtils.executeProcess(commandLine, timeoutSecs)
   }
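Worth noting about the launcher change: appArgs.mkString(" ") joins all application arguments into a single trailing token. For the one-argument tests added in this commit the result is identical to passing the argument on its own; with multiple arguments the two forms would differ. A small illustration:

// With one argument the joined form equals the single token; with several,
// "5 7" would reach the app as one argument rather than two.
val oneArg = Array("5").mkString(" ")       // "5"
val twoArgs = Array("5", "7").mkString(" ") // "5 7"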

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/IntegrationTestBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ import org.apache.spark.deploy.k8s.integrationtest.backend.minikube.MinikubeTest
 private[spark] trait IntegrationTestBackend {
   def name(): String
   def initialize(): Unit
-  def getKubernetesClient(): DefaultKubernetesClient
+  def getKubernetesClient: DefaultKubernetesClient
   def cleanUp(): Unit = {}
 }
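The signature change follows the Scala convention that a parameterless, side-effect-free accessor is declared without parentheses, which is why the new createTestSecret helper in KubernetesSuite can call it like a field:

// Accessor without parentheses reads like field access at the call site.
val client = testBackend.getKubernetesClient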

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 3 additions & 1 deletion
@@ -28,14 +28,15 @@ import org.apache.spark.deploy.k8s.integrationtest.constants.SPARK_DISTRO_PATH
 import org.apache.spark.deploy.k8s.integrationtest.Logging
 
 private[spark] class SparkDockerImageBuilder
-  (private val dockerEnv: Map[String, String]) extends Logging{
+  (private val dockerEnv: Map[String, String]) extends Logging {
 
   private val DOCKER_BUILD_PATH = SPARK_DISTRO_PATH
   // Dockerfile paths must be relative to the build path.
   private val DOCKERFILES_DIR = "kubernetes/dockerfiles/"
   private val BASE_DOCKER_FILE = DOCKERFILES_DIR + "spark-base/Dockerfile"
   private val DRIVER_DOCKER_FILE = DOCKERFILES_DIR + "driver/Dockerfile"
   private val EXECUTOR_DOCKER_FILE = DOCKERFILES_DIR + "executor/Dockerfile"
+  private val INIT_CONTAINER_DOCKER_FILE = DOCKERFILES_DIR + "init-container/Dockerfile"
   private val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
   private val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
   private val dockerHost = dockerEnv.getOrElse("DOCKER_HOST",
@@ -64,6 +65,7 @@ private[spark] class SparkDockerImageBuilder
     buildImage("spark-base", BASE_DOCKER_FILE)
     buildImage("spark-driver", DRIVER_DOCKER_FILE)
     buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
+    buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
   }
 
   private def buildImage(name: String, dockerFile: String): Unit = {
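The image names built here line up with the rest of the commit: the suite's basic pod checks assert on spark-driver:latest and spark-executor:latest, runner.sh passes the matching -Dspark.docker.test.*Image properties, and the new spark-init image backs the init-container tests. A summary of the wiring from this diff (the :latest tag is the suite's assumed default):

// Image name -> Dockerfile, as wired in this commit.
val images = Map(
  "spark-base" -> "kubernetes/dockerfiles/spark-base/Dockerfile",
  "spark-driver" -> "kubernetes/dockerfiles/driver/Dockerfile",
  "spark-executor" -> "kubernetes/dockerfiles/executor/Dockerfile",
  "spark-init" -> "kubernetes/dockerfiles/init-container/Dockerfile")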
