
Commit 8dacb19

completed unit tests w/o UGI mocking

1 parent 7a0b4e4 commit 8dacb19

4 files changed: +272 −30 lines
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/HadoopConfBootstrapSuite.scala

Lines changed: 75 additions & 0 deletions

@@ -0,0 +1,75 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.deploy.kubernetes.submit

import java.io.File
import java.util.UUID

import scala.collection.JavaConverters._

import com.google.common.io.Files
import io.fabric8.kubernetes.api.model._

import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.kubernetes.{HadoopConfBootstrapImpl, PodWithMainContainer}
import org.apache.spark.deploy.kubernetes.constants._
import org.apache.spark.util.Utils


private[spark] class HadoopConfBootstrapSuite extends SparkFunSuite {
  private val CONFIG_MAP_NAME = "config-map"
  private val TEMP_HADOOP_FILE = createTempFile("core-site.xml")
  private val HADOOP_FILES = Seq(TEMP_HADOOP_FILE)

  test("Test of bootstrapping hadoop_conf_dir files") {
    val hadoopConfStep = new HadoopConfBootstrapImpl(
      CONFIG_MAP_NAME,
      HADOOP_FILES)
    val expectedKeyPaths = Seq(
      new KeyToPathBuilder()
        .withKey(TEMP_HADOOP_FILE.toPath.getFileName.toString)
        .withPath(TEMP_HADOOP_FILE.toPath.getFileName.toString)
        .build())
    val expectedPod = new PodBuilder()
      .editOrNewSpec()
        .addNewVolume()
          .withName(HADOOP_FILE_VOLUME)
          .withNewConfigMap()
            .withName(CONFIG_MAP_NAME)
            .withItems(expectedKeyPaths.asJava)
          .endConfigMap()
        .endVolume()
      .endSpec()
      .build()
    val podWithMain = PodWithMainContainer(
      new PodBuilder().withNewSpec().endSpec().build(),
      new Container())
    val returnedPodContainer = hadoopConfStep.bootstrapMainContainerAndVolumes(podWithMain)
    assert(expectedPod === returnedPodContainer.pod)
    assert(returnedPodContainer.mainContainer.getVolumeMounts.asScala.map(vm =>
      (vm.getName, vm.getMountPath)).head === (HADOOP_FILE_VOLUME, HADOOP_CONF_DIR_PATH))
    assert(returnedPodContainer.mainContainer.getEnv.asScala.head ===
      new EnvVarBuilder().withName(ENV_HADOOP_CONF_DIR).withValue(HADOOP_CONF_DIR_PATH).build())
    assert(returnedPodContainer.mainContainer.getEnv.asScala(1).getName === ENV_SPARK_USER)
  }

  private def createTempFile(contents: String): File = {
    val dir = Utils.createTempDir()
    val file = new File(dir, s"${UUID.randomUUID().toString}")
    Files.write(contents.getBytes, file)
    file
  }
}
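These assertions pin down the contract of HadoopConfBootstrapImpl: the pod gains a ConfigMap-backed volume named HADOOP_FILE_VOLUME, the main container mounts it at HADOOP_CONF_DIR_PATH and exports ENV_HADOOP_CONF_DIR, and an ENV_SPARK_USER variable is appended. Only the name of that last variable is asserted, presumably because its value comes from Hadoop's UserGroupInformation (the "UGI" of the commit message), which this commit stops mocking. A minimal sketch of a bootstrap that would satisfy this suite; the class name and the UGI-derived user value are assumptions, not the committed HadoopConfBootstrapImpl:

// Illustrative sketch only; the committed HadoopConfBootstrapImpl may differ.
import java.io.File

import scala.collection.JavaConverters._

import io.fabric8.kubernetes.api.model._
import org.apache.hadoop.security.UserGroupInformation

import org.apache.spark.deploy.kubernetes.PodWithMainContainer
import org.apache.spark.deploy.kubernetes.constants._

private[spark] class SketchHadoopConfBootstrap(
    hadoopConfConfigMapName: String,
    hadoopConfigFiles: Seq[File]) {

  def bootstrapMainContainerAndVolumes(original: PodWithMainContainer)
      : PodWithMainContainer = {
    // Project every conf file into the ConfigMap-backed volume under its own name.
    val keyPaths = hadoopConfigFiles.map { file =>
      val fileName = file.toPath.getFileName.toString
      new KeyToPathBuilder().withKey(fileName).withPath(fileName).build()
    }
    val hadoopSupportedPod = new PodBuilder(original.pod)
      .editOrNewSpec()
        .addNewVolume()
          .withName(HADOOP_FILE_VOLUME)
          .withNewConfigMap()
            .withName(hadoopConfConfigMapName)
            .withItems(keyPaths.asJava)
          .endConfigMap()
        .endVolume()
      .endSpec()
      .build()
    val mainContainer = new ContainerBuilder(original.mainContainer)
      .addNewVolumeMount()
        .withName(HADOOP_FILE_VOLUME)
        .withMountPath(HADOOP_CONF_DIR_PATH)
      .endVolumeMount()
      .addNewEnv()
        .withName(ENV_HADOOP_CONF_DIR)
        .withValue(HADOOP_CONF_DIR_PATH)
      .endEnv()
      .addNewEnv()
        .withName(ENV_SPARK_USER)
        // UGI-derived value; the suite above asserts only the variable name.
        .withValue(UserGroupInformation.getCurrentUser.getShortUserName)
      .endEnv()
      .build()
    PodWithMainContainer(hadoopSupportedPod, mainContainer)
  }
}

Keeping the UGI call out of the assertions is what lets the suite run without mocking a static Hadoop singleton.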
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/KerberosTokenConfBootstrapSuite.scala

Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.deploy.kubernetes.submit

import java.io.File
import java.util.UUID

import scala.collection.JavaConverters._
import com.google.common.io.Files
import io.fabric8.kubernetes.api.model._
import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.kubernetes.{HadoopConfBootstrapImpl, KerberosTokenConfBootstrapImpl, PodWithMainContainer}
import org.apache.spark.deploy.kubernetes.constants._
import org.apache.spark.util.Utils


private[spark] class KerberosTokenConfBootstrapSuite extends SparkFunSuite {
  private val SECRET_NAME = "dtSecret"
  private val SECRET_LABEL = "dtLabel"
  private val TEST_SPARK_USER = "hdfs"

  test("Test of bootstrapping kerberos secrets and env") {
    val kerberosConfStep = new KerberosTokenConfBootstrapImpl(
      SECRET_NAME,
      SECRET_LABEL,
      TEST_SPARK_USER)
    val expectedPod = new PodBuilder()
      .editOrNewSpec()
        .addNewVolume()
          .withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
          .withNewSecret()
            .withSecretName(SECRET_NAME)
          .endSecret()
        .endVolume()
      .endSpec()
      .build()
    val podWithMain = PodWithMainContainer(
      new PodBuilder().withNewSpec().endSpec().build(),
      new Container())
    val returnedPodContainer = kerberosConfStep.bootstrapMainContainerAndVolumes(podWithMain)
    assert(expectedPod === returnedPodContainer.pod)
    assert(returnedPodContainer.mainContainer.getVolumeMounts.asScala.map(vm =>
      (vm.getName, vm.getMountPath)).head ===
      (SPARK_APP_HADOOP_SECRET_VOLUME_NAME, SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR))
    assert(returnedPodContainer.mainContainer.getEnv.asScala.head.getName ===
      ENV_HADOOP_TOKEN_FILE_LOCATION)
    assert(returnedPodContainer.mainContainer.getEnv.asScala(1).getName === ENV_SPARK_USER)
  }
}
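The contract here is a Secret-backed volume plus two environment variables, with ENV_HADOOP_TOKEN_FILE_LOCATION again asserted by name only. The pod-side volume wiring mirrors the previous sketch, so the following sketch shows only the container side; the token-file path layout is an assumption, not the committed KerberosTokenConfBootstrapImpl:

// Illustrative sketch of the container-side wiring implied by the suite above.
import io.fabric8.kubernetes.api.model.{Container, ContainerBuilder}

import org.apache.spark.deploy.kubernetes.constants._

private[spark] object SketchKerberosTokenEnv {
  def addTokenEnv(container: Container, secretItemKey: String, sparkUser: String): Container =
    new ContainerBuilder(container)
      .addNewVolumeMount()
        .withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
        .withMountPath(SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR)
      .endVolumeMount()
      .addNewEnv()
        .withName(ENV_HADOOP_TOKEN_FILE_LOCATION)
        // Assumption: the token file sits under the mounted secret directory,
        // keyed by the secret item label ("dtLabel" in the test).
        .withValue(s"$SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR/$secretItemKey")
      .endEnv()
      .addNewEnv()
        .withName(ENV_SPARK_USER)
        // Unlike the conf bootstrap, the user ("hdfs" in the test) is passed
        // in explicitly rather than read from UGI.
        .withValue(sparkUser)
      .endEnv()
      .build()
}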
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/HadoopConfigBootstrapStepSuite.scala

Lines changed: 90 additions & 0 deletions

@@ -0,0 +1,90 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.deploy.kubernetes.submit.submitsteps

import scala.collection.JavaConverters._

import io.fabric8.kubernetes.api.model._
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Matchers.any
import org.mockito.Mockito.when
import org.scalatest.BeforeAndAfter

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.kubernetes.constants._
import org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps.{HadoopConfigSpec, HadoopConfigurationStep}


private[spark] class HadoopConfigBootstrapStepSuite extends SparkFunSuite with BeforeAndAfter {
  private val CONFIG_MAP_NAME = "config-map"
  private val POD_LABEL = Map("bootstrap" -> "true")
  private val DRIVER_CONTAINER_NAME = "driver-container"
  private val EXPECTED_SECRET = new SecretBuilder()
    .withNewMetadata()
      .withName(HADOOP_KERBEROS_SECRET_NAME)
    .endMetadata()
    .addToData("data", "secretata")
    .build()

  @Mock
  private var hadoopConfigStep: HadoopConfigurationStep = _

  before {
    MockitoAnnotations.initMocks(this)
    when(hadoopConfigStep.configureContainers(any[HadoopConfigSpec])).thenReturn(
      HadoopConfigSpec(
        configMapProperties = Map("data" -> "dataBytesToString"),
        driverPod = new PodBuilder()
          .withNewMetadata()
            .addToLabels("bootstrap", "true")
          .endMetadata()
          .withNewSpec().endSpec()
          .build(),
        driverContainer = new ContainerBuilder().withName(DRIVER_CONTAINER_NAME).build(),
        additionalDriverSparkConf = Map("sparkConf" -> "confValue"),
        dtSecret = Some(EXPECTED_SECRET),
        dtSecretName = HADOOP_KERBEROS_SECRET_NAME,
        dtSecretLabel = ""))
  }

  test("Test modification of driverSpec with Hadoop Steps") {
    val hadoopConfStep = new HadoopConfigBootstrapStep(
      Seq(hadoopConfigStep),
      CONFIG_MAP_NAME)
    val expectedDriverSparkConf = new SparkConf(true)
      .set(HADOOP_CONFIG_MAP_SPARK_CONF_NAME, CONFIG_MAP_NAME)
      .set("sparkConf", "confValue")
    val expectedConfigMap = new ConfigMapBuilder()
      .withNewMetadata()
        .withName(CONFIG_MAP_NAME)
      .endMetadata()
      .addToData(Map("data" -> "dataBytesToString").asJava)
      .build()
    val expectedResources = Seq(expectedConfigMap, EXPECTED_SECRET)
    val driverSpec = KubernetesDriverSpec(
      driverPod = new Pod(),
      driverContainer = new Container(),
      driverSparkConf = new SparkConf(true),
      otherKubernetesResources = Seq.empty[HasMetadata])
    val returnContainerSpec = hadoopConfStep.configureDriver(driverSpec)
    assert(expectedDriverSparkConf.getAll === returnContainerSpec.driverSparkConf.getAll)
    assert(returnContainerSpec.driverContainer.getName == DRIVER_CONTAINER_NAME)
    assert(returnContainerSpec.driverPod.getMetadata.getLabels.asScala === POD_LABEL)
    assert(returnContainerSpec.otherKubernetesResources === expectedResources)
  }
}
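Because the single HadoopConfigurationStep is mocked, what this suite really specifies is the plumbing of HadoopConfigBootstrapStep.configureDriver: thread a HadoopConfigSpec through every hadoop step, copy the resulting pod, container, and extra Spark conf back into the KubernetesDriverSpec, record the ConfigMap name under HADOOP_CONFIG_MAP_SPARK_CONF_NAME, and append the built ConfigMap plus any delegation-token Secret to otherKubernetesResources. A sketch of that plumbing; it assumes KubernetesDriverSpec is a case class (it is constructed with named arguments above) and the initial-spec defaults marked in the comments:

// Sketch of configureDriver's expected behavior; initial-spec defaults are
// assumptions inferred from the suite, not the committed implementation.
// Assumes the org.apache.spark.deploy.kubernetes.submit.submitsteps package.
import scala.collection.JavaConverters._

import io.fabric8.kubernetes.api.model.ConfigMapBuilder

import org.apache.spark.deploy.kubernetes.constants._
import org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps.{HadoopConfigSpec, HadoopConfigurationStep}

private[spark] class SketchHadoopConfigBootstrapStep(
    hadoopConfigurationSteps: Seq[HadoopConfigurationStep],
    hadoopConfigMapName: String) {

  def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
    val initialSpec = HadoopConfigSpec(
      additionalDriverSparkConf = Map.empty[String, String],
      driverPod = driverSpec.driverPod,
      driverContainer = driverSpec.driverContainer,
      configMapProperties = Map.empty[String, String],
      dtSecret = None,
      dtSecretName = HADOOP_KERBEROS_SECRET_NAME,  // assumed default
      dtSecretLabel = "")                           // assumed default
    // Each hadoop step refines the spec produced by the previous one.
    val finalSpec = hadoopConfigurationSteps.foldLeft(initialSpec) {
      (spec, step) => step.configureContainers(spec)
    }
    val configMap = new ConfigMapBuilder()
      .withNewMetadata()
        .withName(hadoopConfigMapName)
      .endMetadata()
      .addToData(finalSpec.configMapProperties.asJava)
      .build()
    driverSpec.copy(
      driverPod = finalSpec.driverPod,
      driverContainer = finalSpec.driverContainer,
      driverSparkConf = driverSpec.driverSparkConf.clone()
        .setAll(finalSpec.additionalDriverSparkConf)
        .set(HADOOP_CONFIG_MAP_SPARK_CONF_NAME, hadoopConfigMapName),
      // ConfigMap first, then the optional DT secret, matching expectedResources.
      otherKubernetesResources =
        driverSpec.otherKubernetesResources ++ Seq(configMap) ++ finalSpec.dtSecret.toSeq)
  }
}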

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopStepsOrchestratorSuite.scala

Lines changed: 44 additions & 30 deletions
@@ -38,35 +38,49 @@ private[spark] class HadoopStepsOrchestratorSuite extends SparkFunSuite {
     assert(true)
   }
 
-  // test("Testing with Keytab Kerberos Login") {
-  //   val sparkTestConf2 = new SparkConf(true)
-  //     .set(KUBERNETES_KERBEROS_SUPPORT, true)
-  //     .set(KUBERNETES_KERBEROS_KEYTAB, "keytab.file")
-  //     .set(KUBERNETES_KERBEROS_PRINCIPAL, "user@kerberos")
-  //   val hadoopOrchestrator = new HadoopStepsOrchestrator(
-  //     NAMESPACE,
-  //     HADOOP_CONFIG_MAP,
-  //     sparkTestConf2,
-  //     HADOOP_CONF_DIR_VAL)
-  //   val steps = hadoopOrchestrator.getHadoopSteps()
-  //   assert(steps.length === 2)
-  //   assert(steps.head.isInstanceOf[HadoopConfMounterStep])
-  //   assert(steps(1).isInstanceOf[HadoopKerberosKeytabResolverStep])
-  // }
+  test("Testing with Keytab Kerberos Login") {
+    val sparkTestConf = new SparkConf(true)
+      .set(KUBERNETES_KERBEROS_SUPPORT, true)
+      .set(KUBERNETES_KERBEROS_KEYTAB, "keytab.file")
+      .set(KUBERNETES_KERBEROS_PRINCIPAL, "user@kerberos")
+    val hadoopOrchestrator = new HadoopStepsOrchestrator(
+      NAMESPACE,
+      HADOOP_CONFIG_MAP,
+      sparkTestConf,
+      HADOOP_CONF_DIR_VAL)
+    val steps = hadoopOrchestrator.getHadoopSteps()
+    assert(steps.length === 2)
+    assert(steps.head.isInstanceOf[HadoopConfMounterStep])
+    assert(steps(1).isInstanceOf[HadoopKerberosKeytabResolverStep])
+  }
+
+  test("Testing with kinit Kerberos Login") {
+    val sparkTestConf = new SparkConf(true)
+      .set(KUBERNETES_KERBEROS_SUPPORT, true)
+    val hadoopOrchestrator = new HadoopStepsOrchestrator(
+      NAMESPACE,
+      HADOOP_CONFIG_MAP,
+      sparkTestConf,
+      HADOOP_CONF_DIR_VAL)
+    val steps = hadoopOrchestrator.getHadoopSteps()
+    assert(steps.length === 2)
+    assert(steps.head.isInstanceOf[HadoopConfMounterStep])
+    assert(steps(1).isInstanceOf[HadoopKerberosKeytabResolverStep])
+  }
 
-  // test("Testing with Keytab Kerberos Login") {
-  //   val sparkTestConf3 = new SparkConf(true)
-  //     .set(KUBERNETES_KERBEROS_SUPPORT, true)
-  //     .set(KUBERNETES_KERBEROS_DT_SECRET_NAME, "dtSecret")
-  //     .set(KUBERNETES_KERBEROS_DT_SECRET_LABEL, "dtLabel")
-  //   val hadoopOrchestrator = new HadoopStepsOrchestrator(
-  //     NAMESPACE,
-  //     HADOOP_CONFIG_MAP,
-  //     sparkTestConf3,
-  //     HADOOP_CONF_DIR_VAL)
-  //   val steps = hadoopOrchestrator.getHadoopSteps()
-  //   assert(steps.length === 2)
-  //   assert(steps.head.isInstanceOf[HadoopConfMounterStep])
-  //   assert(steps(1).isInstanceOf[HadoopKerberosSecretResolverStep])
-  // }
+  test("Testing with Secret stored Kerberos") {
+    val sparkTestConf = new SparkConf(true)
+      .set(KUBERNETES_KERBEROS_SUPPORT, true)
+      .set(KUBERNETES_KERBEROS_DT_SECRET_NAME, "dtSecret")
+      .set(KUBERNETES_KERBEROS_DT_SECRET_LABEL, "dtLabel")
+    val hadoopOrchestrator = new HadoopStepsOrchestrator(
+      NAMESPACE,
+      HADOOP_CONFIG_MAP,
+      sparkTestConf,
+      HADOOP_CONF_DIR_VAL)
+    val steps = hadoopOrchestrator.getHadoopSteps()
+    assert(steps.length === 2)
+    assert(steps.head.isInstanceOf[HadoopConfMounterStep])
+    assert(steps(1).isInstanceOf[HadoopKerberosSecretResolverStep])
+  }
 }

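Taken together, the three tests fix the orchestrator's selection logic: a HadoopConfMounterStep always comes first, and the second step is HadoopKerberosSecretResolverStep when a delegation-token secret is named, or HadoopKerberosKeytabResolverStep in both the keytab and the kinit (local TGT cache) cases. A sketch of that branching; the step factories are stubbed as thunks because their constructor signatures are not part of this commit, and the optional config-entry read is an assumption about this fork's config object:

// Sketch of the branching HadoopStepsOrchestratorSuite exercises; only the
// selection logic is the point, not the step construction.
import org.apache.spark.SparkConf
import org.apache.spark.deploy.kubernetes.config._  // assumed home of the entries
import org.apache.spark.deploy.kubernetes.submit.submitsteps.hadoopsteps.HadoopConfigurationStep

private[spark] class SketchHadoopStepsOrchestrator(
    submissionSparkConf: SparkConf,
    confMounterStep: () => HadoopConfigurationStep,
    keytabResolverStep: () => HadoopConfigurationStep,
    secretResolverStep: () => HadoopConfigurationStep) {

  def getHadoopSteps(): Seq[HadoopConfigurationStep] = {
    // Assumed to be an OptionalConfigEntry[String], so get returns Option.
    val maybeSecretName = submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_NAME)
    val kerberosStep = maybeSecretName match {
      // "Testing with Secret stored Kerberos": reuse the existing DT secret.
      case Some(_) => secretResolverStep()
      // Keytab login and the kinit fallback both expect the keytab resolver,
      // per the assertions above.
      case None => keytabResolverStep()
    }
    Seq(confMounterStep(), kerberosStep)
  }
}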