
Commit d9b7b50

cleanup and various small fixes
1 parent 8dacb19 commit d9b7b50

File tree

10 files changed: +36 -111 lines changed

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/KerberosTokenConfBootstrapSuite.scala

Lines changed: 3 additions & 6 deletions
@@ -16,16 +16,13 @@
  */
 package org.apache.spark.deploy.kubernetes.submit
 
-import java.io.File
-import java.util.UUID
-
 import scala.collection.JavaConverters._
-import com.google.common.io.Files
+
 import io.fabric8.kubernetes.api.model._
+
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.deploy.kubernetes.{HadoopConfBootstrapImpl, KerberosTokenConfBootstrapImpl, PodWithMainContainer}
+import org.apache.spark.deploy.kubernetes.{KerberosTokenConfBootstrapImpl, PodWithMainContainer}
 import org.apache.spark.deploy.kubernetes.constants._
-import org.apache.spark.util.Utils
 
 
 private[spark] class KerberosTokenConfBootstrapSuite extends SparkFunSuite {
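
Note on the hunk above: besides dropping the now-unused imports (java.io.File, java.util.UUID, com.google.common.io.Files, HadoopConfBootstrapImpl, org.apache.spark.util.Utils), the added blank lines restore Spark's import-grouping convention. A minimal illustration of that convention (the example lines are illustrative, not part of this diff):

    // Spark style separates import groups with blank lines:
    import scala.collection.JavaConverters._   // 1. java/scala standard library

    import io.fabric8.kubernetes.api.model._   // 2. third-party libraries

    import org.apache.spark.SparkFunSuite      // 3. org.apache.spark itself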

resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/kerberos-test/Dockerfile

Lines changed: 0 additions & 1 deletion
@@ -24,5 +24,4 @@ FROM spark-base
 RUN apk add --no-cache --update krb5 krb5-libs
 COPY examples /opt/spark/examples
 COPY test-scripts/test-env.sh /opt/spark/
-COPY test-scripts/test-env2.sh /opt/spark/
 COPY hconf /opt/spark/hconf

resource-managers/kubernetes/integration-tests/kerberos-yml/namenode-hadoop-pv.yml

Lines changed: 0 additions & 14 deletions
This file was deleted.

resource-managers/kubernetes/integration-tests/kerberos-yml/server-keytab-pv.yml

Lines changed: 0 additions & 14 deletions
This file was deleted.

resource-managers/kubernetes/integration-tests/kerberos-yml/test-env2.sh

Lines changed: 0 additions & 25 deletions
This file was deleted.

resource-managers/kubernetes/integration-tests/pom.xml

Lines changed: 0 additions & 1 deletion
@@ -253,7 +253,6 @@
         <directory>kerberos-yml</directory>
         <includes>
           <include>test-env.sh</include>
-          <include>test-env2.sh</include>
         </includes>
       </resource>
     </resources>

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 0 additions & 33 deletions
@@ -128,39 +128,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       }
     }
   }
-  // test("Secure HDFS test with kinit") {
-  //   assume(testBackend.name == MINIKUBE_TEST_BACKEND)
-  //   launchKerberizedCluster()
-  //   createKerberosTestPod(
-  //     CONTAINER_LOCAL_MAIN_APP_RESOURCE,
-  //     HDFS_TEST_CLASS,
-  //     APP_LOCATOR_LABEL,
-  //     "kerberos-yml/kerberos-test2.yml")
-  //   val kubernetesClient = kubernetesTestComponents.kubernetesClient
-  //   val driverWatcherCache = new KerberosDriverWatcherCache(
-  //     kubernetesClient,
-  //     Map("spark-app-locator" -> APP_LOCATOR_LABEL))
-  //   driverWatcherCache.start()
-  //   driverWatcherCache.stop()
-  //   val expectedLogOnCompletion = Seq(
-  //     "Returned length(s) of: 1",
-  //     "File contents: [This is an awesome word count file]")
-  //   val driverPod = kubernetesClient
-  //     .pods()
-  //     .withLabel("spark-app-locator", APP_LOCATOR_LABEL)
-  //     .list()
-  //     .getItems
-  //     .get(0)
-  //   Eventually.eventually(TIMEOUT, INTERVAL) {
-  //     expectedLogOnCompletion.foreach { e =>
-  //       assert(kubernetesClient
-  //         .pods()
-  //         .withName(driverPod.getMetadata.getName)
-  //         .getLog
-  //         .contains(e), "The application did not complete.")
-  //     }
-  //   }
-  // }
 
   test("Run PySpark Job on file from SUBMITTER with --py-files") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
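
The block removed above was a commented-out variant of the secure-HDFS test; it relied on kerberos-yml/kerberos-test2.yml and the test-env2.sh script this commit also deletes, so removing it appears to drop no live coverage. For reference, the completion check it used — and which the suite's remaining tests still follow — polls the driver pod's log until every expected line appears (all names here come from the deleted block; TIMEOUT and INTERVAL are the suite's existing patience settings):

    Eventually.eventually(TIMEOUT, INTERVAL) {
      expectedLogOnCompletion.foreach { e =>
        // Re-read the driver pod's log on each attempt until the expected
        // line shows up or the timeout expires.
        assert(kubernetesClient
          .pods()
          .withName(driverPod.getMetadata.getName)
          .getLog
          .contains(e), "The application did not complete.")
      }
    }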

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 10 additions & 10 deletions
@@ -87,16 +87,16 @@ private[spark] class SparkDockerImageBuilder
     if (exitCode != 0) {
       logInfo(s"exitCode: $exitCode")
     }
-    buildImage("spark-base", BASE_DOCKER_FILE)
-    buildImage("spark-driver", DRIVER_DOCKER_FILE)
-    buildImage("spark-driver-py", DRIVERPY_DOCKER_FILE)
-    buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
-    buildImage("spark-executor-py", EXECUTORPY_DOCKER_FILE)
-    buildImage("spark-shuffle", SHUFFLE_SERVICE_DOCKER_FILE)
-    buildImage("spark-resource-staging-server", STAGING_SERVER_DOCKER_FILE)
-    buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
-    buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
-    buildImage("kerberos-test", KERBEROS_DOCKER_FILE)
+    // buildImage("spark-base", BASE_DOCKER_FILE)
+    // buildImage("spark-driver", DRIVER_DOCKER_FILE)
+    // buildImage("spark-driver-py", DRIVERPY_DOCKER_FILE)
+    // buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
+    // buildImage("spark-executor-py", EXECUTORPY_DOCKER_FILE)
+    // buildImage("spark-shuffle", SHUFFLE_SERVICE_DOCKER_FILE)
+    // buildImage("spark-resource-staging-server", STAGING_SERVER_DOCKER_FILE)
+    // buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
+    // buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
+    buildImage("kerberos-test", KERBEROS_DOCKER_FILE)
   }
 
   private def buildImage(name: String, dockerFile: String): Unit = {
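
Commenting out all but the kerberos-test build looks like a local-iteration shortcut: the other images rarely change while the Kerberos test image is under active development, and skipping them cuts minutes off each run. A hypothetical, opt-in version of the same shortcut (not part of this commit; the environment variable name is invented here) would keep fresh checkouts building every image by default:

    // Hypothetical sketch: build everything unless the developer opts into
    // the fast path with ONLY_KERBEROS_IMAGE=true. Not in this commit.
    private val onlyKerberosImage: Boolean =
      sys.env.get("ONLY_KERBEROS_IMAGE").exists(_.toBoolean)

    private def buildImages(): Unit = {
      if (!onlyKerberosImage) {
        buildImage("spark-base", BASE_DOCKER_FILE)
        buildImage("spark-driver", DRIVER_DOCKER_FILE)
        // ...and the rest of the images listed in the hunk above...
      }
      buildImage("kerberos-test", KERBEROS_DOCKER_FILE)
    }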

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/kerberos/KerberosUtils.scala

Lines changed: 23 additions & 7 deletions
@@ -58,19 +58,35 @@ private[spark] class KerberosUtils(
       .withKey(file)
       .withPath(file)
       .build()).toList
-  private val pvNN = Seq("namenode-hadoop", "namenode-hadoop-pv")
-  private val pvKT = Seq("server-keytab", "server-keytab-pv")
-  private def buildKerberosPV(seqPair: Seq[String]) = {
+  private def createPVTemplate(name: String, pathType: String) : PersistentVolume =
+    new PersistentVolumeBuilder()
+      .withNewMetadata()
+        .withName(name)
+        .withLabels(Map(
+          "type" -> "local",
+          "job" -> "kerberostest").asJava)
+        .endMetadata()
+      .withNewSpec()
+        .withCapacity(Map("storage" -> new Quantity("1Gi")).asJava)
+        .withAccessModes("ReadWriteOnce")
+        .withHostPath(
+          new HostPathVolumeSource(s"/tmp/$namespace/$pathType"))
+        .endSpec()
+      .build()
+  private val pvNN = "nn-hadoop"
+  private val pvKT = "server-keytab"
+  private val persistentVolumeMap: Map[String, PersistentVolume] = Map(
+    pvNN -> createPVTemplate(pvNN, "nn"),
+    pvKT -> createPVTemplate(pvKT, "keytab"))
+  private def buildKerberosPV(pvType: String) = {
     KerberosStorage(
-      kubernetesClient.load(loadFromYaml(seqPair.head))
+      kubernetesClient.load(loadFromYaml(pvType))
         .get().get(0).asInstanceOf[PersistentVolumeClaim],
-      kubernetesClient.load(loadFromYaml(seqPair(1)))
-        .get().get(0).asInstanceOf[PersistentVolume])
+      persistentVolumeMap(pvType))
   }
   def getNNStorage: KerberosStorage = buildKerberosPV(pvNN)
   def getKTStorage: KerberosStorage = buildKerberosPV(pvKT)
   def getLabels: Map[String, String] = PV_LABELS
-  def getPVNN: Seq[String] = pvNN
   def getKeyPaths: Seq[KeyToPath] = keyPaths
   def getConfigMap: ConfigMap = new ConfigMapBuilder()
     .withNewMetadata()
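
With this hunk the two PersistentVolume manifests deleted above (namenode-hadoop-pv.yml and server-keytab-pv.yml) move into Scala: only the claims are still loaded from YAML, while the volumes now come from createPVTemplate, which builds the 1Gi ReadWriteOnce hostPath layout in code, presumably matching what the deleted manifests declared. A minimal sketch of how such an in-code volume could be submitted through the same fabric8 client (the create call is standard fabric8 API, not something shown in this diff):

    // Submit the programmatically built PV, replacing what
    // `kubectl create -f namenode-hadoop-pv.yml` used to set up.
    val nnVolume: PersistentVolume = persistentVolumeMap(pvNN)
    kubernetesClient.persistentVolumes().create(nnVolume)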
