
Commit b321436: further testing work

1 parent 3fbf88c

8 files changed: 206 additions & 201 deletions

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/KerberosTokenConfBootstrap.scala

Lines changed: 2 additions & 4 deletions
@@ -27,10 +27,8 @@ import org.apache.spark.internal.Logging
  * mounted the DT secret for executors as well.
  */
 private[spark] trait KerberosTokenBootstrapConf {
-  /**
-   * Bootstraps a main container with the Secret mounted as volumes and an ENV variable
-   * pointing to the mounted file containing the DT for Secure HDFS interaction
-   */
+  // Bootstraps a main container with the Secret mounted as volumes and an ENV variable
+  // pointing to the mounted file containing the DT for Secure HDFS interaction
   def bootstrapMainContainerAndVolumes(
       originalPodWithMainContainer: PodWithMainContainer)
     : PodWithMainContainer
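
For orientation, a minimal sketch of what an implementation of this trait might look like, assuming PodWithMainContainer is a case class pairing the Pod with its main Container and using fabric8's standard builders; the class name, volume name, mount path, and HADOOP_TOKEN_FILE_LOCATION wiring are illustrative assumptions, not the commit's actual implementation:

import io.fabric8.kubernetes.api.model.{ContainerBuilder, PodBuilder}

// Hypothetical implementation sketch; names and paths are illustrative only.
private[spark] class SimpleKerberosTokenBootstrap(
    dtSecretName: String,
    dtSecretItemKey: String) extends KerberosTokenBootstrapConf {

  override def bootstrapMainContainerAndVolumes(
      originalPodWithMainContainer: PodWithMainContainer)
    : PodWithMainContainer = {
    // Mount the delegation-token secret as a volume on the pod...
    val podWithSecret = new PodBuilder(originalPodWithMainContainer.pod)
      .editOrNewSpec()
        .addNewVolume()
          .withName("hadoop-dt")
          .withNewSecret()
            .withSecretName(dtSecretName)
            .endSecret()
          .endVolume()
        .endSpec()
      .build()
    // ...and point the main container at the mounted token file via an ENV variable.
    val mainContainer = new ContainerBuilder(originalPodWithMainContainer.mainContainer)
      .addNewVolumeMount()
        .withName("hadoop-dt")
        .withMountPath("/mnt/secrets/hadoop-dt")
        .endVolumeMount()
      .addNewEnv()
        .withName("HADOOP_TOKEN_FILE_LOCATION")
        .withValue(s"/mnt/secrets/hadoop-dt/$dtSecretItemKey")
        .endEnv()
      .build()
    originalPodWithMainContainer.copy(
      pod = podWithSecret,
      mainContainer = mainContainer)
  }
}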

resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/HDFSTest.scala

Lines changed: 1 addition & 0 deletions
@@ -40,6 +40,7 @@ private[spark] object HDFSTest{
       val end = System.currentTimeMillis()
       println("Iteration " + iter + " took " + (end-start) + " ms")
     }
+    println(s"Returned length(s) of: ${file.map(s => s.length).collect().mkString(",")}")
     // scalastyle:on println
     spark.stop()
   }
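
The added println forces a collect() on the RDD, so the test actually reads the file back over secure HDFS rather than only timing the loop. A self-contained sketch of the same pattern (the object name and argument handling are illustrative):

import org.apache.spark.sql.SparkSession

// Illustrative reproduction of the added check: read a file and print the
// length of each line, forcing the RDD to be materialized from HDFS.
object LengthCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("LengthCheck").getOrCreate()
    val file = spark.read.textFile(args(0)).rdd
    // collect() triggers the actual read; mkString mirrors the test output.
    println(s"Returned length(s) of: ${file.map(s => s.length).collect().mkString(",")}")
    spark.stop()
  }
}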

resource-managers/kubernetes/integration-tests/src/test/resources/hdfs-site.xml

Lines changed: 0 additions & 2 deletions
@@ -104,8 +104,6 @@
     <name>dfs.namenode.datanode.registration.ip-hostname-check</name>
     <value>false</value>
   </property>
-
-
   <property>
     <name>dfs.datanode.data.dir.perm</name>
     <value>700</value>

resource-managers/kubernetes/integration-tests/src/test/resources/krb5.conf

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ renew_lifetime = 7d
 forwardable = true
 rdns = false
 default_realm = CLUSTER.LOCAL
-# default_ccache_name = KEYRING:persistent:%{uid}
+# default_ccache_name = MEMORY
 
 [realms]
 CLUSTER.LOCAL = {

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 177 additions & 177 deletions
Large diffs are not rendered by default.

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 10 additions & 10 deletions
@@ -87,16 +87,16 @@ private[spark] class SparkDockerImageBuilder
     if (exitCode != 0) {
       logInfo(s"exitCode: $exitCode")
     }
-    // buildImage("spark-base", BASE_DOCKER_FILE)
-    // buildImage("spark-driver", DRIVER_DOCKER_FILE)
-    // buildImage("spark-driver-py", DRIVERPY_DOCKER_FILE)
-    // buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
-    // buildImage("spark-executor-py", EXECUTORPY_DOCKER_FILE)
-    // buildImage("spark-shuffle", SHUFFLE_SERVICE_DOCKER_FILE)
-    // buildImage("spark-resource-staging-server", STAGING_SERVER_DOCKER_FILE)
-    // buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
-    // buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
-    // buildImage("kerberos-test", KERBEROS_DOCKER_FILE)
+    buildImage("spark-base", BASE_DOCKER_FILE)
+    buildImage("spark-driver", DRIVER_DOCKER_FILE)
+    buildImage("spark-driver-py", DRIVERPY_DOCKER_FILE)
+    buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
+    buildImage("spark-executor-py", EXECUTORPY_DOCKER_FILE)
+    buildImage("spark-shuffle", SHUFFLE_SERVICE_DOCKER_FILE)
+    buildImage("spark-resource-staging-server", STAGING_SERVER_DOCKER_FILE)
+    buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
+    buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
+    buildImage("kerberos-test", KERBEROS_DOCKER_FILE)
   }
 
   private def buildImage(name: String, dockerFile: String): Unit = {
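
All ten buildImage calls are re-enabled after having been commented out for local iteration. Since the images are built identically, a data-driven loop inside this class would make it harder to accidentally leave one disabled; a sketch, reusing the existing buildImage helper and constants:

// Sketch: keep the image list in one place and build each in order.
private val imagesToBuild = Seq(
  "spark-base" -> BASE_DOCKER_FILE,
  "spark-driver" -> DRIVER_DOCKER_FILE,
  "spark-driver-py" -> DRIVERPY_DOCKER_FILE,
  "spark-executor" -> EXECUTOR_DOCKER_FILE,
  "spark-executor-py" -> EXECUTORPY_DOCKER_FILE,
  "spark-shuffle" -> SHUFFLE_SERVICE_DOCKER_FILE,
  "spark-resource-staging-server" -> STAGING_SERVER_DOCKER_FILE,
  "spark-init" -> INIT_CONTAINER_DOCKER_FILE,
  "spark-integration-test-asset-server" -> STATIC_ASSET_SERVER_DOCKER_FILE,
  "kerberos-test" -> KERBEROS_DOCKER_FILE)

imagesToBuild.foreach { case (name, dockerFile) =>
  buildImage(name, dockerFile)
}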

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/kerberos/KerberosPodWatcherCache.scala

Lines changed: 4 additions & 5 deletions
@@ -28,9 +28,9 @@ import io.fabric8.kubernetes.client.Watcher.Action
 import org.apache.spark.internal.Logging
 
 /**
- * This class if used to ensure that the Hadoop cluster that is launched is executed
+ * This class is used to ensure that the Hadoop cluster that is launched is executed
  * in this order: KDC --> NN --> DN --> Data-Populator and that each one of these nodes
- * is running before launching the kerberos test.
+ * is running before launching the Kerberos test.
  */
 private[spark] class KerberosPodWatcherCache(
     kerberosUtils: KerberosUtils,
@@ -220,14 +220,12 @@ private[spark] class KerberosPodWatcherCache(
     while (!kdcIsUp) kdcRunning.await()
     while (!nnIsUp) nnRunning.await()
     while (!dnIsUp) dnRunning.await()
-    while (!hasInLogs(dnName, "Computing capacity for map BlockMap")) {
+    while (!hasInLogs(dnName, "Got finalize command for block pool")) {
       logInfo("Waiting on DN to be formatted")
       Thread.sleep(500)
     }
-    Thread.sleep(2000)
     dpIsUp = true
     logInfo(s"data-populator has signaled")
-
     try {
       dpRunning.signalAll()
     } finally {
@@ -245,6 +243,7 @@ private[spark] class KerberosPodWatcherCache(
       case _ if name.startsWith("data-populator") => "data-populator"
     }
   }
+
   private def hasInLogs(name: String, expectation: String): Boolean = {
     kubernetesClient
       .pods()
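
The last hunk cuts off inside hasInLogs. With the fabric8 client, the visible call chain plausibly continues by fetching the pod's log and searching it for the marker string; a hedged completion (the getLog-based body is an assumption inferred from the visible prefix, not the commit's verbatim code):

// Assumed completion: fetch the named pod's log and look for the expected line.
private def hasInLogs(name: String, expectation: String): Boolean = {
  kubernetesClient
    .pods()
    .withName(name)
    .getLog()
    .contains(expectation)
}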

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/kerberos/KerberosUtils.scala

Lines changed: 11 additions & 2 deletions
@@ -39,16 +39,21 @@ private[spark] class KerberosUtils(
   def loadFromYaml(resource: String): FileInputStream =
     new FileInputStream(new File(yamlLocation(resource)))
   private val regex = "REPLACE_ME".r
+  private val regexDP = "# default_ccache_name = MEMORY".r
+  private val defaultCacheDP = "default_ccache_name = KRBCONF"
   private def locationResolver(loc: String) = s"src/test/resources/$loc"
   private val kerberosFiles = Seq("krb5.conf", "core-site.xml", "hdfs-site.xml")
   private val kerberosConfTupList =
     kerberosFiles.map { file =>
-      (file, regex.replaceAllIn(readFileToString(new File(locationResolver(file))), namespace))}
+      (file, regex.replaceAllIn(readFileToString(new File(locationResolver(file))), namespace))} ++
+      Seq(("krb5-dp.conf", regexDP.replaceAllIn(regex.replaceAllIn(readFileToString(
+        new File(locationResolver("krb5.conf"))), namespace), defaultCacheDP)))
   private val KRB_VOLUME = "krb5-conf"
   private val KRB_FILE_DIR = "/tmp"
   private val KRB_CONFIG_MAP_NAME = "krb-config-map"
   private val PV_LABELS = Map("job" -> "kerberostest")
-  private val keyPaths: Seq[KeyToPath] = kerberosFiles.map(file =>
+  private val keyPaths: Seq[KeyToPath] = (kerberosFiles ++ Seq("krb5-dp.conf"))
+    .map(file =>
     new KeyToPathBuilder()
       .withKey(file)
       .withPath(file)
@@ -102,6 +107,10 @@ private[spark] class KerberosUtils(
         .withName("TMP_KRB_LOC")
         .withValue(s"$KRB_FILE_DIR/${kerberosFiles.head}")
         .endEnv()
+      .addNewEnv()
+        .withName("TMP_KRB_DP_LOC")
+        .withValue(s"$KRB_FILE_DIR/krb5-dp.conf")
+        .endEnv()
       .addNewEnv()
         .withName("TMP_CORE_LOC")
         .withValue(s"$KRB_FILE_DIR/${kerberosFiles(1)}")
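
The new krb5-dp.conf entry composes two substitutions: the namespace placeholder is filled in first, then the commented ccache line inherited from krb5.conf is replaced with an active KRBCONF setting. A small self-contained illustration of how the two replaceAllIn calls compose (the sample input text is made up):

// Illustrative only: same regexes and replacement as the diff above.
val namespace = "test-ns" // hypothetical namespace value
val regex = "REPLACE_ME".r
val regexDP = "# default_ccache_name = MEMORY".r
val defaultCacheDP = "default_ccache_name = KRBCONF"

// Made-up sample standing in for the real krb5.conf contents.
val krb5 =
  """default_realm = CLUSTER.LOCAL
    |# default_ccache_name = MEMORY
    |kdc = kerberos.REPLACE_ME.svc.cluster.local
    |""".stripMargin

val krb5DP = regexDP.replaceAllIn(regex.replaceAllIn(krb5, namespace), defaultCacheDP)
// krb5DP now contains "default_ccache_name = KRBCONF" (uncommented) and has
// the namespace substituted into the KDC hostname.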
