This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 48533ff

Mount a hadoop secret in the driver pod

1 parent: beb1361

File tree

4 files changed: +73 -0 lines changed

  resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/config.scala
  resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
  resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/DriverConfigurationStepsOrchestrator.scala
  resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/DriverHadoopTokensStep.scala

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/config.scala

Lines changed: 2 additions & 0 deletions

@@ -69,6 +69,8 @@ package object config extends Logging {
   private[spark] val CLIENT_CERT_FILE_CONF_SUFFIX = "clientCertFile"
   private[spark] val CA_CERT_FILE_CONF_SUFFIX = "caCertFile"

+  private[spark] val MOUNTED_HADOOP_SECRET_CONF = "spark.kubernetes.mounted.hadoopSecret"
+
   private[spark] val RESOURCE_STAGING_SERVER_USE_SERVICE_ACCOUNT_CREDENTIALS =
     ConfigBuilder(
       s"$APISERVER_AUTH_RESOURCE_STAGING_SERVER_CONF_PREFIX.useServiceAccountCredentials")

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 8 additions & 0 deletions

@@ -43,6 +43,13 @@ package object constants {
     s"$DRIVER_CREDENTIALS_SECRETS_BASE_DIR/$DRIVER_CREDENTIALS_OAUTH_TOKEN_SECRET_NAME"
   private[spark] val DRIVER_CREDENTIALS_SECRET_VOLUME_NAME = "kubernetes-credentials"

+  // Hadoop credentials secrets for the Spark app.
+  private[spark] val SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR = "/mnt/secrets/hadoop-credentials"
+  private[spark] val SPARK_APP_HADOOP_TOKEN_FILE_SECRET_NAME = "hadoop-token-file"
+  private[spark] val SPARK_APP_HADOOP_TOKEN_FILE_PATH =
+    s"$SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR/$SPARK_APP_HADOOP_TOKEN_FILE_SECRET_NAME"
+  private[spark] val SPARK_APP_HADOOP_SECRET_VOLUME_NAME = "hadoop-secret"
+
   // Default and fixed ports
   private[spark] val SUBMISSION_SERVER_PORT = 7077
   private[spark] val DEFAULT_DRIVER_PORT = 7078
@@ -69,6 +76,7 @@ package object constants {
   private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR"
   private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES"
   private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY"
+  private[spark] val ENV_HADOOP_TOKEN_FILE_LOCATION = "HADOOP_TOKEN_FILE_LOCATION"

   // Bootstrapping dependencies with the init-container
   private[spark] val INIT_CONTAINER_ANNOTATION = "pod.beta.kubernetes.io/init-containers"
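HADOOP_TOKEN_FILE_LOCATION is the environment variable stock Hadoop consults at login: UserGroupInformation loads delegation tokens from the file it names into the login user's credentials. A sketch of the consuming side, assuming standard Hadoop security behavior (illustrative, not part of this commit):

    import scala.collection.JavaConverters._

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.security.UserGroupInformation

    // With HADOOP_TOKEN_FILE_LOCATION pointing at the mounted token file, the
    // login user's credentials carry the delegation tokens read from that file.
    UserGroupInformation.setConfiguration(new Configuration())
    val tokens = UserGroupInformation.getLoginUser.getCredentials.getAllTokens.asScala
    tokens.foreach(t => println(s"loaded delegation token of kind ${t.getKind}"))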

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/DriverConfigurationStepsOrchestrator.scala

Lines changed: 2 additions & 0 deletions

@@ -94,6 +94,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
       submissionSparkConf)
     val kubernetesCredentialsStep = new DriverKubernetesCredentialsStep(
       submissionSparkConf, kubernetesResourceNamePrefix)
+    val hadoopTokensStep = new DriverHadoopTokensStep(submissionSparkConf)
     val pythonStep = mainAppResource match {
       case PythonMainAppResource(mainPyResource) =>
         Option(new PythonStep(mainPyResource, additionalPythonFiles, filesDownloadPath))
@@ -131,6 +132,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
     Seq(
       initialSubmissionStep,
       kubernetesCredentialsStep,
+      hadoopTokensStep,
       dependencyResolutionStep) ++
       initContainerBootstrapStep.toSeq ++
       pythonStep.toSeq
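The orchestrator only assembles the step list; each DriverConfigurationStep then transforms the driver spec in turn. A sketch of how the submission client plausibly applies them (the names orchestrator, getAllConfigurationSteps, and initialDriverSpec are assumed for illustration, not shown in this diff):

    // Illustrative only: every step, including the new hadoopTokensStep,
    // receives the spec produced by the previous step and returns a new one.
    val steps: Seq[DriverConfigurationStep] = orchestrator.getAllConfigurationSteps()
    val resolvedSpec: KubernetesDriverSpec = steps.foldLeft(initialDriverSpec) {
      (spec, step) => step.configureDriver(spec)
    }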

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/DriverHadoopTokensStep.scala

Lines changed: 61 additions & 0 deletions

@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.deploy.kubernetes.submit.submitsteps
+
+import io.fabric8.kubernetes.api.model.{ContainerBuilder, PodBuilder}
+
+import org.apache.spark.SparkConf
+import org.apache.spark.deploy.kubernetes.config._
+import org.apache.spark.deploy.kubernetes.constants._
+
+// Mounts the Hadoop token secret named by spark.kubernetes.mounted.hadoopSecret into the
+// driver pod and points HADOOP_TOKEN_FILE_LOCATION at the mounted token file. A no-op
+// when the configuration key is unset.
+class DriverHadoopTokensStep(submissionSparkConf: SparkConf) extends DriverConfigurationStep {
+
+  private val maybeMountedHadoopSecret = submissionSparkConf.getOption(MOUNTED_HADOOP_SECRET_CONF)
+
+  override def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
+    // Back a new pod volume with the user-provided secret, if one was configured.
+    val driverPodWithMountedHadoopTokens = maybeMountedHadoopSecret.map { secret =>
+      new PodBuilder(driverSpec.driverPod)
+        .editOrNewSpec()
+          .addNewVolume()
+            .withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
+            .withNewSecret()
+              .withSecretName(secret)
+              .endSecret()
+            .endVolume()
+          .endSpec()
+        .build()
+    }.getOrElse(driverSpec.driverPod)
+    // Mount that volume into the driver container and export the token file's path.
+    val driverContainerWithMountedSecretVolume = maybeMountedHadoopSecret.map { secret =>
+      new ContainerBuilder(driverSpec.driverContainer)
+        .addNewVolumeMount()
+          .withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
+          .withMountPath(SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR)
+          .endVolumeMount()
+        .addNewEnv()
+          .withName(ENV_HADOOP_TOKEN_FILE_LOCATION)
+          .withValue(SPARK_APP_HADOOP_TOKEN_FILE_PATH)
+          .endEnv()
+        .build()
+    }.getOrElse(driverSpec.driverContainer)
+    driverSpec.copy(
+      driverPod = driverPodWithMountedHadoopTokens,
+      otherKubernetesResources = driverSpec.otherKubernetesResources,
+      driverSparkConf = driverSpec.driverSparkConf,
+      driverContainer = driverContainerWithMountedSecretVolume)
+  }
+}
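
End to end, the user creates a secret whose data key equals SPARK_APP_HADOOP_TOKEN_FILE_SECRET_NAME ("hadoop-token-file"), so that the mounted file lands exactly at SPARK_APP_HADOOP_TOKEN_FILE_PATH, then passes the secret's name via spark.kubernetes.mounted.hadoopSecret. A sketch with the fabric8 client this module already uses (the secret name, namespace, and local token-file path are placeholders):

    import java.nio.file.{Files, Paths}
    import java.util.Base64

    import io.fabric8.kubernetes.api.model.SecretBuilder
    import io.fabric8.kubernetes.client.DefaultKubernetesClient

    // Read a locally obtained Hadoop delegation-token file and wrap it in a secret.
    val tokenBytes = Files.readAllBytes(Paths.get("/tmp/hadoop-tokens"))
    val secret = new SecretBuilder()
      .withNewMetadata().withName("my-hadoop-token-secret").endMetadata()
      // The key must be "hadoop-token-file" so the file mounts at the path the
      // driver's HADOOP_TOKEN_FILE_LOCATION points to.
      .addToData("hadoop-token-file", Base64.getEncoder.encodeToString(tokenBytes))
      .build()
    new DefaultKubernetesClient().secrets().inNamespace("default").create(secret)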
