
Commit 6efa379

addresses comments from PR
1 parent ffe7891 commit 6efa379

14 files changed: +45 -43 lines changed


docs/running-on-kubernetes.md

Lines changed: 3 additions & 3 deletions
@@ -786,7 +786,7 @@ from the other deployment modes. See the [configuration page](configuration.html
   <td><code>spark.kubernetes.kerberos.enabled</code></td>
   <td>false</td>
   <td>
-    Specify whether your job is a job that will require a Kerberos Authorization to access HDFS. By default, we
+    Specify whether your job requires a Kerberos Authentication to access HDFS. By default, we
     will assume that you will not require secure HDFS access.
   </td>
 </tr>
@@ -820,12 +820,12 @@ from the other deployment modes. See the [configuration page](configuration.html
   </td>
 </tr>
 <tr>
-  <td><code>spark.kubernetes.kerberos.tokensecret.label</code></td>
+  <td><code>spark.kubernetes.kerberos.tokensecret.itemkey</code></td>
   <td>spark.kubernetes.kerberos.dt.label</td>
   <td>
     Assuming you have set <code>spark.kubernetes.kerberos.enabled</code> to be true. This will let you specify
     the label within the pre-specified secret where the data of your existing delegation token data is stored.
-    We have a default value of <code>spark.kubernetes.kerberos.dt.label</code> should you not include it. But
+    We have a default value of <code>spark.kubernetes.kerberos.tokensecret.itemkey</code> should you not include it. But
     you should always include this if you are proposing a pre-existing secret contain the delegation token data.
   <td><code>spark.executorEnv.[EnvironmentVariableName]</code></td>
   <td>(none)</td>
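For readers of this table, a hedged sketch of how a submission might set these options together; the secret-name key (`spark.kubernetes.kerberos.tokensecret.name`) and all values below are illustrative assumptions, not defaults taken from this page.

```scala
import org.apache.spark.SparkConf

// Sketch only: enabling Kerberos-secured HDFS access with a pre-existing
// delegation-token secret. The secret-name key and both values are assumed
// for illustration; the item-key option is the one renamed in this commit.
val conf = new SparkConf()
  .set("spark.kubernetes.kerberos.enabled", "true")
  .set("spark.kubernetes.kerberos.tokensecret.name", "my-dt-secret")      // assumed key and value
  .set("spark.kubernetes.kerberos.tokensecret.itemkey", "my-dt-item-key") // renamed option, assumed value
```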

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/KerberosTokenConfBootstrap.scala

Lines changed: 2 additions & 2 deletions
@@ -36,7 +36,7 @@ private[spark] trait KerberosTokenBootstrapConf {
 
 private[spark] class KerberosTokenConfBootstrapImpl(
     secretName: String,
-    secretLabel: String,
+    secretItemKey: String,
     userName: String) extends KerberosTokenBootstrapConf with Logging{
 
 
@@ -62,7 +62,7 @@ private[spark] class KerberosTokenConfBootstrapImpl(
           .endVolumeMount()
         .addNewEnv()
           .withName(ENV_HADOOP_TOKEN_FILE_LOCATION)
-          .withValue(s"$SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR/$secretLabel")
+          .withValue(s"$SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR/$secretItemKey")
           .endEnv()
         .addNewEnv()
           .withName(ENV_SPARK_USER)
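As a minimal sketch of what the renamed parameter feeds into: the secret is mounted as files, one file per data item, so the token file location handed to Hadoop is just the credentials directory joined with the item key. The directory and key values below are assumptions for illustration, not constants from this change.

```scala
// Sketch only: how the item key becomes the token file path inside the container.
val credentialsBaseDir = "/mnt/secrets/hadoop-credentials"       // assumed mount path
val secretItemKey = "hadoop-tokens-1500000000000-86400000"       // assumed item key
val hadoopTokenFileLocation = s"$credentialsBaseDir/$secretItemKey"
// HADOOP_TOKEN_FILE_LOCATION is set to this path so Hadoop's UserGroupInformation
// reads the delegation tokens from the mounted secret file at login.
```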

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/config.scala

Lines changed: 3 additions & 3 deletions
@@ -570,9 +570,9 @@ package object config extends Logging {
       .stringConf
       .createOptional
 
-  private[spark] val KUBERNETES_KERBEROS_DT_SECRET_LABEL =
-    ConfigBuilder("spark.kubernetes.kerberos.tokensecret.label")
-      .doc("Specify the label of the data where " +
+  private[spark] val KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY =
+    ConfigBuilder("spark.kubernetes.kerberos.tokensecret.itemkey")
+      .doc("Specify the item key of the data where " +
         " your existing delegation token is stored. This removes the need" +
         " for the job user to provide any keytab for launching a job")
       .stringConf
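A small hedged aside on consumption: because the entry is declared with `createOptional`, reading it from the submission SparkConf yields an `Option[String]`, which is how the orchestrator change later in this diff consumes it. The snippet assumes a `submissionSparkConf` in scope, mirroring that code.

```scala
// Sketch only: optional entries read back as Option[String], so callers branch on
// presence rather than on a sentinel default value.
val maybeExistingSecretItemKey: Option[String] =
  submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY)
```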

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 2 additions & 2 deletions
@@ -113,8 +113,8 @@ package object constants {
     "spark.kubernetes.kerberos.dt"
   private[spark] val HADOOP_KERBEROS_CONF_SECRET =
     "spark.kubernetes.kerberos.secretname"
-  private[spark] val HADOOP_KERBEROS_CONF_LABEL =
-    "spark.kubernetes.kerberos.labelname"
+  private[spark] val HADOOP_KERBEROS_CONF_ITEM_KEY =
+    "spark.kubernetes.kerberos.itemkeyname"
   private[spark] val KERBEROS_SECRET_LABEL_PREFIX =
     "hadoop-tokens"
   private[spark] val SPARK_HADOOP_PREFIX = "spark.hadoop."

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/HadoopConfigBootstrapStep.scala

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ private[spark] class HadoopConfigBootstrapStep(
       additionalDriverSparkConf = Map.empty[String, String],
       dtSecret = None,
       dtSecretName = HADOOP_KERBEROS_SECRET_NAME,
-      dtSecretLabel = "")
+      dtSecretItemKey = "")
     for (nextStep <- hadoopConfigurationSteps) {
       currentHadoopSpec = nextStep.configureContainers(currentHadoopSpec)
     }
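A hedged restatement of the loop above (not a change proposed here): threading `currentHadoopSpec` through each step with a `var` is equivalent to folding the steps over the initial spec, which makes the flow of the renamed `dtSecretItemKey` field easier to follow.

```scala
// Equivalent sketch of the step chaining above: each HadoopConfigurationStep
// transforms the accumulated HadoopConfigSpec. `initialHadoopSpec` stands for the
// spec constructed above with dtSecretItemKey = "".
val finalHadoopSpec: HadoopConfigSpec =
  hadoopConfigurationSteps.foldLeft(initialHadoopSpec) { (spec, step) =>
    step.configureContainers(spec)
  }
```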

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopConfigSpec.scala

Lines changed: 2 additions & 2 deletions
@@ -30,7 +30,7 @@ import io.fabric8.kubernetes.api.model.{Container, Pod, Secret}
  *  pairs of (path, data)
  *  - The secret containing a DT, either previously specified or built on the fly
  *  - The name of the secret where the DT will be stored
- *  - The label on the secret which correlates with where the current DT data is stored
+ *  - The data item-key on the secret which correlates with where the current DT data is stored
  */
 private[spark] case class HadoopConfigSpec(
     additionalDriverSparkConf: Map[String, String],
@@ -39,4 +39,4 @@ private[spark] case class HadoopConfigSpec(
     configMapProperties: Map[String, String],
     dtSecret: Option[Secret],
     dtSecretName: String,
-    dtSecretLabel: String)
+    dtSecretItemKey: String)

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopKerberosKeytabResolverStep.scala

Lines changed: 6 additions & 5 deletions
@@ -102,17 +102,18 @@ private[spark] class HadoopKerberosKeytabResolverStep(
     val data = serialize(credentials)
     val renewalTime = getTokenRenewalInterval(tokens, hadoopConf).getOrElse(Long.MaxValue)
     val currentTime: Long = System.currentTimeMillis()
-    val initialTokenLabelName = s"$KERBEROS_SECRET_LABEL_PREFIX-$currentTime-$renewalTime"
+    val initialTokenDataKeyName = s"$KERBEROS_SECRET_LABEL_PREFIX-$currentTime-$renewalTime"
     val secretDT =
       new SecretBuilder()
         .withNewMetadata()
           .withName(HADOOP_KERBEROS_SECRET_NAME)
+          .withLabels(Map("refresh-hadoop-tokens" -> "yes").asJava)
           .endMetadata()
-        .addToData(initialTokenLabelName, Base64.encodeBase64String(data))
+        .addToData(initialTokenDataKeyName, Base64.encodeBase64String(data))
         .build()
     val bootstrapKerberos = new KerberosTokenConfBootstrapImpl(
       HADOOP_KERBEROS_SECRET_NAME,
-      initialTokenLabelName,
+      initialTokenDataKeyName,
       jobUserUGI.getShortUserName)
     val withKerberosEnvPod = bootstrapKerberos.bootstrapMainContainerAndVolumes(
       PodWithMainContainer(
@@ -121,13 +122,13 @@ private[spark] class HadoopKerberosKeytabResolverStep(
     hadoopConfigSpec.copy(
       additionalDriverSparkConf =
         hadoopConfigSpec.additionalDriverSparkConf ++ Map(
-          HADOOP_KERBEROS_CONF_LABEL -> initialTokenLabelName,
+          HADOOP_KERBEROS_CONF_ITEM_KEY -> initialTokenDataKeyName,
           HADOOP_KERBEROS_CONF_SECRET -> HADOOP_KERBEROS_SECRET_NAME),
       driverPod = withKerberosEnvPod.pod,
       driverContainer = withKerberosEnvPod.mainContainer,
       dtSecret = Some(secretDT),
       dtSecretName = HADOOP_KERBEROS_SECRET_NAME,
-      dtSecretLabel = initialTokenLabelName)
+      dtSecretItemKey = initialTokenDataKeyName)
   }
 
   // Functions that should be in Core with Rebase to 2.3
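For tracing purposes, the generated item key encodes the prefix together with the creation time and renewal interval. The helper below is a hypothetical illustration of how such a key could be decomposed; nothing like it is added by this commit.

```scala
// Hypothetical parser for keys shaped like "hadoop-tokens-<createdMillis>-<renewalMillis>".
def parseTokenItemKey(itemKey: String): Option[(Long, Long)] =
  itemKey.split("-").takeRight(2) match {
    case Array(created, renewal) =>
      scala.util.Try((created.toLong, renewal.toLong)).toOption
    case _ => None
  }

// parseTokenItemKey("hadoop-tokens-1500000000000-86400000")
//   == Some((1500000000000L, 86400000L))
```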

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopKerberosSecretResolverStep.scala

Lines changed: 6 additions & 6 deletions
@@ -25,18 +25,18 @@ import org.apache.spark.deploy.kubernetes.constants._
 /**
  * This step assumes that you have already done all the heavy lifting in retrieving a
  * delegation token and storing the following data in a secret before running this job.
- * This step requires that you just specify the secret name and label corresponding to the
- * data where the delegation token is stored.
+ * This step requires that you just specify the secret name and data item-key corresponding
+ * to the data where the delegation token is stored.
  */
 private[spark] class HadoopKerberosSecretResolverStep(
     submissionSparkConf: SparkConf,
     tokenSecretName: String,
-    tokenLabelName: String) extends HadoopConfigurationStep {
+    tokenItemKeyName: String) extends HadoopConfigurationStep {
 
   override def configureContainers(hadoopConfigSpec: HadoopConfigSpec): HadoopConfigSpec = {
     val bootstrapKerberos = new KerberosTokenConfBootstrapImpl(
       tokenSecretName,
-      tokenLabelName,
+      tokenItemKeyName,
       UserGroupInformation.getCurrentUser.getShortUserName)
     val withKerberosEnvPod = bootstrapKerberos.bootstrapMainContainerAndVolumes(
       PodWithMainContainer(
@@ -47,10 +47,10 @@ private[spark] class HadoopKerberosSecretResolverStep(
       driverContainer = withKerberosEnvPod.mainContainer,
       additionalDriverSparkConf =
         hadoopConfigSpec.additionalDriverSparkConf ++ Map(
-          HADOOP_KERBEROS_CONF_LABEL -> tokenLabelName,
+          HADOOP_KERBEROS_CONF_ITEM_KEY -> tokenItemKeyName,
           HADOOP_KERBEROS_CONF_SECRET -> tokenSecretName),
       dtSecret = None,
       dtSecretName = tokenSecretName,
-      dtSecretLabel = tokenLabelName)
+      dtSecretItemKey = tokenItemKeyName)
   }
 }
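For completeness, a hedged sketch of the pre-existing secret this step expects: a secret whose data map holds the base64-encoded delegation-token bytes under the item key that the job then passes in. The secret name, item key, and the way the token bytes are obtained are all illustrative assumptions, not code added by this commit.

```scala
import io.fabric8.kubernetes.api.model.{Secret, SecretBuilder}
import org.apache.commons.codec.binary.Base64

// Sketch only: building the kind of secret HadoopKerberosSecretResolverStep resolves.
// The token bytes would come from a prior delegation-token fetch done out of band.
val tokenBytes: Array[Byte] = ???  // serialized delegation-token credentials (assumed)

val preCreatedSecret: Secret = new SecretBuilder()
  .withNewMetadata()
    .withName("my-dt-secret")                  // assumed secret name
    .endMetadata()
  .addToData("my-dt-item-key", Base64.encodeBase64String(tokenBytes))  // assumed item key
  .build()
```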

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/hadoopsteps/HadoopStepsOrchestrator.scala

Lines changed: 7 additions & 7 deletions
@@ -36,8 +36,8 @@ private[spark] class HadoopStepsOrchestrator(
   private val maybeKeytab = submissionSparkConf.get(KUBERNETES_KERBEROS_KEYTAB)
     .map(k => new File(k))
   private val maybeExistingSecret = submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_NAME)
-  private val maybeExistingSecretLabel =
-    submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_LABEL)
+  private val maybeExistingSecretItemKey =
+    submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY)
   private val hadoopConfigurationFiles = getHadoopConfFiles(hadoopConfDir)
   logInfo(s"Hadoop Conf directory: $hadoopConfDir")
 
@@ -55,10 +55,10 @@ private[spark] class HadoopStepsOrchestrator(
 
   OptionRequirements.requireBothOrNeitherDefined(
     maybeExistingSecret,
-    maybeExistingSecretLabel,
+    maybeExistingSecretItemKey,
     "If a secret storing a Kerberos Delegation Token is specified you must also" +
       " specify the label where the data is stored",
-    "If a secret label where the data of the Kerberos Delegation Token is specified" +
+    "If a secret data item-key where the data of the Kerberos Delegation Token is specified" +
       " you must also specify the name of the secret")
 
   def getHadoopSteps(): Seq[HadoopConfigurationStep] = {
@@ -72,10 +72,10 @@ private[spark] class HadoopStepsOrchestrator(
         hadoopConfDir)
     val maybeKerberosStep =
       if (isKerberosEnabled) {
-        maybeExistingSecret.map(secretLabel => Some(new HadoopKerberosSecretResolverStep(
+        maybeExistingSecret.map(secretItemKey => Some(new HadoopKerberosSecretResolverStep(
          submissionSparkConf,
-          secretLabel,
-          maybeExistingSecretLabel.get))).getOrElse(Some(
+          secretItemKey,
+          maybeExistingSecretItemKey.get))).getOrElse(Some(
           new HadoopKerberosKeytabResolverStep(
             submissionSparkConf,
             maybePrincipal,
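A hedged sketch of the both-or-neither contract enforced above, written out for readers who have not seen the `OptionRequirements` helper; this is an assumed equivalent, not the project's implementation.

```scala
// Assumed equivalent of OptionRequirements.requireBothOrNeitherDefined:
// the two options must either both be defined or both be absent.
def requireBothOrNeitherDefined(
    first: Option[_],
    second: Option[_],
    errMessageWhenSecondMissing: String,
    errMessageWhenFirstMissing: String): Unit = {
  require(first.isEmpty || second.isDefined, errMessageWhenSecondMissing)
  require(second.isEmpty || first.isDefined, errMessageWhenFirstMissing)
}
```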

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterManager.scala

Lines changed: 3 additions & 3 deletions
@@ -44,7 +44,7 @@ private[spark] class KubernetesClusterManager extends ExternalClusterManager wit
     val maybeHadoopConfigMap = sparkConf.getOption(HADOOP_CONFIG_MAP_SPARK_CONF_NAME)
     val maybeHadoopConfDir = sparkConf.getOption(HADOOP_CONF_DIR_LOC)
     val maybeDTSecretName = sparkConf.getOption(HADOOP_KERBEROS_CONF_SECRET)
-    val maybeDTLabelName = sparkConf.getOption(HADOOP_KERBEROS_CONF_LABEL)
+    val maybeDTLabelName = sparkConf.getOption(HADOOP_KERBEROS_CONF_ITEM_KEY)
     val maybeInitContainerConfigMap = sparkConf.get(EXECUTOR_INIT_CONTAINER_CONFIG_MAP)
     val maybeInitContainerConfigMapKey = sparkConf.get(EXECUTOR_INIT_CONTAINER_CONFIG_MAP_KEY)
     val maybeSubmittedFilesSecret = sparkConf.get(EXECUTOR_SUBMITTED_SMALL_FILES_SECRET)
@@ -91,11 +91,11 @@ private[spark] class KubernetesClusterManager extends ExternalClusterManager wit
     }
     val kerberosBootstrap = for {
       secretName <- maybeDTSecretName
-      secretLabel <- maybeDTLabelName
+      secretItemKey <- maybeDTLabelName
     } yield {
       new KerberosTokenConfBootstrapImpl(
         secretName,
-        secretLabel,
+        secretItemKey,
         Utils.getCurrentUserName)
     }
     val mountSmallFilesBootstrap = for {
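One last hedged note on the wiring above: the for-comprehension runs over two `Option`s, so the Kerberos bootstrap is only constructed when both the secret name and the item key were propagated into the Spark conf. A standalone illustration of that semantics, with made-up values:

```scala
// Standalone illustration (not project code): a for-comprehension over Options
// yields Some only when every generator is defined.
val maybeSecretName: Option[String] = Some("my-dt-secret")  // illustrative
val maybeItemKey: Option[String] = None                     // item key not propagated

val combined: Option[(String, String)] = for {
  name <- maybeSecretName
  key <- maybeItemKey
} yield (name, key)

assert(combined.isEmpty)  // no bootstrap would be built in this case
```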
