This repository was archived by the owner on Jan 9, 2020. It is now read-only.
File tree Expand file tree Collapse file tree 5 files changed +8
-8
lines changed
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s
submit/submitsteps/hadoopsteps Expand file tree Collapse file tree 5 files changed +8
-8
lines changed Original file line number Diff line number Diff line change @@ -784,7 +784,7 @@ from the other deployment modes. See the [configuration page](configuration.html
784
784
<td >(none)</td >
785
785
<td >
786
786
Assuming you have set <code>spark.kubernetes.kerberos.enabled</code> to be true, this will let you specify
787
- the principal that you wish to use to handle renewing of Delegation Tokens. This is optional as you
787
+ the principal that you wish to use to handle renewing of Delegation Tokens. This is optional as
788
788
we will set the principal to be the job user's principal by default.
789
789
</td >
790
790
</tr >
Original file line number Diff line number Diff line change @@ -57,9 +57,9 @@ private[spark] class HadoopConfBootstrapImpl(
57
57
.editSpec()
58
58
.addNewVolume()
59
59
.withName(HADOOP_FILE_VOLUME )
60
- .withNewConfigMap()
61
- .withName(hadoopConfConfigMapName)
62
- .withItems(keyPaths.asJava)
60
+ .withNewConfigMap()
61
+ .withName(hadoopConfConfigMapName)
62
+ .withItems(keyPaths.asJava)
63
63
.endConfigMap()
64
64
.endVolume()
65
65
.endSpec()
Original file line number Diff line number Diff line change @@ -67,6 +67,7 @@ private[spark] class HadoopUGIUtil{
67
67
val byteStream = new ByteArrayOutputStream
68
68
val dataStream = new DataOutputStream (byteStream)
69
69
creds.writeTokenStorageToStream(dataStream)
70
+ dataStream.close()
70
71
byteStream.toByteArray
71
72
}
72
73
Original file line number Diff line number Diff line change @@ -81,7 +81,6 @@ package object constants {
81
81
private [spark] val ENV_SPARK_USER = " SPARK_USER"
82
82
83
83
// Bootstrapping dependencies with the init-container
84
- private [spark] val INIT_CONTAINER_ANNOTATION = " pod.beta.kubernetes.io/init-containers"
85
84
private [spark] val INIT_CONTAINER_SECRET_VOLUME_MOUNT_PATH =
86
85
" /mnt/secrets/spark-init"
87
86
private [spark] val INIT_CONTAINER_SUBMITTED_JARS_SECRET_KEY =
@@ -107,7 +106,7 @@ package object constants {
107
106
private [spark] val ENV_HADOOP_CONF_DIR = " HADOOP_CONF_DIR"
108
107
private [spark] val HADOOP_CONF_DIR_LOC = " spark.kubernetes.hadoop.conf.dir"
109
108
private [spark] val HADOOP_CONFIG_MAP_SPARK_CONF_NAME =
110
- " spark.kubernetes.hadoop.executor.hadoopconfigmapname "
109
+ " spark.kubernetes.hadoop.executor.hadoopConfigMapName "
111
110
112
111
// Kerberos Configuration
113
112
private [spark] val HADOOP_KERBEROS_SECRET_NAME =
Original file line number Diff line number Diff line change @@ -50,7 +50,7 @@ private[spark] class HadoopKerberosKeytabResolverStep(
50
50
maybePrincipal : Option [String ],
51
51
maybeKeytab : Option [File ],
52
52
maybeRenewerPrincipal : Option [String ],
53
- hadoopUGI : HadoopUGIUtil ) extends HadoopConfigurationStep with Logging {
53
+ hadoopUGI : HadoopUGIUtil ) extends HadoopConfigurationStep with Logging {
54
54
private var originalCredentials : Credentials = _
55
55
private var dfs : FileSystem = _
56
56
private var renewer : String = _
@@ -59,7 +59,7 @@ private[spark] class HadoopKerberosKeytabResolverStep(
59
59
60
60
override def configureContainers (hadoopConfigSpec : HadoopConfigSpec ): HadoopConfigSpec = {
61
61
val hadoopConf = SparkHadoopUtil .get.newConfiguration(submissionSparkConf)
62
- if (hadoopUGI.isSecurityEnabled) logDebug(" Hadoop not configured with Kerberos" )
62
+ if (! hadoopUGI.isSecurityEnabled) logDebug(" Hadoop not configured with Kerberos" )
63
63
val maybeJobUserUGI =
64
64
for {
65
65
principal <- maybePrincipal
You can’t perform that action at this time.
0 commit comments