@@ -133,7 +133,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")

     runPySparkPiAndVerifyCompletion(
       PYSPARK_PI_SUBMITTER_LOCAL_FILE_LOCATION,
@@ -150,7 +150,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")

     runPySparkPiAndVerifyCompletion(PYSPARK_PI_CONTAINER_LOCAL_FILE_LOCATION, Seq.empty[String])
   }
@@ -178,7 +178,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.ssl.kubernetes.resourceStagingServer.keyStorePassword", "keyStore")
       .set("spark.ssl.kubernetes.resourceStagingServer.keyPassword", "key")
       .set("spark.ssl.kubernetes.resourceStagingServer.trustStorePassword", "trustStore")
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     launchStagingServer(SSLOptions(
       enabled = true,
       keyStore = Some(keyStoreAndTrustStore.keyStore),
@@ -209,7 +209,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set("spark.kubernetes.shuffle.labels", "app=spark-shuffle-service")
     sparkConf.set("spark.kubernetes.shuffle.namespace", kubernetesTestComponents.namespace)
     sparkConf.set("spark.app.name", "group-by-test")
-    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       GROUP_BY_MAIN_CLASS,
@@ -225,7 +225,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.setJars(Seq(
       s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
       s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    )).set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -235,7 +235,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
     sparkConf.setJars(Seq(
       SUBMITTER_LOCAL_MAIN_APP_RESOURCE, s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    )).set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -248,7 +248,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
       .set(
         RESOURCE_STAGING_SERVER_CLIENT_CERT_PEM.key, keyAndCertificate.certPem.getAbsolutePath)
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }

@@ -266,7 +266,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(
       s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
       kubernetesTestComponents.clientConfig.getCaCertFile)
-    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -276,7 +276,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
     Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
     launchStagingServer(SSLOptions(), None)
-    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KERBEROS_CONF, "simple")
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       FILE_EXISTENCE_MAIN_CLASS,
@@ -290,7 +290,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)

     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).setAppName("long" * 40)
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }

@@ -447,6 +447,7 @@ private[spark] object KubernetesSuite {
   val HDFS_TEST_CLASS = "org.apache.spark.deploy.kubernetes" +
     ".integrationtest.jobs.HDFSTest"
   val TEST_EXISTENCE_FILE_CONTENTS = "contents"
+  val KERBEROS_CONF = "spark.hadoop.hadoop.security.authentication"


   case object ShuffleNotReadyException extends Exception
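
Note (not part of the diff above): every hunk swaps the removed boolean KUBERNETES_KERBEROS_SUPPORT flag for the plain Hadoop authentication property, set to "simple" in each test's SparkConf. A minimal sketch of that pattern, assuming only a standard org.apache.spark.SparkConf; the object and method names below are hypothetical and exist purely for illustration:

import org.apache.spark.SparkConf

object KerberosConfSketch {
  // Mirrors the constant added to the KubernetesSuite companion object in this diff.
  val KERBEROS_CONF = "spark.hadoop.hadoop.security.authentication"

  def main(args: Array[String]): Unit = {
    // Each integration test now asks for simple (non-Kerberos) Hadoop
    // authentication instead of toggling the removed boolean flag.
    val sparkConf = new SparkConf(false)
      .set(KERBEROS_CONF, "simple")
    println(sparkConf.get(KERBEROS_CONF)) // prints "simple"
  }
}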