@@ -157,7 +157,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
 
   test("Simple submission test with the resource staging server.") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
-    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
     launchStagingServer(SSLOptions(), None)
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }
@@ -178,7 +177,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.ssl.kubernetes.resourceStagingServer.keyStorePassword", "keyStore")
       .set("spark.ssl.kubernetes.resourceStagingServer.keyPassword", "key")
       .set("spark.ssl.kubernetes.resourceStagingServer.trustStorePassword", "trustStore")
-      .set(KERBEROS_CONF, "simple")
     launchStagingServer(SSLOptions(
       enabled = true,
       keyStore = Some(keyStoreAndTrustStore.keyStore),
@@ -193,7 +191,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
   test("Use container-local resources without the resource staging server") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
 
-    sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -209,7 +207,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set("spark.kubernetes.shuffle.labels", "app=spark-shuffle-service")
     sparkConf.set("spark.kubernetes.shuffle.namespace", kubernetesTestComponents.namespace)
     sparkConf.set("spark.app.name", "group-by-test")
-    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       GROUP_BY_MAIN_CLASS,
@@ -225,7 +222,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.setJars(Seq(
       s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
       s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KERBEROS_CONF, "simple")
+    ))
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -235,7 +232,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
     sparkConf.setJars(Seq(
       SUBMITTER_LOCAL_MAIN_APP_RESOURCE, s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KERBEROS_CONF, "simple")
+    ))
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -248,7 +245,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
       .set(
         RESOURCE_STAGING_SERVER_CLIENT_CERT_PEM.key, keyAndCertificate.certPem.getAbsolutePath)
-      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -266,7 +262,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(
       s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
       kubernetesTestComponents.clientConfig.getCaCertFile)
-    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -276,7 +271,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
     Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
     launchStagingServer(SSLOptions(), None)
-    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KERBEROS_CONF, "simple")
+    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath)
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       FILE_EXISTENCE_MAIN_CLASS,
@@ -290,7 +285,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
 
     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).setAppName("long" * 40)
-      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -307,7 +301,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     }
     sparkConf.set(RESOURCE_STAGING_SERVER_URI,
       s"$resourceStagingServerUriScheme://" +
-        s"${Minikube.getMinikubeIp}:$resourceStagingServerPort")
+        s"${Minikube.getMinikubeIp}:$resourceStagingServerPort").set(KERBEROS_CONF, "simple")
   }
 
   private def launchKerberizedCluster(): Unit = {
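Taken together, the hunks above stop setting the Kerberos options in each individual test and instead attach `.set(KERBEROS_CONF, "simple")` once, where the resource staging server URI is configured. Below is a minimal sketch of how that helper could read after the change; the method signature, the `KeyAndCertPem` type, and the `resourceStagingServerLauncher` call are assumptions inferred from the surrounding context, not shown in this diff.

  // Sketch only: signature, KeyAndCertPem, and resourceStagingServerLauncher are assumed.
  private def launchStagingServer(
      resourceStagingServerSslOptions: SSLOptions,
      keyAndCertPem: Option[KeyAndCertPem]): Unit = {
    assume(testBackend.name == MINIKUBE_TEST_BACKEND)
    val resourceStagingServerPort = resourceStagingServerLauncher.launchStagingServer(
      resourceStagingServerSslOptions, keyAndCertPem)
    val resourceStagingServerUriScheme = if (resourceStagingServerSslOptions.enabled) {
      "https"
    } else {
      "http"
    }
    // The Kerberos auth mode is now applied here once, instead of in every test body.
    sparkConf.set(RESOURCE_STAGING_SERVER_URI,
      s"$resourceStagingServerUriScheme://" +
        s"${Minikube.getMinikubeIp}:$resourceStagingServerPort").set(KERBEROS_CONF, "simple")
  }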