@@ -133,6 +133,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
+      .set(KUBERNETES_KERBEROS_SUPPORT, false)
 
     runPySparkPiAndVerifyCompletion(
       PYSPARK_PI_SUBMITTER_LOCAL_FILE_LOCATION,
@@ -149,13 +150,14 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
+      .set(KUBERNETES_KERBEROS_SUPPORT, false)
 
     runPySparkPiAndVerifyCompletion(PYSPARK_PI_CONTAINER_LOCAL_FILE_LOCATION, Seq.empty[String])
   }
 
   test("Simple submission test with the resource staging server.") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
-
+    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
     launchStagingServer(SSLOptions(), None)
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }
@@ -176,6 +178,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.ssl.kubernetes.resourceStagingServer.keyStorePassword", "keyStore")
       .set("spark.ssl.kubernetes.resourceStagingServer.keyPassword", "key")
       .set("spark.ssl.kubernetes.resourceStagingServer.trustStorePassword", "trustStore")
+      .set(KUBERNETES_KERBEROS_SUPPORT, false)
     launchStagingServer(SSLOptions(
       enabled = true,
       keyStore = Some(keyStoreAndTrustStore.keyStore),
@@ -190,7 +193,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
   test("Use container-local resources without the resource staging server") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
 
-    sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
+    sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -206,6 +209,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set("spark.kubernetes.shuffle.labels", "app=spark-shuffle-service")
     sparkConf.set("spark.kubernetes.shuffle.namespace", kubernetesTestComponents.namespace)
     sparkConf.set("spark.app.name", "group-by-test")
+    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       GROUP_BY_MAIN_CLASS,
@@ -221,7 +225,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.setJars(Seq(
       s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
       s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    ))
+    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -231,7 +235,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
     sparkConf.setJars(Seq(
       SUBMITTER_LOCAL_MAIN_APP_RESOURCE, s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    ))
+    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -244,6 +248,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
       .set(
         RESOURCE_STAGING_SERVER_CLIENT_CERT_PEM.key, keyAndCertificate.certPem.getAbsolutePath)
+      .set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -261,6 +266,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(
       s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
       kubernetesTestComponents.clientConfig.getCaCertFile)
+    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }
 
@@ -270,7 +276,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
     Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
     launchStagingServer(SSLOptions(), None)
-    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath)
+    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       FILE_EXISTENCE_MAIN_CLASS,
@@ -284,6 +290,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
 
     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).setAppName("long" * 40)
+      .set(KUBERNETES_KERBEROS_SUPPORT, false)
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }
 
@@ -423,8 +430,8 @@ private[spark] object KubernetesSuite {
     s"integration-tests-jars/${EXAMPLES_JAR_FILE.getName}"
   val CONTAINER_LOCAL_HELPER_JAR_PATH = s"local:///opt/spark/examples/" +
     s"integration-tests-jars/${HELPER_JAR_FILE.getName}"
-  val TIMEOUT = PatienceConfiguration.Timeout(Span(15, Minutes))
-  val INTERVAL = PatienceConfiguration.Interval(Span(15, Seconds))
+  val TIMEOUT = PatienceConfiguration.Timeout(Span(10, Minutes))
+  val INTERVAL = PatienceConfiguration.Interval(Span(10, Seconds))
   val SPARK_PI_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" +
     ".integrationtest.jobs.SparkPiWithInfiniteWait"
   val PYSPARK_PI_MAIN_CLASS = "org.apache.spark.deploy.PythonRunner"
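Note for readers: the diff relies on a KUBERNETES_KERBEROS_SUPPORT config entry defined elsewhere in the tree. As a rough sketch only (the key name, doc text, and object name below are assumptions, not part of this change), a boolean entry like that is typically declared with Spark's ConfigBuilder so the typed .set(KUBERNETES_KERBEROS_SUPPORT, false) calls above compile:

import org.apache.spark.internal.config.ConfigBuilder

private[spark] object KerberosConfigSketch {
  // Hypothetical declaration; the real entry may use a different key, doc, or default.
  val KUBERNETES_KERBEROS_SUPPORT = ConfigBuilder("spark.kubernetes.kerberos.enabled")
    .doc("Whether the submitted application requires Kerberos authentication, e.g. to access secure HDFS.")
    .booleanConf
    .createWithDefault(false)
}

Because SparkConf.set(entry, value) with a typed ConfigEntry is private[spark], those calls compile only from code inside the org.apache.spark package tree, which this integration-test suite is.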