
Commit d6d0945

modify security.authentication
1 parent 8338fdb commit d6d0945

File tree

1 file changed (+11, -10 lines)
  • resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest


resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 11 additions & 10 deletions
@@ -133,7 +133,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")

     runPySparkPiAndVerifyCompletion(
       PYSPARK_PI_SUBMITTER_LOCAL_FILE_LOCATION,
@@ -150,7 +150,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
         System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
       .set(EXECUTOR_DOCKER_IMAGE,
         System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")

     runPySparkPiAndVerifyCompletion(PYSPARK_PI_CONTAINER_LOCAL_FILE_LOCATION, Seq.empty[String])
   }
@@ -178,7 +178,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.ssl.kubernetes.resourceStagingServer.keyStorePassword", "keyStore")
       .set("spark.ssl.kubernetes.resourceStagingServer.keyPassword", "key")
       .set("spark.ssl.kubernetes.resourceStagingServer.trustStorePassword", "trustStore")
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     launchStagingServer(SSLOptions(
       enabled = true,
       keyStore = Some(keyStoreAndTrustStore.keyStore),
@@ -209,7 +209,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set("spark.kubernetes.shuffle.labels", "app=spark-shuffle-service")
     sparkConf.set("spark.kubernetes.shuffle.namespace", kubernetesTestComponents.namespace)
     sparkConf.set("spark.app.name", "group-by-test")
-    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       GROUP_BY_MAIN_CLASS,
@@ -225,7 +225,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.setJars(Seq(
       s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
       s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    )).set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -235,7 +235,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
     sparkConf.setJars(Seq(
       SUBMITTER_LOCAL_MAIN_APP_RESOURCE, s"$assetServerUri/${HELPER_JAR_FILE.getName}"
-    )).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    )).set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -248,7 +248,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
       .set(
         RESOURCE_STAGING_SERVER_CLIENT_CERT_PEM.key, keyAndCertificate.certPem.getAbsolutePath)
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
   }

@@ -266,7 +266,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     sparkConf.set(
       s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
       kubernetesTestComponents.clientConfig.getCaCertFile)
-    sparkConf.set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
   }

@@ -276,7 +276,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
     Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
     launchStagingServer(SSLOptions(), None)
-    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KUBERNETES_KERBEROS_SUPPORT, false)
+    sparkConf.set("spark.files", testExistenceFile.getAbsolutePath).set(KERBEROS_CONF, "simple")
     runSparkApplicationAndVerifyCompletion(
       JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
       FILE_EXISTENCE_MAIN_CLASS,
@@ -290,7 +290,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)

     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).setAppName("long" * 40)
-      .set(KUBERNETES_KERBEROS_SUPPORT, false)
+      .set(KERBEROS_CONF, "simple")
     runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
   }

@@ -447,6 +447,7 @@ private[spark] object KubernetesSuite {
   val HDFS_TEST_CLASS = "org.apache.spark.deploy.kubernetes" +
     ".integrationtest.jobs.HDFSTest"
   val TEST_EXISTENCE_FILE_CONTENTS = "contents"
+  val KERBEROS_CONF = "spark.hadoop.hadoop.security.authentication"


   case object ShuffleNotReadyException extends Exception
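
For context, a minimal sketch of the pattern the commit switches to: instead of toggling a boolean KUBERNETES_KERBEROS_SUPPORT flag, each test now sets Hadoop's authentication mode to "simple" through the new KERBEROS_CONF key. This sketch assumes only spark-core on the classpath; the object name and main method are illustrative and not part of the test suite.

import org.apache.spark.SparkConf

object KerberosConfExample {
  // Mirrors the constant added to the KubernetesSuite companion object
  val KERBEROS_CONF = "spark.hadoop.hadoop.security.authentication"

  def main(args: Array[String]): Unit = {
    // Build a conf the same way the integration tests do, but select
    // Hadoop's "simple" authentication explicitly instead of a boolean flag.
    val sparkConf = new SparkConf(loadDefaults = false)
      .set("spark.app.name", "group-by-test")
      .set(KERBEROS_CONF, "simple")

    println(sparkConf.get(KERBEROS_CONF)) // prints "simple"
  }
}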
