Skip to content

Commit 41415d4

Browse files
committed
Rebase to upstream's version of Kubernetes support.
1 parent 5d88c95 commit 41415d4

File tree

157 files changed

+3303
-11905
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

157 files changed

+3303
-11905
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 14 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,13 @@
1818
package org.apache.spark.deploy
1919

2020
import java.io._
21-
import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException}
21+
import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
2222
import java.net.URL
2323
import java.security.PrivilegedExceptionAction
2424
import java.text.ParseException
2525

2626
import scala.annotation.tailrec
27-
import scala.collection.mutable.ArrayBuffer
27+
import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
2828
import scala.util.{Properties, Try}
2929

3030
import org.apache.commons.lang3.StringUtils
@@ -99,7 +99,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
9999
private[deploy] val REST_CLUSTER_SUBMIT_CLASS = classOf[RestSubmissionClientApp].getName()
100100
private[deploy] val STANDALONE_CLUSTER_SUBMIT_CLASS = classOf[ClientApp].getName()
101101
private[deploy] val KUBERNETES_CLUSTER_SUBMIT_CLASS =
102-
"org.apache.spark.deploy.k8s.submit.Client"
102+
"org.apache.spark.deploy.k8s.submit.KubernetesClientApplication"
103103

104104
// scalastyle:off println
105105
private[spark] def printVersionAndExit(): Unit = {
@@ -310,10 +310,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
310310

311311
// Fail fast, the following modes are not supported or applicable
312312
(clusterManager, deployMode) match {
313-
case (KUBERNETES, CLIENT) =>
314-
printErrorAndExit("Client mode is currently not supported for Kubernetes.")
315-
case (KUBERNETES, CLUSTER) if args.isR =>
316-
printErrorAndExit("Kubernetes does not currently support R applications.")
317313
case (STANDALONE, CLUSTER) if args.isPython =>
318314
printErrorAndExit("Cluster deploy mode is currently not supported for python " +
319315
"applications on standalone clusters.")
@@ -324,6 +320,8 @@ object SparkSubmit extends CommandLineUtils with Logging {
324320
printErrorAndExit("Python applications are currently not supported for Kubernetes.")
325321
case (KUBERNETES, _) if args.isR =>
326322
printErrorAndExit("R applications are currently not supported for Kubernetes.")
323+
case (KUBERNETES, CLIENT) =>
324+
printErrorAndExit("Client mode is currently not supported for Kubernetes.")
327325
case (LOCAL, CLUSTER) =>
328326
printErrorAndExit("Cluster deploy mode is not compatible with master \"local\"")
329327
case (_, CLUSTER) if isShell(args.primaryResource) =>
@@ -343,8 +341,8 @@ object SparkSubmit extends CommandLineUtils with Logging {
343341
}
344342
val isYarnCluster = clusterManager == YARN && deployMode == CLUSTER
345343
val isMesosCluster = clusterManager == MESOS && deployMode == CLUSTER
346-
val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
347344
val isStandAloneCluster = clusterManager == STANDALONE && deployMode == CLUSTER
345+
val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
348346

349347
if (!isMesosCluster && !isStandAloneCluster) {
350348
// Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
@@ -579,9 +577,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
579577
OptionAssigner(args.principal, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.principal"),
580578
OptionAssigner(args.keytab, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.keytab"),
581579

582-
OptionAssigner(args.kubernetesNamespace, KUBERNETES, ALL_DEPLOY_MODES,
583-
confKey = "spark.kubernetes.namespace"),
584-
585580
// Other options
586581
OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
587582
confKey = "spark.executor.cores"),
@@ -649,9 +644,8 @@ object SparkSubmit extends CommandLineUtils with Logging {
649644

650645
// Add the application jar automatically so the user doesn't have to call sc.addJar
651646
// For YARN cluster mode, the jar is already distributed on each node as "app.jar"
652-
// In Kubernetes cluster mode, the jar will be uploaded by the client separately.
653647
// For python and R files, the primary resource is already distributed as a regular file
654-
if (!isYarnCluster && !isKubernetesCluster && !args.isPython && !args.isR) {
648+
if (!isYarnCluster && !args.isPython && !args.isR) {
655649
var jars = sparkConf.getOption("spark.jars").map(x => x.split(",").toSeq).getOrElse(Seq.empty)
656650
if (isUserJar(args.primaryResource)) {
657651
jars = jars ++ Seq(args.primaryResource)
@@ -733,21 +727,14 @@ object SparkSubmit extends CommandLineUtils with Logging {
733727

734728
if (isKubernetesCluster) {
735729
childMainClass = KUBERNETES_CLUSTER_SUBMIT_CLASS
736-
if (args.isPython) {
737-
childArgs ++= Array("--primary-py-file", args.primaryResource)
738-
childArgs ++= Array("--main-class", "org.apache.spark.deploy.PythonRunner")
739-
if (args.pyFiles != null) {
740-
childArgs ++= Array("--other-py-files", args.pyFiles)
741-
}
742-
} else {
743-
if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
744-
childArgs ++= Array("--primary-java-resource", args.primaryResource)
745-
}
746-
childArgs ++= Array("--main-class", args.mainClass)
730+
if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
731+
childArgs ++= Array("--primary-java-resource", args.primaryResource)
747732
}
748-
args.childArgs.foreach { arg =>
749-
childArgs += "--arg"
750-
childArgs += arg
733+
childArgs ++= Array("--main-class", args.mainClass)
734+
if (args.childArgs != null) {
735+
args.childArgs.foreach { arg =>
736+
childArgs += ("--arg", arg)
737+
}
751738
}
752739
}
753740

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -74,9 +74,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
7474
var principal: String = null
7575
var keytab: String = null
7676

77-
// Kubernetes only
78-
var kubernetesNamespace: String = null
79-
8077
// Standalone cluster mode only
8178
var supervise: Boolean = false
8279
var driverCores: String = null
@@ -201,9 +198,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
201198
queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
202199
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
203200
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
204-
kubernetesNamespace = Option(kubernetesNamespace)
205-
.orElse(sparkProperties.get("spark.kubernetes.namespace"))
206-
.orNull
207201

208202
// Try to set main class from JAR if no --class argument is given
209203
if (mainClass == null && !isPython && !isR && primaryResource != null) {
@@ -460,9 +454,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
460454
case KEYTAB =>
461455
keytab = value
462456

463-
case KUBERNETES_NAMESPACE =>
464-
kubernetesNamespace = value
465-
466457
case HELP =>
467458
printUsageAndExit(0)
468459

resource-managers/kubernetes/README.md

Lines changed: 0 additions & 93 deletions
This file was deleted.

resource-managers/kubernetes/architecture-docs/external-shuffle-service.md

Lines changed: 0 additions & 29 deletions
This file was deleted.

resource-managers/kubernetes/architecture-docs/scheduler-backend.md

Lines changed: 0 additions & 48 deletions
This file was deleted.

0 commit comments

Comments (0)