diff --git a/README.md b/README.md
index a5109f4e12897..9905e6c9f5751 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,7 @@ This is a collaboratively maintained project working on [SPARK-18278](https://is
- [Usage guide](https://apache-spark-on-k8s.github.io/userdocs/) shows how to run the code
- [Development docs](resource-managers/kubernetes/README.md) shows how to get set up for development
+- [Architecture docs](resource-managers/kubernetes/architecture-docs/) shows the high level architecture of Spark on Kubernetes
- Code is primarily located in the [resource-managers/kubernetes](resource-managers/kubernetes) folder
## Why does this fork exist?
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 13930630f2338..29ab454fee253 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 2d29bfc8ea89a..f437084ec6dd5 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index b837c8a2be8a4..b80c1df889731 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index 45e128b6e1cfd..ae9ec59180486 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index 121b2489fbb72..60549568be0ef 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/common/tags/pom.xml b/common/tags/pom.xml
index 31d0d2efc654a..764be312470cf 100644
--- a/common/tags/pom.xml
+++ b/common/tags/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index 78eaf8624df93..d0b543137781f 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/conf/kubernetes-resource-staging-server.yaml b/conf/kubernetes-resource-staging-server.yaml
index 025b9b125d9e0..80d59b8091903 100644
--- a/conf/kubernetes-resource-staging-server.yaml
+++ b/conf/kubernetes-resource-staging-server.yaml
@@ -32,7 +32,7 @@ spec:
name: spark-resource-staging-server-config
containers:
- name: spark-resource-staging-server
- image: kubespark/spark-resource-staging-server:v2.1.0-kubernetes-0.2.0
+ image: kubespark/spark-resource-staging-server:v2.2.0-kubernetes-0.3.0
resources:
requests:
cpu: 100m
diff --git a/conf/kubernetes-shuffle-service.yaml b/conf/kubernetes-shuffle-service.yaml
index 55c170b01a4f5..8ab0b362ea32e 100644
--- a/conf/kubernetes-shuffle-service.yaml
+++ b/conf/kubernetes-shuffle-service.yaml
@@ -20,14 +20,14 @@ kind: DaemonSet
metadata:
labels:
app: spark-shuffle-service
- spark-version: 2.1.0
+ spark-version: 2.2.0
name: shuffle
spec:
template:
metadata:
labels:
app: spark-shuffle-service
- spark-version: 2.1.0
+ spark-version: 2.2.0
spec:
volumes:
- name: temp-volume
@@ -38,7 +38,7 @@ spec:
# This is an official image that is built
# from the dockerfiles/shuffle directory
# in the spark distribution.
- image: kubespark/spark-shuffle:v2.1.0-kubernetes-0.2.0
+ image: kubespark/spark-shuffle:v2.2.0-kubernetes-0.3.0
imagePullPolicy: IfNotPresent
volumeMounts:
- mountPath: '/tmp'
@@ -51,4 +51,4 @@ spec:
requests:
cpu: "1"
limits:
- cpu: "1"
\ No newline at end of file
+ cpu: "1"
diff --git a/core/pom.xml b/core/pom.xml
index d80b2591d80c3..a152b27284abf 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 1305aeb8c1faf..ed46adcbe9dfb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -639,7 +639,9 @@ object SparkSubmit extends CommandLineUtils {
if (args.isPython) {
childArgs ++= Array("--primary-py-file", args.primaryResource)
childArgs ++= Array("--main-class", "org.apache.spark.deploy.PythonRunner")
- childArgs ++= Array("--other-py-files", args.pyFiles)
+ if (args.pyFiles != null) {
+ childArgs ++= Array("--other-py-files", args.pyFiles)
+ }
} else {
childArgs ++= Array("--primary-java-resource", args.primaryResource)
childArgs ++= Array("--main-class", args.mainClass)
diff --git a/docs/running-on-kubernetes.md b/docs/running-on-kubernetes.md
index 5e23801e15b10..5346e54e62aec 100644
--- a/docs/running-on-kubernetes.md
+++ b/docs/running-on-kubernetes.md
@@ -17,8 +17,10 @@ cluster, you may setup a test cluster on your local machine using
* You must have appropriate permissions to create and list [pods](https://kubernetes.io/docs/user-guide/pods/),
[ConfigMaps](https://kubernetes.io/docs/tasks/configure-pod-container/configmap/) and
[secrets](https://kubernetes.io/docs/concepts/configuration/secret/) in your cluster. You can verify that
-you can list these resources by running `kubectl get pods` `kubectl get configmap`, and `kubectl get secrets` which
+you can list these resources by running `kubectl get pods`, `kubectl get configmap`, and `kubectl get secrets` which
should give you a list of pods and configmaps (if any) respectively.
+ * The service account or credentials used by the driver pods must also have appropriate
+ permissions for editing pod specs.
* You must have a spark distribution with Kubernetes support. This may be obtained from the
[release tarball](https://github.com/apache-spark-on-k8s/spark/releases) or by
[building Spark with Kubernetes support](../resource-managers/kubernetes/README.md#building-spark-with-kubernetes-support).
@@ -36,15 +38,15 @@ If you wish to use pre-built docker images, you may use the images published in
Component | Image |
Spark Driver Image |
- kubespark/spark-driver:v2.1.0-kubernetes-0.2.0 |
+ kubespark/spark-driver:v2.2.0-kubernetes-0.3.0 |
Spark Executor Image |
- kubespark/spark-executor:v2.1.0-kubernetes-0.2.0 |
+ kubespark/spark-executor:v2.2.0-kubernetes-0.3.0 |
Spark Initialization Image |
- kubespark/spark-init:v2.1.0-kubernetes-0.2.0 |
+ kubespark/spark-init:v2.2.0-kubernetes-0.3.0 |
@@ -80,9 +82,9 @@ are set up as described above:
--kubernetes-namespace default \
--conf spark.executor.instances=5 \
--conf spark.app.name=spark-pi \
- --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.1.0-kubernetes-0.2.0 \
+ --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.2.0-kubernetes-0.3.0 \
local:///opt/spark/examples/jars/spark_examples_2.11-2.2.0.jar
The Spark master, specified either via passing the `--master` command line argument to `spark-submit` or by setting
@@ -107,6 +109,18 @@ Finally, notice that in the above example we specify a jar with a specific URI w
the location of the example jar that is already in the Docker image. Using dependencies that are on your machine's local
disk is discussed below.
+When Kubernetes [RBAC](https://kubernetes.io/docs/admin/authorization/rbac/) is enabled,
+the `default` service account used by the driver may not have appropriate pod `edit` permissions
+for launching executor pods. We recommend adding another service account, say `spark`, with
+the necessary privilege. For example:
+
+ kubectl create serviceaccount spark
+ kubectl create clusterrolebinding spark-edit --clusterrole edit \
+ --serviceaccount default:spark --namespace default
+
+With this, one can add `--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark` to
+the spark-submit command line above to specify the service account to use.
+
## Dependency Management
Application dependencies that are being submitted from your machine need to be sent to a **resource staging server**
@@ -129,9 +143,9 @@ and then you can compute the value of Pi as follows:
--kubernetes-namespace default \
--conf spark.executor.instances=5 \
--conf spark.app.name=spark-pi \
- --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.1.0-kubernetes-0.2.0 \
+ --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.2.0-kubernetes-0.3.0 \
--conf spark.kubernetes.resourceStagingServer.uri=http://:31000 \
examples/jars/spark_examples_2.11-2.2.0.jar
@@ -170,9 +184,9 @@ If our local proxy were listening on port 8001, we would have our submission loo
--kubernetes-namespace default \
--conf spark.executor.instances=5 \
--conf spark.app.name=spark-pi \
- --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.1.0-kubernetes-0.2.0 \
+ --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.2.0-kubernetes-0.3.0 \
local:///opt/spark/examples/jars/spark_examples_2.11-2.2.0.jar
Communication between Spark and Kubernetes clusters is performed using the fabric8 kubernetes-client library.
@@ -220,7 +234,7 @@ service because there may be multiple shuffle service instances running in a clu
a way to target a particular shuffle service.
For example, if the shuffle service we want to use is in the default namespace, and
-has pods with labels `app=spark-shuffle-service` and `spark-version=2.1.0`, we can
+has pods with labels `app=spark-shuffle-service` and `spark-version=2.2.0`, we can
use those tags to target that particular shuffle service at job launch time. In order to run a job with dynamic allocation enabled,
the command may then look like the following:
@@ -235,7 +249,7 @@ the command may then look like the following:
--conf spark.dynamicAllocation.enabled=true \
--conf spark.shuffle.service.enabled=true \
--conf spark.kubernetes.shuffle.namespace=default \
- --conf spark.kubernetes.shuffle.labels="app=spark-shuffle-service,spark-version=2.1.0" \
+ --conf spark.kubernetes.shuffle.labels="app=spark-shuffle-service,spark-version=2.2.0" \
local:///opt/spark/examples/jars/spark_examples_2.11-2.2.0.jar 10 400000 2
## Advanced
@@ -312,9 +326,9 @@ communicate with the resource staging server over TLS. The trustStore can be set
--kubernetes-namespace default \
--conf spark.executor.instances=5 \
--conf spark.app.name=spark-pi \
- --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.1.0-kubernetes-0.2.0 \
- --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.1.0-kubernetes-0.2.0 \
+ --conf spark.kubernetes.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.executor.docker.image=kubespark/spark-executor:v2.2.0-kubernetes-0.3.0 \
+ --conf spark.kubernetes.initcontainer.docker.image=kubespark/spark-init:v2.2.0-kubernetes-0.3.0 \
--conf spark.kubernetes.resourceStagingServer.uri=https://:31000 \
--conf spark.ssl.kubernetes.resourceStagingServer.enabled=true \
--conf spark.ssl.kubernetes.resourceStagingServer.clientCertPem=/home/myuser/cert.pem \
@@ -768,6 +782,22 @@ from the other deployment modes. See the [configuration page](configuration.html
myIdentifier
. Multiple node selector keys can be added by setting multiple configurations with this prefix.
+
+ spark.executorEnv.[EnvironmentVariableName] |
+ (none) |
+
+ Add the environment variable specified by EnvironmentVariableName to
+ the Executor process. The user can specify multiple of these to set multiple environment variables.
+ |
+
+
+ spark.kubernetes.driverEnv.[EnvironmentVariableName] |
+ (none) |
+
+ Add the environment variable specified by EnvironmentVariableName to
+ the Driver process. The user can specify multiple of these to set multiple environment variables.
+ |
+
diff --git a/examples/pom.xml b/examples/pom.xml
index d2135bf5ff192..3f7814d060526 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index fdf7611936346..f90eb57c64dfe 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/flume-assembly/pom.xml b/external/flume-assembly/pom.xml
index 3ae7d254c95a1..8592d85ccbd3f 100644
--- a/external/flume-assembly/pom.xml
+++ b/external/flume-assembly/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index 89e61be25d8c9..ef0231990c3c9 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index 897af93b6b8a2..606116ad0860c 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
index 5513dfea0f281..586f87d312ec0 100644
--- a/external/java8-tests/pom.xml
+++ b/external/java8-tests/pom.xml
@@ -20,7 +20,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kafka-0-10-assembly/pom.xml b/external/kafka-0-10-assembly/pom.xml
index d836fffdb56b6..39e1339bbf2a1 100644
--- a/external/kafka-0-10-assembly/pom.xml
+++ b/external/kafka-0-10-assembly/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kafka-0-10-sql/pom.xml b/external/kafka-0-10-sql/pom.xml
index 10419a1275f73..68ee5be2889b2 100644
--- a/external/kafka-0-10-sql/pom.xml
+++ b/external/kafka-0-10-sql/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kafka-0-10/pom.xml b/external/kafka-0-10/pom.xml
index a0a7b26f2b71e..23e4eef8253b2 100644
--- a/external/kafka-0-10/pom.xml
+++ b/external/kafka-0-10/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kafka-0-8-assembly/pom.xml b/external/kafka-0-8-assembly/pom.xml
index 4cd40dbe89689..aa36d4a713e1e 100644
--- a/external/kafka-0-8-assembly/pom.xml
+++ b/external/kafka-0-8-assembly/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kafka-0-8/pom.xml b/external/kafka-0-8/pom.xml
index 57ad12dc70709..af7de9380e271 100644
--- a/external/kafka-0-8/pom.xml
+++ b/external/kafka-0-8/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kinesis-asl-assembly/pom.xml b/external/kinesis-asl-assembly/pom.xml
index 240db2098d1f8..ced0e89bdff16 100644
--- a/external/kinesis-asl-assembly/pom.xml
+++ b/external/kinesis-asl-assembly/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/kinesis-asl/pom.xml b/external/kinesis-asl/pom.xml
index c6a7ceb80c465..0ae897f62dda3 100644
--- a/external/kinesis-asl/pom.xml
+++ b/external/kinesis-asl/pom.xml
@@ -20,7 +20,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/external/spark-ganglia-lgpl/pom.xml b/external/spark-ganglia-lgpl/pom.xml
index cf23a63e23cbe..2744a9fdea489 100644
--- a/external/spark-ganglia-lgpl/pom.xml
+++ b/external/spark-ganglia-lgpl/pom.xml
@@ -20,7 +20,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../../pom.xml
diff --git a/graphx/pom.xml b/graphx/pom.xml
index 8fddc5bfee0f1..e59db233b54d8 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/launcher/pom.xml b/launcher/pom.xml
index 705675af7a01f..9df0c2d79a5fb 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -22,7 +22,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/mllib-local/pom.xml b/mllib-local/pom.xml
index 155adae6afa61..d120f8f3fc4f0 100644
--- a/mllib-local/pom.xml
+++ b/mllib-local/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 25c38e2281eff..755f1cfe954f0 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -21,7 +21,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
../pom.xml
diff --git a/pom.xml b/pom.xml
index 7835481531216..d90ad23752f54 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,7 +26,7 @@
org.apache.spark
spark-parent_2.11
- 2.2.0-k8s-0.3.0-SNAPSHOT
+ 2.2.0-k8s-0.4.0-SNAPSHOT
pom
Spark Project Parent POM
http://spark.apache.org/
@@ -2127,7 +2127,7 @@
${project.build.directory}/surefire-reports
.
SparkTestSuite.txt
- -ea -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraScalaTestArgs}
+ -ea -Xmx3g -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraScalaTestArgs}