Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 252fa1d

Browse files
authored
Merge branch 'branch-2.2-kubernetes' into add-testing
2 parents e795a20 + 8a0f485 commit 252fa1d

File tree

3 files changed: +154 additions, −2 deletions

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/initcontainer/InitContainerConfigurationStepsOrchestrator.scala

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ import org.apache.spark.SparkConf
2020
import org.apache.spark.deploy.kubernetes.{InitContainerResourceStagingServerSecretPluginImpl, OptionRequirements, SparkPodInitContainerBootstrapImpl}
2121
import org.apache.spark.deploy.kubernetes.config._
2222
import org.apache.spark.deploy.kubernetes.constants._
23-
import org.apache.spark.deploy.kubernetes.submit.SubmittedDependencyUploaderImpl
23+
import org.apache.spark.deploy.kubernetes.submit.{KubernetesFileUtils, SubmittedDependencyUploaderImpl}
2424
import org.apache.spark.deploy.rest.kubernetes.{ResourceStagingServerSslOptionsProviderImpl, RetrofitClientFactoryImpl}
2525
import org.apache.spark.util.Utils
2626

@@ -62,6 +62,12 @@ private[spark] class InitContainerConfigurationStepsOrchestrator(
6262
submissionSparkConf.get(RESOURCE_STAGING_SERVER_INTERNAL_SSL_ENABLED)
6363
.orElse(submissionSparkConf.get(RESOURCE_STAGING_SERVER_SSL_ENABLED))
6464
.getOrElse(false)
65+
66+
OptionRequirements.requireSecondIfFirstIsDefined(
67+
KubernetesFileUtils.getOnlySubmitterLocalFiles(sparkJars).headOption,
68+
resourceStagingServerUri,
69+
"Local JARs were provided, however no resource staging server URI was found.")
70+
6571
OptionRequirements.requireNandDefined(
6672
maybeResourceStagingServerInternalClientCert,
6773
maybeResourceStagingServerInternalTrustStore,

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/initcontainer/InitContainerConfigurationStepsOrchestratorSuite.scala

Lines changed: 75 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,80 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
4444
private val INIT_CONTAINER_CONFIG_MAP_KEY = "spark-init-config-map-key"
4545
private val STAGING_SERVER_URI = "http://localhost:8000"
4646

47+
// Verifies the fail-fast validation: submitter-local jars without a configured
// resource staging server URI must raise IllegalArgumentException at
// orchestrator construction time, with a descriptive message.
test("error thrown if local jars provided without resource staging server") {
  val sparkConf = new SparkConf(true)
    .set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
    .set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)

  // Precondition: this conf deliberately has no staging server URI set.
  assert(sparkConf.get(RESOURCE_STAGING_SERVER_URI).isEmpty)

  val thrown = intercept[IllegalArgumentException] {
    // Construction alone triggers the validation; the original's unused
    // `val orchestrator = ...` binding is dropped.
    new InitContainerConfigurationStepsOrchestrator(
      NAMESPACE,
      APP_RESOURCE_PREFIX,
      SPARK_JARS,
      SPARK_FILES,
      JARS_DOWNLOAD_PATH,
      FILES_DOWNLOAD_PATH,
      DOCKER_IMAGE_PULL_POLICY,
      DRIVER_LABELS,
      INIT_CONTAINER_CONFIG_MAP_NAME,
      INIT_CONTAINER_CONFIG_MAP_KEY,
      sparkConf)
  }

  assert(thrown.getMessage contains
    "Local JARs were provided, however no resource staging server URI was found.")
}
72+
73+
// With only the first entry of SPARK_JARS (non-local per the test name —
// its definition is outside this view) and a staging server URI configured,
// construction must succeed and both the base and submitted-resources steps
// must be planned.
test("error not thrown with non-local jars and resource staging server provided") {
  val conf = new SparkConf(true)
    .set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
    .set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
    .set(RESOURCE_STAGING_SERVER_URI, STAGING_SERVER_URI)

  val orchestrator = new InitContainerConfigurationStepsOrchestrator(
    NAMESPACE,
    APP_RESOURCE_PREFIX,
    SPARK_JARS.take(1),
    SPARK_FILES,
    JARS_DOWNLOAD_PATH,
    FILES_DOWNLOAD_PATH,
    DOCKER_IMAGE_PULL_POLICY,
    DRIVER_LABELS,
    INIT_CONTAINER_CONFIG_MAP_NAME,
    INIT_CONTAINER_CONFIG_MAP_KEY,
    conf)

  val steps: Seq[InitContainerConfigurationStep] = orchestrator.getAllConfigurationSteps()
  assert(steps.size == 2)
  assert(steps.head.isInstanceOf[BaseInitContainerConfigurationStep])
  assert(steps(1).isInstanceOf[SubmittedResourcesInitContainerConfigurationStep])
}
97+
98+
// Without any staging server URI, non-local jars must still be accepted;
// only the base init-container configuration step is planned.
test("error not thrown with non-local jars and no resource staging server provided") {
  val conf = new SparkConf(true)
    .set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
    .set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)

  val orchestrator = new InitContainerConfigurationStepsOrchestrator(
    NAMESPACE,
    APP_RESOURCE_PREFIX,
    SPARK_JARS.take(1),
    SPARK_FILES,
    JARS_DOWNLOAD_PATH,
    FILES_DOWNLOAD_PATH,
    DOCKER_IMAGE_PULL_POLICY,
    DRIVER_LABELS,
    INIT_CONTAINER_CONFIG_MAP_NAME,
    INIT_CONTAINER_CONFIG_MAP_KEY,
    conf)

  val steps: Seq[InitContainerConfigurationStep] = orchestrator.getAllConfigurationSteps()
  assert(steps.size == 1)
  assert(steps.head.isInstanceOf[BaseInitContainerConfigurationStep])
}
120+
47121
test ("including step to contact resource staging server") {
48122
val sparkConf = new SparkConf(true)
49123
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
@@ -77,7 +151,7 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
77151
val orchestrator = new InitContainerConfigurationStepsOrchestrator(
78152
NAMESPACE,
79153
APP_RESOURCE_PREFIX,
80-
SPARK_JARS,
154+
SPARK_JARS.take(1),
81155
SPARK_FILES,
82156
JARS_DOWNLOAD_PATH,
83157
FILES_DOWNLOAD_PATH,

sbin/build-push-docker-images.sh

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.

# Map from image name to the Dockerfile that builds it.
# NOTE: associative arrays require bash >= 4 (macOS ships bash 3.x by default).
declare -A path=( [spark-driver]=dockerfiles/driver/Dockerfile \
                  [spark-executor]=dockerfiles/executor/Dockerfile \
                  [spark-driver-py]=dockerfiles/driver-py/Dockerfile \
                  [spark-executor-py]=dockerfiles/executor-py/Dockerfile \
                  [spark-init]=dockerfiles/init-container/Dockerfile \
                  [spark-shuffle]=dockerfiles/shuffle-service/Dockerfile \
                  [spark-resource-staging-server]=dockerfiles/resource-staging-server/Dockerfile )

# Build the shared base image first, then every image in the map.
function build {
  docker build -t spark-base -f dockerfiles/spark-base/Dockerfile .
  for image in "${!path[@]}"; do
    # Quote expansions so an unusual repo/tag value cannot word-split.
    docker build -t "${REPO}/${image}:${TAG}" -f "${path[$image]}" .
  done
}

# Push every image in the map to the configured repository.
function push {
  for image in "${!path[@]}"; do
    docker push "${REPO}/${image}:${TAG}"
  done
}

function usage {
  echo "Usage: ./sbin/build-push-docker-images.sh -r <repo> -t <tag> build"
  echo "       ./sbin/build-push-docker-images.sh -r <repo> -t <tag> push"
  echo "for example: ./sbin/build-push-docker-images.sh -r docker.io/kubespark -t v2.2.0 push"
}

# Show usage if -h/--help appears anywhere on the command line.
# (The original `[[ "$@" = *--help ]]` glob only matched when the flag was
# the LAST argument, because it pattern-matched the joined argument string.)
for arg in "$@"; do
  if [ "$arg" = "--help" ] || [ "$arg" = "-h" ]; then
    usage
    exit 0
  fi
done

# -r <repo> and -t <tag> are both mandatory.
while getopts r:t: option
do
  case "${option}" in
    r) REPO=${OPTARG};;
    t) TAG=${OPTARG};;
  esac
done

if [ -z "$REPO" ] || [ -z "$TAG" ]; then
  usage
else
  # Dispatch on the last positional argument: "build" or "push".
  case "${@: -1}" in
    build) build;;
    push) push;;
    *) usage;;
  esac
fi

0 commit comments

Comments
 (0)