@@ -24,12 +24,7 @@ usage () {
24
24
echo " The deployment mode can be specified using the 'd' flag."
25
25
}
26
26
27
# Run from the directory containing this script so the relative paths used
# below (integration-test/, spark/) resolve regardless of the caller's cwd.
# This replaces the older "must be invoked from the repo top level" check.
# NOTE: `set -ex` is only enabled further down, so guard this cd explicitly.
cd "$(dirname "$0")" || exit 1
### Set sensible defaults ###
# Upstream Spark repository to clone and build. May be overridden by the
# option parsing elsewhere in this script (not visible in this hunk).
REPO='https://github.com/apache/spark'
echo "Running tests on cluster $MASTER against $REPO."
echo "Spark images will be created in $IMAGE_REPO"

# Trace every command and abort on the first failure. pipefail ensures a
# failure inside a pipeline (e.g. the `git rev-parse | cut` below) is not
# masked by a succeeding final stage.
set -ex
set -o pipefail

# Absolute path to the top of this integration-test repository, and the
# location where the Spark repo will be cloned and built.
TEST_ROOT=$(git rev-parse --show-toplevel)
SPARK_REPO_ROOT="$TEST_ROOT/spark"
# Clone the Spark repository if it is not already present; otherwise update
# the existing checkout from the requested branch.
if [ -d "$SPARK_REPO_ROOT" ]; then
  # Subshell keeps the pull's cwd change from leaking into this script.
  (cd "$SPARK_REPO_ROOT" && git pull origin "$BRANCH")
else
  git clone "$REPO" "$SPARK_REPO_ROOT"
fi

cd "$SPARK_REPO_ROOT"
# Check out the requested branch and build a Spark distribution tarball with
# Kubernetes support. Spark's own unit tests are skipped — this script runs
# its own integration suite below.
git checkout -B "$BRANCH" "origin/$BRANCH"
./dev/make-distribution.sh --tgz -Phadoop-2.7 -Pkubernetes -DskipTests

# Short (6-character) commit SHA, used to tag the Docker images below.
TAG=$(git rev-parse HEAD | cut -c -6)
echo "Spark distribution built at SHA $TAG"

cd "$SPARK_REPO_ROOT/dist"
95
# Build the Spark Docker images tagged with the commit SHA, then push them
# wherever the deployment mode requires.
if [[ $DEPLOY_MODE == cloud ]]; then
  ./sbin/build-push-docker-images.sh -r "$IMAGE_REPO" -t "$TAG" build
  # RHS left unquoted on purpose: it is a glob match for any gcr.io registry.
  if [[ $IMAGE_REPO == gcr.io* ]]; then
    # Google Container Registry needs the gcloud credential helper to push.
    gcloud docker -- push "$IMAGE_REPO/spark-driver:$TAG" && \
      gcloud docker -- push "$IMAGE_REPO/spark-executor:$TAG" && \
      gcloud docker -- push "$IMAGE_REPO/spark-init:$TAG"
  else
    ./sbin/build-push-docker-images.sh -r "$IMAGE_REPO" -t "$TAG" push
  fi
else
  # -m: build against the Docker daemon inside minikube (no push needed).
  ./sbin/build-push-docker-images.sh -m -r "$IMAGE_REPO" -t "$TAG" build
fi
113
110
114
# Run the integration test suite against the freshly built distribution and
# images. The *.tgz glob is intentionally unquoted so the shell expands it to
# the tarball produced by make-distribution.sh.
cd "$TEST_ROOT/integration-test"
# NOTE(review): `|| :` deliberately discards mvn's exit status so the finish
# banner always prints — the script's exit code will not reflect test
# failures. Confirm this is the intended CI contract before changing it.
"$SPARK_REPO_ROOT/build/mvn" clean -Ddownload.plugin.skip=true integration-test \
  -Dspark-distro-tgz="$SPARK_REPO_ROOT"/*.tgz \
  -DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
    -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$TAG \
    -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$TAG \
    -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$TAG" || :

echo "TEST SUITE FINISHED"
0 commit comments