Skip to content

Commit 737c23a

Browse files
mccheah authored and foxish committed
Use git rev-parse --show-toplevel for the root dir. (#28)
* Use git rev-parse --show-toplevel for the root dir. Makes it so that the test scripts do not necessarily have to be run from the top level of the repository.
* Address comments and fix bugs
1 parent dbd5643 commit 737c23a

File tree

2 files changed

+25
-27
lines changed

2 files changed

+25
-27
lines changed

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,6 @@
11
.idea/
2+
spark/
23
integration-test/target/
34
*.class
45
*.log
5-
*.iml
6+
*.iml

e2e/runner.sh

Lines changed: 23 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -24,12 +24,7 @@ usage () {
2424
echo " The deployment mode can be specified using the 'd' flag."
2525
}
2626

27-
### Basic Validation ###
28-
if [ ! -d "integration-test" ]; then
29-
echo "This script must be invoked from the top-level directory of the integration-tests repository"
30-
usage
31-
exit 1
32-
fi
27+
cd "$(dirname "$0")"
3328

3429
### Set sensible defaults ###
3530
REPO="https://github.com/apache/spark"
@@ -79,44 +74,46 @@ echo "Running tests on cluster $MASTER against $REPO."
7974
echo "Spark images will be created in $IMAGE_REPO"
8075

8176
set -ex
82-
root=$(pwd)
83-
77+
TEST_ROOT=$(git rev-parse --show-toplevel)
78+
SPARK_REPO_ROOT="$TEST_ROOT/spark"
8479
# clone spark distribution if needed.
85-
if [ -d "spark" ];
80+
if [ -d "$SPARK_REPO_ROOT" ];
8681
then
87-
(cd spark && git pull origin $BRANCH);
82+
(cd $SPARK_REPO_ROOT && git pull origin $BRANCH);
8883
else
89-
git clone $REPO;
84+
git clone $REPO $SPARK_REPO_ROOT
9085
fi
9186

92-
cd spark
87+
cd $SPARK_REPO_ROOT
9388
git checkout -B $BRANCH origin/$BRANCH
9489
./dev/make-distribution.sh --tgz -Phadoop-2.7 -Pkubernetes -DskipTests
95-
tag=$(git rev-parse HEAD | cut -c -6)
96-
echo "Spark distribution built at SHA $tag"
90+
TAG=$(git rev-parse HEAD | cut -c -6)
91+
echo "Spark distribution built at SHA $TAG"
92+
93+
cd $SPARK_REPO_ROOT/dist
9794

9895
if [[ $DEPLOY_MODE == cloud ]] ;
9996
then
100-
cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag build
97+
./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG build
10198
if [[ $IMAGE_REPO == gcr.io* ]] ;
10299
then
103-
gcloud docker -- push $IMAGE_REPO/spark-driver:$tag && \
104-
gcloud docker -- push $IMAGE_REPO/spark-executor:$tag && \
105-
gcloud docker -- push $IMAGE_REPO/spark-init:$tag
100+
gcloud docker -- push $IMAGE_REPO/spark-driver:$TAG && \
101+
gcloud docker -- push $IMAGE_REPO/spark-executor:$TAG && \
102+
gcloud docker -- push $IMAGE_REPO/spark-init:$TAG
106103
else
107-
./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag push
104+
./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG push
108105
fi
109106
else
110107
# -m option for minikube.
111-
cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $tag build
108+
./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $TAG build
112109
fi
113110

114-
cd $root/integration-test
115-
$root/spark/build/mvn clean -Ddownload.plugin.skip=true integration-test \
116-
-Dspark-distro-tgz=$root/spark/*.tgz \
111+
cd $TEST_ROOT/integration-test
112+
$SPARK_REPO_ROOT/build/mvn clean -Ddownload.plugin.skip=true integration-test \
113+
-Dspark-distro-tgz=$SPARK_REPO_ROOT/*.tgz \
117114
-DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
118-
-Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$tag \
119-
-Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag \
120-
-Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$tag" || :
115+
-Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$TAG \
116+
-Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$TAG \
117+
-Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$TAG" || :
121118

122119
echo "TEST SUITE FINISHED"

0 commit comments

Comments (0)