
Commit ed69645

Remove using nginx file server
1 parent: d5af7fc

File tree

8 files changed (+5, -189 lines)

README.md

Lines changed: 3 additions & 3 deletions
```diff
@@ -11,7 +11,7 @@ is subject to change. Note that currently the integration tests only run with Ja
 The simplest way to run the integration tests is to install and run Minikube, then run the following:
 
     build/mvn integration-test
-
+
 The minimum tested version of Minikube is 0.23.0. The kube-dns addon must be enabled. Minikube should
 run with a minimum of 3 CPUs and 4G of memory:
 
```
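For reference, a minimal command-line sketch of the Minikube setup these README lines describe. The `minikube` flag and addon names below are assumptions based on Minikube releases of this era (0.23.x), so check them against your installed version:

```bash
# Assumed invocation matching the README's stated requirements:
# Minikube >= 0.23.0, kube-dns enabled, at least 3 CPUs and 4G of memory.
minikube start --cpus 3 --memory 4096
minikube addons enable kube-dns

# Then run the integration tests from the repository root:
build/mvn integration-test
```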
```diff
@@ -39,7 +39,7 @@ Therefore the command looks like this:
     -Dspark.kubernetes.test.deployMode=cloud \
     -Dspark.kubernetes.test.master=https://example.com:8443/apiserver \
     -Dspark.kubernetes.test.repo=docker.example.com/spark-images
-
+
 ## Re-using Docker Images
 
 By default, the test framework will build new Docker images on every test execution. A unique image tag is generated,
```
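Combining the flags in this hunk with the `build/mvn integration-test` entry point shown earlier, the complete cloud-mode invocation would presumably read as follows; the master URL and image repository are the README's own placeholders, not real endpoints:

```bash
# Sketch of a cloud-mode test run against an existing cluster; substitute
# real values for the placeholder API server URL and image repository.
build/mvn integration-test \
  -Dspark.kubernetes.test.deployMode=cloud \
  -Dspark.kubernetes.test.master=https://example.com:8443/apiserver \
  -Dspark.kubernetes.test.repo=docker.example.com/spark-images
```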
```diff
@@ -75,4 +75,4 @@ source code has to be compiled.
 
 When the tests are cloning a repository and building it, the Spark distribution is placed in
 `target/spark/spark-<VERSION>.tgz`. Reuse this tarball to save a significant amount of time if you are iterating on
-the development of these integration tests.
+the development of these integration tests.
```

docker-file-server/.gitignore

Lines changed: 0 additions & 1 deletion
This file was deleted.

docker-file-server/Dockerfile

Lines changed: 0 additions & 4 deletions
This file was deleted.

docker-file-server/nginx.conf

Lines changed: 0 additions & 34 deletions
This file was deleted.

scripts/prepare-docker-images.sh

Lines changed: 0 additions & 9 deletions
```diff
@@ -33,27 +33,18 @@ then
   echo "No unpacked distribution was found at $UNPACKED_SPARK_TGZ. Please run clone-spark.sh and build-spark.sh first." && exit 1;
 fi
 
-FILE_SERVER_IMAGE="$IMAGE_REPO/spark-examples-file-server:$IMAGE_TAG"
-FILE_SERVER_BUILD_DIR="$TEST_ROOT_DIR/docker-file-server"
-rm -rf $FILE_SERVER_BUILD_DIR/jars
-mkdir -p $FILE_SERVER_BUILD_DIR/jars
-cp $UNPACKED_SPARK_TGZ/examples/jars/spark-examples*.jar $FILE_SERVER_BUILD_DIR/jars/.
 cd $UNPACKED_SPARK_TGZ
 if [[ $DEPLOY_MODE == cloud ]] ;
 then
-  docker build -t $FILE_SERVER_IMAGE "$FILE_SERVER_BUILD_DIR"
   $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG build
   if [[ $IMAGE_REPO == gcr.io* ]] ;
   then
     gcloud docker -- push $IMAGE_REPO/spark:$IMAGE_TAG && \
-      gcloud docker -- push $FILE_SERVER_IMAGE
   else
     $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push
-    docker push $FILE_SERVER_IMAGE
   fi
 else
   # -m option for minikube.
   eval $(minikube docker-env)
-  docker build -t $FILE_SERVER_IMAGE $FILE_SERVER_BUILD_DIR
   $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG build
 fi
```
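With the nginx file-server image removed, image preparation collapses to Spark's bundled `docker-image-tool.sh`. A rough sketch of the two paths that remain after this commit, paraphrased from the post-commit script:

```bash
# Cloud mode: build the Spark image, then push it to the remote repository
# (the script routes gcr.io repos through `gcloud docker -- push`; all
# other repos use the tool's own push subcommand).
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG build
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push

# Minikube mode: build directly inside Minikube's Docker daemon, so no
# push is needed (-m is the tool's Minikube switch).
eval $(minikube docker-env)
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG build
```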

src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 2 additions & 12 deletions
```diff
@@ -41,7 +41,6 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
   private var sparkHomeDir: Path = _
   private var kubernetesTestComponents: KubernetesTestComponents = _
   private var sparkAppConf: SparkAppConf = _
-  private var remoteExamplesJarUri: URI = _
   private var image: String = _
   private var containerLocalSparkDistroExamplesJar: String = _
 
@@ -74,11 +73,6 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
       .set("spark.kubernetes.driver.label.spark-app-locator", APP_LOCATOR_LABEL)
       .set("spark.kubernetes.executor.label.spark-app-locator", APP_LOCATOR_LABEL)
     kubernetesTestComponents.createNamespace()
-    remoteExamplesJarUri = SparkExamplesFileServerRunner
-      .launchServerAndGetUriForExamplesJar(
-        kubernetesTestComponents,
-        getTestImageTag,
-        getTestImageRepo)
   }
 
   after {
@@ -109,10 +103,6 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
     runSparkPiAndVerifyCompletion(appArgs = Array("5"))
   }
 
-  test("Run SparkPi using the remote example jar.") {
-    runSparkPiAndVerifyCompletion(appResource = remoteExamplesJarUri.toString)
-  }
-
   test("Run SparkPi with custom driver pod name, labels, annotations, and environment variables.") {
     sparkAppConf
       .set("spark.kubernetes.driver.pod.name", "spark-integration-spark-pi")
@@ -177,8 +167,8 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
 
     createTestSecret()
 
-    runSparkPiAndVerifyCompletion(
-      appResource = remoteExamplesJarUri.toString,
+    runSparkPageRankAndVerifyCompletion(
+      appArgs = Array(CONTAINER_LOCAL_DOWNLOADED_PAGE_RANK_DATA_FILE),
       driverPodChecker = (driverPod: Pod) => {
         doBasicDriverPodCheck(driverPod)
         checkTestSecret(driverPod, withInitContainer = true)
```

src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SparkExamplesFileServerRunner.scala

Lines changed: 0 additions & 123 deletions
This file was deleted.

src/test/scala/org/apache/spark/deploy/k8s/integrationtest/Utils.scala

Lines changed: 0 additions & 3 deletions
```diff
@@ -18,9 +18,6 @@ package org.apache.spark.deploy.k8s.integrationtest
 
 import java.io.Closeable
 import java.net.URI
-import java.io.{IOException, InputStream, OutputStream}
-
-import com.google.common.io.ByteStreams
 
 object Utils extends Logging {
 
```
