diff --git a/README.md b/README.md
index c2f507d..b376a04 100644
--- a/README.md
+++ b/README.md
@@ -27,11 +27,7 @@
 top-level dir. For more details, see the related section in
 [building-spark.md](https://github.com/apache/spark/blob/master/docs/building-spark.md#building-a-runnable-distribution)
 
-The integration tests also need a local path to the directory that
-contains `Dockerfile`s. In the main spark repo, the path is
-`/spark/resource-managers/kubernetes/docker/src/main/dockerfiles`.
-
-Once you prepare the inputs, the integration tests can be executed with Maven or
+Once you prepare the tarball, the integration tests can be executed with Maven or
 your IDE. Note that when running tests from an IDE, the `pre-integration-test`
 phase must be run every time the Spark main code changes. When running tests
 from the command line, the `pre-integration-test` phase should automatically be
@@ -41,8 +37,7 @@ With Maven, the integration test can be run using the following command:
 
 ```
 $ mvn clean integration-test \
-  -Dspark-distro-tgz=spark/spark-2.3.0-SNAPSHOT-bin.tgz \
-  -Dspark-dockerfiles-dir=spark/resource-managers/kubernetes/docker/src/main/dockerfiles
+  -Dspark-distro-tgz=spark/spark-2.3.0-SNAPSHOT-bin.tgz
 ```
 
 # Running against an arbitrary cluster
@@ -51,7 +46,6 @@ In order to run against any cluster, use the following:
 ```sh
 $ mvn clean integration-test \
   -Dspark-distro-tgz=spark/spark-2.3.0-SNAPSHOT-bin.tgz \
-  -Dspark-dockerfiles-dir=spark/resource-managers/kubernetes/docker/src/main/dockerfiles
   -DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://https://<master> -Dspark.docker.test.driverImage=<driver-image> -Dspark.docker.test.executorImage=<executor-image>"
 ```
 
@@ -67,7 +61,6 @@ property `spark.docker.test.persistMinikube` to the test process:
 
 ```
 $ mvn clean integration-test \
   -Dspark-distro-tgz=spark/spark-2.3.0-SNAPSHOT-bin.tgz \
-  -Dspark-dockerfiles-dir=spark/resource-managers/kubernetes/docker/src/main/dockerfiles
   -DextraScalaTestArgs=-Dspark.docker.test.persistMinikube=true
 ```
 
@@ -85,6 +78,5 @@ is an example:
 
 ```
 $ mvn clean integration-test \
   -Dspark-distro-tgz=spark/spark-2.3.0-SNAPSHOT-bin.tgz \
-  -Dspark-dockerfiles-dir=spark/resource-managers/kubernetes/docker/src/main/dockerfiles
   "-DextraScalaTestArgs=-Dspark.docker.test.persistMinikube=true -Dspark.docker.test.skipBuildImages=true"
 ```
diff --git a/integration-test/pom.xml b/integration-test/pom.xml
index bf9522c..bf48318 100644
--- a/integration-test/pom.xml
+++ b/integration-test/pom.xml
@@ -139,22 +139,6 @@
               </arguments>
             </configuration>
           </execution>
-
-          <execution>
-            <id>copy-dockerfiles-if-missing</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <workingDirectory>${project.build.directory}/spark-distro</workingDirectory>
-              <executable>/bin/sh</executable>
-              <arguments>
-                <argument>-c</argument>
-                <argument>test -d dockerfiles || cp -pr ${spark-dockerfiles-dir} dockerfiles</argument>
-              </arguments>
-            </configuration>
-          </execution>
         </executions>
       </plugin>
     </plugins>
diff --git a/integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala b/integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala
index 0ae0f3e..b3a359f 100644
--- a/integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala
+++ b/integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala
@@ -32,9 +32,10 @@ private[spark] class SparkDockerImageBuilder
   private val DOCKER_BUILD_PATH = SPARK_DISTRO_PATH
   // Dockerfile paths must be relative to the build path.
-  private val BASE_DOCKER_FILE = "dockerfiles/spark-base/Dockerfile"
-  private val DRIVER_DOCKER_FILE = "dockerfiles/driver/Dockerfile"
-  private val EXECUTOR_DOCKER_FILE = "dockerfiles/executor/Dockerfile"
+  private val DOCKERFILES_DIR = "kubernetes/dockerfiles/"
+  private val BASE_DOCKER_FILE = DOCKERFILES_DIR + "spark-base/Dockerfile"
+  private val DRIVER_DOCKER_FILE = DOCKERFILES_DIR + "driver/Dockerfile"
+  private val EXECUTOR_DOCKER_FILE = DOCKERFILES_DIR + "executor/Dockerfile"
   private val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
   private val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
   private val dockerHost = dockerEnv.getOrElse("DOCKER_HOST",
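
Reviewer note: the patch leans on one assumption — that the distribution tarball itself now ships the Dockerfiles under `kubernetes/dockerfiles/`, which is what makes the `-Dspark-dockerfiles-dir` input and the `copy-dockerfiles-if-missing` exec step redundant. A minimal standalone sketch of that check; the object name and the default `target/spark-distro` path (where the `pre-integration-test` phase unpacks the tarball) are illustrative, not part of this change:

```scala
import java.nio.file.{Files, Paths}

// Hypothetical sanity check: confirm the unpacked Spark distribution already
// contains the Dockerfiles the integration tests now resolve relative to it.
object DistroLayoutCheck {
  def main(args: Array[String]): Unit = {
    // Default matches the Maven build dir used by the exec plugin; override via args(0).
    val distro = Paths.get(args.headOption.getOrElse("target/spark-distro"))
    val dockerfilesDir = "kubernetes/dockerfiles/"
    for (image <- Seq("spark-base", "driver", "executor")) {
      val dockerfile = distro.resolve(dockerfilesDir + image + "/Dockerfile")
      require(Files.isRegularFile(dockerfile),
        s"$dockerfile not found; was the distribution built with Kubernetes support?")
    }
    println(s"All Dockerfiles present under $distro")
  }
}
```

Run against a tarball built without Kubernetes support, this fails fast — the situation the removed `test -d dockerfiles || cp ...` fallback used to paper over.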
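Likewise, a sketch of why the Dockerfile constants stay relative to `DOCKER_BUILD_PATH`: a docker build resolves the dockerfile argument against the build-context directory. This assumes the Spotify `docker-client` library (suggested by the suite's `DOCKER_HOST` handling); the object name, image tag, and build path are illustrative, not the suite's actual wiring:

```scala
import java.nio.file.Paths

import com.spotify.docker.client.{DefaultDockerClient, LoggingBuildHandler}

// Illustrative sketch: build one image from a Dockerfile path expressed
// relative to the build context, mirroring the constants in the diff above.
object BuildImageSketch {
  def main(args: Array[String]): Unit = {
    val buildPath = Paths.get("target/spark-distro") // the unpacked distribution
    val docker = DefaultDockerClient.fromEnv().build() // honors DOCKER_HOST etc.
    try {
      // The dockerfile argument is resolved against buildPath, which is why
      // BASE_DOCKER_FILE and friends must not carry an absolute prefix.
      docker.build(
        buildPath,
        "spark-base",
        "kubernetes/dockerfiles/spark-base/Dockerfile",
        new LoggingBuildHandler())
    } finally {
      docker.close()
    }
  }
}
```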