Commit c777d2c

Resolve issues with Minikube and address review comments
1 parent: 21fc0d1

3 files changed (+16, −47 lines)

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/MinikubeTestBackend.scala

Lines changed: 7 additions & 5 deletions
@@ -21,30 +21,32 @@ import java.util.UUID
 import io.fabric8.kubernetes.client.DefaultKubernetesClient

 import org.apache.spark.deploy.k8s.integrationtest.backend.IntegrationTestBackend
+import org.apache.spark.deploy.k8s.integrationtest.config._
 import org.apache.spark.deploy.k8s.integrationtest.docker.KubernetesSuiteDockerManager

 private[spark] object MinikubeTestBackend extends IntegrationTestBackend {
   private var defaultClient: DefaultKubernetesClient = _
-  private val userSkipBuildImages =
-    System.getProperty("spark.docker.test.skipBuildImages", "false").toBoolean
+  private val userProvidedDockerImageTag = Option(
+    System.getProperty(KUBERNETES_TEST_DOCKER_TAG_SYSTEM_PROPERTY))
   private val resolvedDockerImageTag =
-    UUID.randomUUID().toString.replaceAll("-", "")
+    userProvidedDockerImageTag.getOrElse(UUID.randomUUID().toString.replaceAll("-", ""))
   private val dockerManager = new KubernetesSuiteDockerManager(
     Minikube.getDockerEnv, resolvedDockerImageTag)
+
   override def initialize(): Unit = {
     val minikubeStatus = Minikube.getMinikubeStatus
     require(minikubeStatus == MinikubeStatus.RUNNING,
       s"Minikube must be running before integration tests can execute. Current status" +
         s" is: $minikubeStatus")
-    if (!userSkipBuildImages) {
+    if (userProvidedDockerImageTag.isEmpty) {
       dockerManager.buildSparkDockerImages()
     }
     defaultClient = Minikube.getKubernetesClient
   }

   override def cleanUp(): Unit = {
     super.cleanUp()
-    if (!userSkipBuildImages) {
+    if (userProvidedDockerImageTag.isEmpty) {
       dockerManager.deleteImages()
     }
   }
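
The net effect of this change: supplying a pre-built image tag via the new system property makes the backend reuse that tag and skip both building and deleting images, while omitting it keeps the old behavior of generating a random tag and managing images for the run. A minimal standalone sketch of that resolution logic (the object name is hypothetical; the property name is the constant added in config.scala below):

import java.util.UUID

// Hypothetical standalone illustration of the tag-resolution behavior above.
object TagResolutionSketch {
  def main(args: Array[String]): Unit = {
    // Set via -Dspark.kubernetes.test.imageDockerTag=<tag> on the test JVM.
    val userProvidedTag = Option(System.getProperty("spark.kubernetes.test.imageDockerTag"))
    // Fall back to a fresh random tag when the user did not supply one.
    val resolvedTag = userProvidedTag.getOrElse(UUID.randomUUID().toString.replaceAll("-", ""))
    // Images are built (and later deleted) only when no tag was supplied.
    val manageImages = userProvidedTag.isEmpty
    println(s"resolved tag: $resolvedTag, build/delete images: $manageImages")
  }
}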

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/config.scala

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@
 package org.apache.spark.deploy.k8s.integrationtest

 package object config {
+  val KUBERNETES_TEST_DOCKER_TAG_SYSTEM_PROPERTY = "spark.kubernetes.test.imageDockerTag"
   val DRIVER_DOCKER_IMAGE = "spark.kubernetes.driver.docker.image"
   val EXECUTOR_DOCKER_IMAGE = "spark.kubernetes.executor.docker.image"
   val INIT_CONTAINER_DOCKER_IMAGE = "spark.kubernetes.initcontainer.docker.image"
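
This constant replaces the old spark.docker.test.skipBuildImages flag: providing a tag now implies skipping the image build. A hedged usage sketch (object name and tag value are illustrative only):

// Hypothetical illustration of supplying a pre-built tag. In a real run the
// property would typically arrive via
// -Dspark.kubernetes.test.imageDockerTag=<tag> on the test JVM command line.
object SupplyTagSketch {
  def main(args: Array[String]): Unit = {
    System.setProperty("spark.kubernetes.test.imageDockerTag", "prebuilt-tag-123")
    // Any subsequent read resolves to the user-provided value:
    val tag = Option(System.getProperty("spark.kubernetes.test.imageDockerTag"))
    assert(tag.contains("prebuilt-tag-123"))
  }
}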

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/KubernetesSuiteDockerManager.scala

Lines changed: 8 additions & 42 deletions
@@ -30,27 +30,21 @@ import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
 import scala.collection.JavaConverters._

+import org.apache.spark.deploy.k8s.integrationtest.constants._
 import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite
 import org.apache.spark.deploy.k8s.integrationtest.Logging
 import org.apache.spark.deploy.k8s.integrationtest.Utils.{RedirectThread, tryWithResource}

 private[spark] class KubernetesSuiteDockerManager(
     dockerEnv: Map[String, String], dockerTag: String) extends Logging {

-  private val DOCKER_BUILD_PATH = Paths.get("target", "docker")
+  private val DOCKER_BUILD_PATH = SPARK_DISTRO_PATH
   // Dockerfile paths must be relative to the build path.
-  private val BASE_DOCKER_FILE = "dockerfiles/spark-base/Dockerfile"
-  private val DRIVER_DOCKER_FILE = "dockerfiles/driver/Dockerfile"
-  private val DRIVERPY_DOCKER_FILE = "dockerfiles/driver-py/Dockerfile"
-  private val DRIVERR_DOCKER_FILE = "dockerfiles/driver-r/Dockerfile"
-  private val EXECUTOR_DOCKER_FILE = "dockerfiles/executor/Dockerfile"
-  private val EXECUTORPY_DOCKER_FILE = "dockerfiles/executor-py/Dockerfile"
-  private val EXECUTORR_DOCKER_FILE = "dockerfiles/executor-r/Dockerfile"
-  private val SHUFFLE_SERVICE_DOCKER_FILE = "dockerfiles/shuffle-service/Dockerfile"
-  private val INIT_CONTAINER_DOCKER_FILE = "dockerfiles/init-container/Dockerfile"
-  private val STAGING_SERVER_DOCKER_FILE = "dockerfiles/resource-staging-server/Dockerfile"
-  private val STATIC_ASSET_SERVER_DOCKER_FILE =
-    "dockerfiles/integration-test-asset-server/Dockerfile"
+  private val DOCKERFILES_DIR = "kubernetes/dockerfiles/"
+  private val BASE_DOCKER_FILE = DOCKERFILES_DIR + "spark-base/Dockerfile"
+  private val DRIVER_DOCKER_FILE = DOCKERFILES_DIR + "driver/Dockerfile"
+  private val EXECUTOR_DOCKER_FILE = DOCKERFILES_DIR + "executor/Dockerfile"
+  private val INIT_CONTAINER_DOCKER_FILE = DOCKERFILES_DIR + "init-container/Dockerfile"
   private val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
   private val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
   private val dockerHost = dockerEnv.getOrElse("DOCKER_HOST",
@@ -76,46 +70,18 @@ private[spark] class KubernetesSuiteDockerManager(

   def buildSparkDockerImages(): Unit = {
     Eventually.eventually(TIMEOUT, INTERVAL) { dockerClient.ping() }
-    // Building Python distribution environment
-    val pythonExec = sys.env.get("PYSPARK_DRIVER_PYTHON")
-      .orElse(sys.env.get("PYSPARK_PYTHON"))
-      .getOrElse("/usr/bin/python")
-    val builder = new ProcessBuilder(
-      Seq(pythonExec, "setup.py", "sdist").asJava)
-    builder.directory(new File(DOCKER_BUILD_PATH.toFile, "python"))
-    builder.redirectErrorStream(true) // Ugly but needed for stdout and stderr to synchronize
-    val process = builder.start()
-    new RedirectThread(process.getInputStream, System.out, "redirect output").start()
-    val exitCode = process.waitFor()
-    if (exitCode != 0) {
-      logInfo(s"exitCode: $exitCode")
-    }
     buildImage("spark-base", BASE_DOCKER_FILE)
     buildImage("spark-driver", DRIVER_DOCKER_FILE)
-    buildImage("spark-driver-py", DRIVERPY_DOCKER_FILE)
-    buildImage("spark-driver-r", DRIVERR_DOCKER_FILE)
     buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
-    buildImage("spark-executor-py", EXECUTORPY_DOCKER_FILE)
-    buildImage("spark-executor-r", EXECUTORR_DOCKER_FILE)
-    buildImage("spark-shuffle", SHUFFLE_SERVICE_DOCKER_FILE)
-    buildImage("spark-resource-staging-server", STAGING_SERVER_DOCKER_FILE)
     buildImage("spark-init", INIT_CONTAINER_DOCKER_FILE)
-    buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
   }

   def deleteImages(): Unit = {
     removeRunningContainers()
+    deleteImage("spark-base")
     deleteImage("spark-driver")
-    deleteImage("spark-driver-py")
-    deleteImage("spark-driver-r")
     deleteImage("spark-executor")
-    deleteImage("spark-executor-py")
-    deleteImage("spark-executor-r")
-    deleteImage("spark-shuffle")
-    deleteImage("spark-resource-staging-server")
     deleteImage("spark-init")
-    deleteImage("spark-integration-test-asset-server")
-    deleteImage("spark-base")
   }

   private def buildImage(name: String, dockerFile: String): Unit = {
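
With DOCKER_BUILD_PATH now pointing at the Spark distribution instead of target/docker, the relative Dockerfile paths resolve inside the distro's kubernetes/dockerfiles directory. A small sketch of that resolution, assuming a stand-in value for SPARK_DISTRO_PATH (which is defined in integrationtest.constants._ and not shown in this diff):

import java.nio.file.Paths

// Illustrative only: "/opt/spark-distro" is an assumed stand-in for
// SPARK_DISTRO_PATH, not the real constant's value.
object DockerfilePathSketch {
  def main(args: Array[String]): Unit = {
    val dockerBuildPath = Paths.get("/opt/spark-distro")
    val dockerfilesDir = "kubernetes/dockerfiles/"
    val baseDockerFile = dockerfilesDir + "spark-base/Dockerfile"
    // Dockerfile paths are relative to the build path, so they resolve to
    // files inside the Spark distribution:
    println(dockerBuildPath.resolve(baseDockerFile))
    // prints: /opt/spark-distro/kubernetes/dockerfiles/spark-base/Dockerfile
  }
}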
