@@ -30,27 +30,21 @@ import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
30
30
import org .scalatest .time .{Minutes , Seconds , Span }
31
31
import scala .collection .JavaConverters ._
32
32
33
+ import org .apache .spark .deploy .k8s .integrationtest .constants ._
33
34
import org .apache .spark .deploy .k8s .integrationtest .KubernetesSuite
34
35
import org .apache .spark .deploy .k8s .integrationtest .Logging
35
36
import org .apache .spark .deploy .k8s .integrationtest .Utils .{RedirectThread , tryWithResource }
36
37
37
38
private[spark] class KubernetesSuiteDockerManager(
    dockerEnv: Map[String, String], dockerTag: String) extends Logging {

  // Docker build context is the unpacked Spark distribution, so image builds can
  // resolve the distro's jars and bundled dockerfiles. (SPARK_DISTRO_PATH comes
  // from the integration-test constants — assumed to point at the distro root.)
  private val DOCKER_BUILD_PATH = SPARK_DISTRO_PATH

  // Dockerfile paths must be relative to the build path.
  private val DOCKERFILES_DIR = "kubernetes/dockerfiles/"
  private val BASE_DOCKER_FILE = s"${DOCKERFILES_DIR}spark-base/Dockerfile"
  private val DRIVER_DOCKER_FILE = s"${DOCKERFILES_DIR}driver/Dockerfile"
  private val EXECUTOR_DOCKER_FILE = s"${DOCKERFILES_DIR}executor/Dockerfile"
  private val INIT_CONTAINER_DOCKER_FILE = s"${DOCKERFILES_DIR}init-container/Dockerfile"

  // Polling bounds used with Eventually.eventually while waiting on the Docker daemon:
  // retry every 2 seconds, give up after 2 minutes.
  private val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
  private val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
private val dockerHost = dockerEnv.getOrElse(" DOCKER_HOST" ,
@@ -76,46 +70,18 @@ private[spark] class KubernetesSuiteDockerManager(
76
70
77
71
def buildSparkDockerImages (): Unit = {
78
72
Eventually .eventually(TIMEOUT , INTERVAL ) { dockerClient.ping() }
79
- // Building Python distribution environment
80
- val pythonExec = sys.env.get(" PYSPARK_DRIVER_PYTHON" )
81
- .orElse(sys.env.get(" PYSPARK_PYTHON" ))
82
- .getOrElse(" /usr/bin/python" )
83
- val builder = new ProcessBuilder (
84
- Seq (pythonExec, " setup.py" , " sdist" ).asJava)
85
- builder.directory(new File (DOCKER_BUILD_PATH .toFile, " python" ))
86
- builder.redirectErrorStream(true ) // Ugly but needed for stdout and stderr to synchronize
87
- val process = builder.start()
88
- new RedirectThread (process.getInputStream, System .out, " redirect output" ).start()
89
- val exitCode = process.waitFor()
90
- if (exitCode != 0 ) {
91
- logInfo(s " exitCode: $exitCode" )
92
- }
93
73
buildImage(" spark-base" , BASE_DOCKER_FILE )
94
74
buildImage(" spark-driver" , DRIVER_DOCKER_FILE )
95
- buildImage(" spark-driver-py" , DRIVERPY_DOCKER_FILE )
96
- buildImage(" spark-driver-r" , DRIVERR_DOCKER_FILE )
97
75
buildImage(" spark-executor" , EXECUTOR_DOCKER_FILE )
98
- buildImage(" spark-executor-py" , EXECUTORPY_DOCKER_FILE )
99
- buildImage(" spark-executor-r" , EXECUTORR_DOCKER_FILE )
100
- buildImage(" spark-shuffle" , SHUFFLE_SERVICE_DOCKER_FILE )
101
- buildImage(" spark-resource-staging-server" , STAGING_SERVER_DOCKER_FILE )
102
76
buildImage(" spark-init" , INIT_CONTAINER_DOCKER_FILE )
103
- buildImage(" spark-integration-test-asset-server" , STATIC_ASSET_SERVER_DOCKER_FILE )
104
77
}
105
78
106
79
def deleteImages (): Unit = {
107
80
removeRunningContainers()
81
+ deleteImage(" spark-base" )
108
82
deleteImage(" spark-driver" )
109
- deleteImage(" spark-driver-py" )
110
- deleteImage(" spark-driver-r" )
111
83
deleteImage(" spark-executor" )
112
- deleteImage(" spark-executor-py" )
113
- deleteImage(" spark-executor-r" )
114
- deleteImage(" spark-shuffle" )
115
- deleteImage(" spark-resource-staging-server" )
116
84
deleteImage(" spark-init" )
117
- deleteImage(" spark-integration-test-asset-server" )
118
- deleteImage(" spark-base" )
119
85
}
120
86
121
87
private def buildImage (name : String , dockerFile : String ): Unit = {
0 commit comments