Commit f86cdc5

mccheah authored and Robert Kruszewski committed
Use a custom task to deploy the files from the class loader. (apache-spark-on-k8s#440)
It's impossible to use zipTree or fileTree to deploy files out of the resources folder. To make matters cleaner, build the resources into a zip file in the jar, then use a simple stream copy to write the zip file to disk, and then use a sync task to actually unpack the zip file. The result is that no zip-specific magic is needed to extract the files.
1 parent b9bf045 commit f86cdc5
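
The stream-copy step described in the message can be sketched roughly as below. This is a standalone illustration with illustrative paths, not code from this commit; the commit's own extraction logic lives in GenerateDockerFileTaskSuite further down.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

// Standalone sketch: walk the bundled zip and stream-copy a single entry to disk,
// with no zip-specific extraction beyond reading entries in order.
// The zip path and output file name here are illustrative assumptions.
public final class CopyDockerfileFromBundle {
    public static void main(String[] args) throws IOException {
        try (ZipInputStream zip = new ZipInputStream(
                new FileInputStream("build/docker-resources/docker-resources.zip"));
                FileOutputStream out = new FileOutputStream("Dockerfile.original")) {
            for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) {
                if (entry.getName().equals("kubernetes/dockerfiles/spark/Dockerfile.original")) {
                    byte[] buffer = new byte[8192];
                    int read;
                    // read() returns -1 at the end of the current entry's data.
                    while ((read = zip.read(buffer)) != -1) {
                        out.write(buffer, 0, read);
                    }
                    break;
                }
            }
        }
    }
}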

6 files changed, +57 -12 lines

gradle/publish.gradle

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ bintray {
         name = rootProject.name
         userOrg = 'palantir'
         licenses = ['Apache-2.0']
-        publications = ['nebula']
+        publications = ['nebula', 'dockerBundle']
     }
 }


settings.gradle

Lines changed: 1 addition & 0 deletions
@@ -19,3 +19,4 @@
 rootProject.name = 'spark'

 include ':spark-docker-image-generator'
+enableFeaturePreview('STABLE_PUBLISHING')

spark-docker-image-generator/build.gradle

Lines changed: 26 additions & 2 deletions
@@ -41,7 +41,7 @@ test {
     maxHeapSize = "512m"
 }

-task prepareDockerBundleDir(type: Sync) {
+task prepareDockerBundleDir(type: Zip) {
     from("${rootDir}/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile") {
         into 'kubernetes/dockerfiles/spark'
         rename 'Dockerfile', 'Dockerfile.original'
@@ -57,9 +57,33 @@ task prepareDockerBundleDir(type: Sync) {
     from(fileTree("${rootDir}/sbin")) {
         into 'sbin'
     }
-    into file("src/main/resources/docker-resources")
+    destinationDir file("${buildDir}/docker-resources")
+    archiveName "docker-resources.zip"
     includeEmptyDirs = false
 }

 tasks.compileJava.dependsOn tasks.prepareDockerBundleDir
 tasks.idea.dependsOn tasks.prepareDockerBundleDir
+
+publishing {
+    publications {
+        dockerBundle(MavenPublication) {
+            artifactId 'spark-docker-resources'
+            artifact tasks.prepareDockerBundleDir
+        }
+    }
+
+    repositories {
+        maven {
+            name = "mavenTest"
+            url = "${buildDir}/mavenTest"
+        }
+    }
+}
+
+test {
+    systemProperty 'docker-resources-zip-path', "${buildDir}/docker-resources/docker-resources.zip"
+}
+
+tasks.test.dependsOn tasks.publishDockerBundlePublicationToMavenTestRepository
+tasks.test.environment 'MAVEN_REPO', "${buildDir.getAbsolutePath()}/mavenTest"

spark-docker-image-generator/plugin-test-project/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -29,6 +29,7 @@ plugins {
 repositories {
     jcenter()
     maven { url "http://palantir.bintray.com/releases" }
+    maven { url System.env.MAVEN_REPO }
 }

 apply plugin: 'java'

spark-docker-image-generator/src/main/java/org/apache/spark/deploy/kubernetes/docker/gradle/SparkDockerPlugin.java

Lines changed: 6 additions & 3 deletions
@@ -17,11 +17,11 @@
 package org.apache.spark.deploy.kubernetes.docker.gradle;

 import java.io.File;
-import java.net.URL;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;

@@ -77,10 +77,13 @@ public void apply(Project project) {
                 sparkAppJar)
                 .into(jarsDirProvider));
         copySparkAppLibTask.dependsOn(jarTask);
-        URL dockerResourcesUrl = getClass().getResource("/docker-resources");
+        String version = Optional.ofNullable(getClass().getPackage().getImplementationVersion())
+                .orElse("latest.release");
+        Configuration dockerResourcesConf = project.getConfigurations().detachedConfiguration(
+                project.getDependencies().create("org.apache.spark:spark-docker-resources:" + version));
         Sync deployScriptsTask = project.getTasks().create(
                 "sparkDockerDeployScripts", Sync.class, task -> {
-                    task.from(project.fileTree(dockerResourcesUrl));
+                    task.from(project.zipTree(dockerResourcesConf.getSingleFile()));
                     task.setIncludeEmptyDirs(false);
                     task.into(dockerBuildDirectory);
                 });
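
For context, the resolution pattern the plugin now uses (a detached configuration on the published docker-resources zip, expanded via zipTree) can be boiled down to the minimal, self-contained sketch below. The class name, task name, and hard-coded version are illustrative assumptions; the real plugin derives the version from its package's implementation version, as shown in the diff above.

import java.io.File;

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.tasks.Sync;

// Hypothetical minimal plugin illustrating the pattern: resolve the published
// docker-resources zip through a detached configuration, then let a Sync task
// unpack it with zipTree into a build directory.
public final class DockerResourcesUnpackPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // "latest.release" is a stand-in for the version the real plugin computes.
        Configuration dockerResources = project.getConfigurations().detachedConfiguration(
                project.getDependencies().create(
                        "org.apache.spark:spark-docker-resources:latest.release"));
        project.getTasks().create("unpackDockerResources", Sync.class, task -> {
            // zipTree hands the expanded archive contents to the Sync task,
            // so no manual unzip code is needed.
            task.from(project.zipTree(dockerResources.getSingleFile()));
            task.setIncludeEmptyDirs(false);
            task.into(new File(project.getBuildDir(), "docker"));
        });
    }
}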

spark-docker-image-generator/src/test/java/org/apache/spark/deploy/kubernetes/docker/gradle/GenerateDockerFileTaskSuite.java

Lines changed: 22 additions & 6 deletions
@@ -18,6 +18,7 @@

 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -26,6 +27,8 @@
 import java.nio.file.Files;
 import java.util.List;
 import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;

 import org.apache.commons.io.IOUtils;
 import org.assertj.core.api.Assertions;
@@ -37,7 +40,6 @@
 import org.mockito.Mockito;

 public final class GenerateDockerFileTaskSuite {
-
     @Rule
     public TemporaryFolder tempFolder = new TemporaryFolder();

@@ -49,10 +51,24 @@ public void before() throws IOException {
         File dockerFileDir = tempFolder.newFolder("docker");
         destDockerFile = new File(dockerFileDir, "Dockerfile");
         srcDockerFile = tempFolder.newFile("Dockerfile.original");
-        try (InputStream originalDockerFileResource = getClass().getResourceAsStream(
-                "/docker-resources/kubernetes/dockerfiles/spark/Dockerfile.original");
-                FileOutputStream srcDockerFileStream = new FileOutputStream(srcDockerFile)) {
-            IOUtils.copy(originalDockerFileResource, srcDockerFileStream);
+
+        try (InputStream originalDockerBundleZipped = new FileInputStream(
+                new File(System.getProperty("docker-resources-zip-path")));
+                ZipInputStream unzipped = new ZipInputStream(originalDockerBundleZipped);
+                FileOutputStream srcDockerFileStream = new FileOutputStream(srcDockerFile)) {
+            ZipEntry currentEntry = unzipped.getNextEntry();
+            boolean foundDockerFile = false;
+            while (currentEntry != null && !foundDockerFile) {
+                if (currentEntry.getName().equals("kubernetes/dockerfiles/spark/Dockerfile.original")) {
+                    IOUtils.copy(unzipped, srcDockerFileStream);
+                    foundDockerFile = true;
+                } else {
+                    currentEntry = unzipped.getNextEntry();
+                }
+            }
+            if (!foundDockerFile) {
+                throw new IllegalStateException("Dockerfile not found.");
+            }
         }
     }

@@ -73,7 +89,7 @@ public void testGenerateDockerFile() throws IOException {
                 getClass().getResourceAsStream("/ExpectedDockerfile");
                 InputStreamReader expectedDockerFileReader =
                         new InputStreamReader(expectedDockerFileInput, StandardCharsets.UTF_8);
-                BufferedReader expectedDockerFileBuffered =
+                BufferedReader expectedDockerFileBuffered =
                         new BufferedReader(expectedDockerFileReader)) {
             List<String> expectedFileLines = expectedDockerFileBuffered
                     .lines()
