@@ -38,6 +38,7 @@
import org.elasticsearch.core.Tuple;
import org.elasticsearch.env.Environment;
import org.elasticsearch.jdk.JarHell;
+import org.elasticsearch.jdk.RuntimeVersionFeature;
import org.elasticsearch.plugin.scanner.ClassReaders;
import org.elasticsearch.plugin.scanner.NamedComponentScanner;
import org.elasticsearch.plugins.Platforms;
@@ -922,10 +923,12 @@ void jarHellCheck(PluginDescriptor candidateInfo, Path candidateDir, Path plugin
*/
private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception {
final PluginDescriptor info = loadPluginInfo(tmpRoot);
-PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir());
-if (pluginPolicy != null) {
-    Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir());
-    PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch);
+if (RuntimeVersionFeature.isSecurityManagerAvailable()) {
+    PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir());
+    if (pluginPolicy != null) {
+        Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir());
+        PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch);
+    }
}

// Validate that the downloaded plugin's ID matches what we expect from the descriptor. The
plugins/repository-hdfs/hadoop-client-api/build.gradle: 3 additions & 9 deletions

@@ -2,32 +2,26 @@ import org.gradle.api.file.ArchiveOperations

apply plugin: 'elasticsearch.java'

-sourceSets {
-    patcher
-}

configurations {
thejar {
canBeResolved = true
}
+    patcher
}

dependencies {
thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") {
transitive = false
}

-    patcherImplementation 'org.ow2.asm:asm:9.7.1'
-    patcherImplementation 'org.ow2.asm:asm-tree:9.7.1'
+    patcher(project(':plugins:repository-hdfs:hadoop-common-patcher'))
}

def outputDir = layout.buildDirectory.dir("patched-classes")

def patchTask = tasks.register("patchClasses", JavaExec) {
inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE)
-    inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE)
outputs.dir(outputDir)
-    classpath = sourceSets.patcher.runtimeClasspath
+    classpath = configurations.patcher
mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
def thejar = configurations.thejar
doFirst {
plugins/repository-hdfs/hadoop-common-patcher/build.gradle: 9 additions & 0 deletions (new file)

@@ -0,0 +1,9 @@
+import org.gradle.api.file.ArchiveOperations
+
+apply plugin: 'elasticsearch.java'
+
+dependencies {
+    implementation 'org.ow2.asm:asm:9.7.1'
+    implementation 'org.ow2.asm:asm-tree:9.7.1'
+}

@@ -41,18 +41,17 @@ public static void main(String[] args) throws Exception {
try (JarFile jarFile = new JarFile(new File(jarPath))) {
for (var patcher : patchers.entrySet()) {
JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey());
-if (jarEntry == null) {
-    throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]");
Contributor Author: This had to go, as some classes are not present in both jars; it's unfortunate, but I think acceptable. If it's not, should we pass the classes to patch via the command line, so it is configurable in each Gradle JavaExec task?

Member: We should pass the files to patch down; it should be part of the configuration of the task. We can keep how to actually patch the classes here, but the list of classes to patch should be passed in.

However, why would the same class be present in two different jars? That's jarhell, which we should be rejecting already?

Contributor Author: I should have said both projects; the hdfs-fixture project needs a subset of the patches, as it pulls in a different (smaller) set of dependencies.

Member: I'm still missing something. Why do dependencies matter? We should only be patching a given class once.

Contributor Author: The classes we need to patch are in the two projects' dependencies: for hadoop-client-api, they are in both hadoop-common and hadoop-auth; hdfs-fixture, however, depends only on hadoop-common (via hadoop-minicluster), so trying to patch classes in hadoop-auth results in an error (the class is not there). But like you said, this can be solved by making the patcher take a list of the class names.

-}
-byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
+if (jarEntry != null) {
+    byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();

-ClassReader classReader = new ClassReader(classToPatch);
-ClassWriter classWriter = new ClassWriter(classReader, 0);
-classReader.accept(patcher.getValue().apply(classWriter), 0);
+    ClassReader classReader = new ClassReader(classToPatch);
+    ClassWriter classWriter = new ClassWriter(classReader, 0);
+    classReader.accept(patcher.getValue().apply(classWriter), 0);

-Path outputFile = outputDir.resolve(patcher.getKey());
-Files.createDirectories(outputFile.getParent());
-Files.write(outputFile, classWriter.toByteArray());
+    Path outputFile = outputDir.resolve(patcher.getKey());
+    Files.createDirectories(outputFile.getParent());
+    Files.write(outputFile, classWriter.toByteArray());
+}
}
}
}
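
The review thread above suggests letting each consumer choose which classes to patch instead of hard-coding the full set in the patcher. A minimal sketch of that idea, assuming the patcher accepts the jar entries to patch as extra command-line arguments after the jar path and output directory; the class name HdfsClassPatcherSketch, the placeholder patcher map, and the ShutdownHookManager entry are illustrative only, not the PR's actual implementation:

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

public class HdfsClassPatcherSketch {

    // Placeholder patchers keyed by jar entry; the real project would plug in its ASM visitors here.
    private static final Map<String, Function<ClassWriter, ClassVisitor>> PATCHERS = Map.of(
        "org/apache/hadoop/util/ShutdownHookManager.class",
        classWriter -> new ClassVisitor(Opcodes.ASM9, classWriter) { /* no-op placeholder */ }
    );

    // Usage: HdfsClassPatcherSketch <input-jar> <output-dir> [jar-entry ...]
    public static void main(String[] args) throws Exception {
        String jarPath = args[0];
        Path outputDir = Path.of(args[1]);
        // Patch only the entries named on the command line; default to every known patcher.
        List<String> requested = args.length > 2
            ? Arrays.asList(args).subList(2, args.length)
            : List.copyOf(PATCHERS.keySet());

        try (JarFile jarFile = new JarFile(new File(jarPath))) {
            for (String entryName : requested) {
                Function<ClassWriter, ClassVisitor> patcher = PATCHERS.get(entryName);
                JarEntry jarEntry = jarFile.getJarEntry(entryName);
                if (patcher == null || jarEntry == null) {
                    // A missing entry is an error again, since the caller asked for it explicitly.
                    throw new IllegalArgumentException("class [" + entryName + "] not found in [" + jarPath + "]");
                }
                byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
                ClassReader classReader = new ClassReader(classToPatch);
                ClassWriter classWriter = new ClassWriter(classReader, 0);
                classReader.accept(patcher.apply(classWriter), 0);

                Path outputFile = outputDir.resolve(entryName);
                Files.createDirectories(outputFile.getParent());
                Files.write(outputFile, classWriter.toByteArray());
            }
        }
    }
}

With something like this, each Gradle JavaExec task could pass only the entries present in its own jar, for example the hdfs-fixture task would list just the hadoop-common classes in its args(...) call, while hadoop-client-api would also list the hadoop-auth ones.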
test/fixtures/hdfs-fixture/build.gradle: 17 additions & 0 deletions

@@ -16,6 +16,7 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
configurations {
hdfs2
hdfs3
+    patcher
consumable("shadowedHdfs2")
}

@@ -85,6 +86,21 @@ dependencies {
exclude group: "org.apache.geronimo.specs", module: "geronimo-jcache_1.0_spec"
exclude group: "org.xerial.snappy", module: "snappy-java"
}

+    patcher(project(':plugins:repository-hdfs:hadoop-common-patcher'))
}

+def outputDir = layout.buildDirectory.dir("patched-classes")
+
+def patchTask = tasks.register("patchClasses", JavaExec) {
+    inputs.files(configurations.hdfs2).withPathSensitivity(PathSensitivity.RELATIVE)
+    outputs.dir(outputDir)
+    classpath = configurations.patcher
+    mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
+    def jarToPatch = configurations.hdfs2.getFiles().find { f -> f.getName().endsWith("hadoop-common-2.8.5.jar") }
+    doFirst {
+        args(jarToPatch, outputDir.get().asFile)
+    }
+}

tasks.named("shadowJar").configure {
@@ -104,6 +120,7 @@ def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) {
}
archiveClassifier.set("hdfs2")
from sourceSets.main.output
+    from patchTask
configurations.add(project.configurations.hdfs2)
}
