
Commit 3fc401d

Use hadoop patcher for hdfs-fixture too
1 parent c7ad022 commit 3fc401d

File tree

9 files changed: +45 −23 lines changed


distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java

Lines changed: 7 additions & 4 deletions
@@ -38,6 +38,7 @@
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.jdk.JarHell;
+import org.elasticsearch.jdk.RuntimeVersionFeature;
 import org.elasticsearch.plugin.scanner.ClassReaders;
 import org.elasticsearch.plugin.scanner.NamedComponentScanner;
 import org.elasticsearch.plugins.Platforms;
@@ -922,10 +923,12 @@ void jarHellCheck(PluginDescriptor candidateInfo, Path candidateDir, Path plugin
      */
     private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception {
         final PluginDescriptor info = loadPluginInfo(tmpRoot);
-        PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir());
-        if (pluginPolicy != null) {
-            Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir());
-            PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch);
+        if (RuntimeVersionFeature.isSecurityManagerAvailable()) {
+            PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir());
+            if (pluginPolicy != null) {
+                Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir());
+                PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch);
+            }
         }
 
         // Validate that the downloaded plugin's ID matches what we expect from the descriptor. The
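
For context on the new guard: the Java SecurityManager is deprecated for removal and is permanently disabled from JDK 24 onwards (JEP 486), so plugin policy files are only worth parsing and confirming on runtimes that can still enable it. A minimal sketch of what such an availability check boils down to, assuming the JDK 24 cutoff; the real RuntimeVersionFeature.isSecurityManagerAvailable() may be implemented differently:

// Hypothetical stand-in for RuntimeVersionFeature.isSecurityManagerAvailable(),
// shown only to illustrate the intent of the guard added to installPlugin() above.
class SecurityManagerCheckSketch {
    static boolean securityManagerAvailable() {
        // The SecurityManager cannot be enabled at all starting with JDK 24 (JEP 486).
        return Runtime.version().feature() < 24;
    }
}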

plugins/repository-hdfs/hadoop-client-api/build.gradle

Lines changed: 3 additions & 9 deletions
@@ -2,32 +2,26 @@ import org.gradle.api.file.ArchiveOperations
 
 apply plugin: 'elasticsearch.java'
 
-sourceSets {
-  patcher
-}
-
 configurations {
   thejar {
     canBeResolved = true
   }
+  patcher
 }
 
 dependencies {
   thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") {
     transitive = false
   }
-
-  patcherImplementation 'org.ow2.asm:asm:9.7.1'
-  patcherImplementation 'org.ow2.asm:asm-tree:9.7.1'
+  patcher(project(':plugins:repository-hdfs:hadoop-common-patcher'))
 }
 
 def outputDir = layout.buildDirectory.dir("patched-classes")
 
 def patchTask = tasks.register("patchClasses", JavaExec) {
   inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE)
-  inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE)
   outputs.dir(outputDir)
-  classpath = sourceSets.patcher.runtimeClasspath
+  classpath = configurations.patcher
   mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
   def thejar = configurations.thejar
   doFirst {

plugins/repository-hdfs/hadoop-common-patcher/build.gradle

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+import org.gradle.api.file.ArchiveOperations
+
+apply plugin: 'elasticsearch.java'
+
+dependencies {
+  implementation 'org.ow2.asm:asm:9.7.1'
+  implementation 'org.ow2.asm:asm-tree:9.7.1'
+}
+

plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java renamed to plugins/repository-hdfs/hadoop-common-patcher/src/main/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java

Lines changed: 9 additions & 10 deletions
@@ -41,18 +41,17 @@ public static void main(String[] args) throws Exception {
         try (JarFile jarFile = new JarFile(new File(jarPath))) {
             for (var patcher : patchers.entrySet()) {
                 JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey());
-                if (jarEntry == null) {
-                    throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]");
-                }
-                byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
+                if (jarEntry != null) {
+                    byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
 
-                ClassReader classReader = new ClassReader(classToPatch);
-                ClassWriter classWriter = new ClassWriter(classReader, 0);
-                classReader.accept(patcher.getValue().apply(classWriter), 0);
+                    ClassReader classReader = new ClassReader(classToPatch);
+                    ClassWriter classWriter = new ClassWriter(classReader, 0);
+                    classReader.accept(patcher.getValue().apply(classWriter), 0);
 
-                Path outputFile = outputDir.resolve(patcher.getKey());
-                Files.createDirectories(outputFile.getParent());
-                Files.write(outputFile, classWriter.toByteArray());
+                    Path outputFile = outputDir.resolve(patcher.getKey());
+                    Files.createDirectories(outputFile.getParent());
+                    Files.write(outputFile, classWriter.toByteArray());
+                }
             }
         }
     }
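
The loop above iterates a patchers map whose keys are class-file paths inside the jar and whose values adapt a ClassWriter into a rewriting ClassVisitor (patcher.getValue().apply(classWriter)). A minimal sketch of one such entry, using placeholder class and method names rather than the real targets of ShellPatcher and the other patchers in this package:

import java.util.Map;
import java.util.function.Function;

import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

// Hypothetical patcher with the shape the loop expects: it stubs out one boolean
// method so that it always returns false and leaves everything else untouched.
class ExamplePatcher extends ClassVisitor {

    // Example registry entry matching the patcher.getKey()/getValue() usage in main().
    // The class-file path and method name below are placeholders, not real Hadoop targets.
    static final Map<String, Function<ClassWriter, ClassVisitor>> PATCHERS = Map.of(
        "org/apache/hadoop/example/SomeClass.class", ExamplePatcher::new
    );

    ExamplePatcher(ClassWriter classWriter) {
        super(Opcodes.ASM9, classWriter);
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
        MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
        if ("someNativeCheck".equals(name) && "()Z".equals(descriptor)) {
            // Emit a replacement body ("return false"), then return null so the
            // original instructions read from the jar are discarded.
            mv.visitCode();
            mv.visitInsn(Opcodes.ICONST_0);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(1, (access & Opcodes.ACC_STATIC) != 0 ? 0 : 1);
            mv.visitEnd();
            return null;
        }
        return mv;
    }
}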

plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java renamed to plugins/repository-hdfs/hadoop-common-patcher/src/main/java/org/elasticsearch/hdfs/patch/MethodReplacement.java

File renamed without changes.

plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java renamed to plugins/repository-hdfs/hadoop-common-patcher/src/main/java/org/elasticsearch/hdfs/patch/ShellPatcher.java

File renamed without changes.

plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java renamed to plugins/repository-hdfs/hadoop-common-patcher/src/main/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java

File renamed without changes.

plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/SubjectGetSubjectPatcher.java renamed to plugins/repository-hdfs/hadoop-common-patcher/src/main/java/org/elasticsearch/hdfs/patch/SubjectGetSubjectPatcher.java

File renamed without changes.

test/fixtures/hdfs-fixture/build.gradle

Lines changed: 17 additions & 0 deletions
@@ -16,6 +16,7 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 configurations {
   hdfs2
   hdfs3
+  patcher
   consumable("shadowedHdfs2")
 }
 
@@ -85,6 +86,21 @@ dependencies {
     exclude group: "org.apache.geronimo.specs", module: "geronimo-jcache_1.0_spec"
     exclude group: "org.xerial.snappy", module: "snappy-java"
   }
+
+  patcher(project(':plugins:repository-hdfs:hadoop-common-patcher'))
+}
+
+def outputDir = layout.buildDirectory.dir("patched-classes")
+
+def patchTask = tasks.register("patchClasses", JavaExec) {
+  inputs.files(configurations.hdfs2).withPathSensitivity(PathSensitivity.RELATIVE)
+  outputs.dir(outputDir)
+  classpath = configurations.patcher
+  mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
+  def jarToPatch = configurations.hdfs2.getFiles().find { f -> f.getName().endsWith("hadoop-common-2.8.5.jar")}
+  doFirst {
+    args(jarToPatch, outputDir.get().asFile)
+  }
 }
 
 tasks.named("shadowJar").configure {
@@ -104,6 +120,7 @@ def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) {
   }
   archiveClassifier.set("hdfs2")
   from sourceSets.main.output
+  from patchTask
   configurations.add(project.configurations.hdfs2)
 }
 
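Taken together, the fixture now reuses the shared patcher: patchClasses runs hadoop-common-2.8.5.jar through org.elasticsearch.hdfs.patch.HdfsClassPatcher, and the hdfs2jar ShadowJar task picks up the patched classes via from patchTask. A small sketch of the argument contract that JavaExec wiring assumes (the actual argument parsing in HdfsClassPatcher.main() is not part of this diff; the names follow the jarPath/outputDir variables seen earlier):

import java.nio.file.Path;

// Sketch of the assumed CLI contract, matching args(jarToPatch, outputDir.get().asFile) above.
public class PatcherArgsSketch {
    public static void main(String[] args) {
        String jarPath = args[0];          // jar to patch, e.g. hadoop-common-2.8.5.jar from the hdfs2 configuration
        Path outputDir = Path.of(args[1]); // destination for patched .class files, e.g. build/patched-classes
        System.out.println("would patch " + jarPath + " into " + outputDir);
    }
}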
