Commit 980bc79

Merge branch 'main' into feat/extended-request
2 parents: 077d788 + 8fb154d

11 files changed (+364, -303 lines)

.github/workflows/maven.yml

Lines changed: 2 additions & 2 deletions
@@ -19,10 +19,10 @@ jobs:
 
     steps:
       - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
-      - name: Set up JDK 11
+      - name: Set up JDK 17
         uses: actions/setup-java@99b8673ff64fbf99d8d325f52d9a5bdedb8483e9 # v4.2.1
         with:
-          java-version: '11'
+          java-version: '17'
           distribution: 'temurin'
           cache: maven
       - name: Build with Maven

CHANGELOG.md

Lines changed: 9 additions & 2 deletions
@@ -7,15 +7,21 @@ All notable changes to this project will be documented in this file.
 ### Changed
 
 - BREAKING: Only send a subset of the fields sufficient for most use-cases to OPA for performance reasons.
-  The old behavior of sending all fields can be restored by setting `hadoop.security.authorization.opa.extended-requests` to `true` ([#49]).
-- Bump `okio` to 1.17.6 and to 3.9.1 afterwards to get rid of CVE-2023-3635 ([#46], [#49]).
+  The old behavior of sending all fields can be restored by setting `hadoop.security.authorization.opa.extended-requests`
+  to `true` ([#49]).
+- Performance fixes ([#50])
+- Updates various dependencies and do a full spotless run. This will now require JDK 17 or later to build
+  (required by later error-prone versions), the build target is still Java 11 [#51]
+- Bump okio to 1.17.6 to get rid of CVE-2023-3635 ([#46])
 
 ### Fixed
 
 - Set path to `/` when the operation `contentSummary` is called on `/`. Previously path was set to `null` ([#49]).
 
 [#46]: https://github.com/stackabletech/hdfs-utils/pull/46
 [#49]: https://github.com/stackabletech/hdfs-utils/pull/49
+[#50]: https://github.com/stackabletech/hdfs-utils/pull/50
+[#51]: https://github.com/stackabletech/hdfs-utils/pull/51
 
 ## [0.3.0] - 2024-07-04
 
@@ -26,3 +32,4 @@ All notable changes to this project will be documented in this file.
 
 [#28]: https://github.com/stackabletech/hdfs-utils/pull/28
 [#29]: https://github.com/stackabletech/hdfs-utils/pull/29
+

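The extended-requests switch described in the changelog above is an ordinary boolean Hadoop property. A minimal sketch of how an enforcer can read it (the property name comes from the changelog entry, and an EXTENDED_REQUESTS_PROP constant is visible in the OpaException diff further down; the helper class and its wiring here are illustrative, not this repo's actual code):

import org.apache.hadoop.conf.Configuration;

public final class ExtendedRequestsToggleSketch {
  // Property name as documented in the changelog entry above.
  public static final String EXTENDED_REQUESTS_PROP =
      "hadoop.security.authorization.opa.extended-requests";

  // Hypothetical helper: true restores the old behavior of sending the full
  // AuthorizationContext to OPA; false (the new default per the changelog)
  // sends only the reduced field subset.
  public static boolean useExtendedRequests(Configuration conf) {
    return conf.getBoolean(EXTENDED_REQUESTS_PROP, false);
  }
}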
pom.xml

Lines changed: 20 additions & 13 deletions
@@ -35,23 +35,29 @@
     <maven.compiler.release>${java.version}</maven.compiler.release>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 
-    <!-- Tip: Use "mvn versions:display-dependency-updates" to check for updates -->
-    <cleanthat.version>2.17</cleanthat.version>
-    <error-prone.version>2.28.0</error-prone.version>
-    <google-java-format.version>1.19.2</google-java-format.version>
+    <!-- Tip:
+      Use "mvn versions:display-dependency-updates" and
+      "mvn versions:display-plugin-updates"
+      to check for updates -->
+    <cleanthat.version>2.22</cleanthat.version>
+    <error-prone.version>2.35.1</error-prone.version>
+    <google-java-format.version>1.24.0</google-java-format.version>
 
+    <cyclonedx-maven-plugin.version>2.9.0</cyclonedx-maven-plugin.version>
     <maven-clean-plugin.version>3.4.0</maven-clean-plugin.version>
     <maven-compiler-plugin.version>3.13.0</maven-compiler-plugin.version>
-    <maven-deploy-plugin.version>3.1.2</maven-deploy-plugin.version>
+    <maven-deploy-plugin.version>3.1.3</maven-deploy-plugin.version>
     <maven-enforcer-plugin.version>3.5.0</maven-enforcer-plugin.version>
-    <maven-install-plugin.version>3.1.2</maven-install-plugin.version>
+    <maven-install-plugin.version>3.1.3</maven-install-plugin.version>
     <maven-jar-plugin.version>3.4.2</maven-jar-plugin.version>
     <maven-resources-plugin.version>3.3.1</maven-resources-plugin.version>
+    <maven-shade-plugin.version>3.6.0</maven-shade-plugin.version>
     <maven-site-plugin.version>3.12.1</maven-site-plugin.version>
-    <maven-surefire-plugin.version>3.3.1</maven-surefire-plugin.version>
+    <maven-surefire-plugin.version>3.5.2</maven-surefire-plugin.version>
     <spotless-maven-plugin.version>2.43.0</spotless-maven-plugin.version>
-    <kubernetes-client.version>6.13.4</kubernetes-client.version>
-    <okio.version>3.9.1</okio.version>
+
+    <kubernetes-client.version>6.13.1</kubernetes-client.version>
+    <okio.version>1.17.6</okio.version>
   </properties>
 
   <dependencies>
@@ -129,6 +135,7 @@
         <configuration>
           <compilerArgs>
             <arg>-XDcompilePolicy=simple</arg>
+            <arg>--should-stop=ifError=FLOW</arg>
             <arg>-Xplugin:ErrorProne</arg>
           </compilerArgs>
           <annotationProcessorPaths>
@@ -159,7 +166,7 @@
                   <version>${java.version}</version>
                 </requireJavaVersion>
                 <requireMavenVersion>
-                  <version>3.3.9</version>
+                  <version>3.6.3</version>
                 </requireMavenVersion>
                 <requirePluginVersions/>
               </rules>
@@ -190,7 +197,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
-        <version>3.6.0</version>
+        <version>${maven-shade-plugin.version}</version>
         <executions>
           <execution>
             <goals>
@@ -252,14 +259,14 @@
             <goals>
               <goal>apply</goal>
             </goals>
-            <phase>verify</phase>
+            <phase>compile</phase>
           </execution>
         </executions>
       </plugin>
       <plugin>
         <groupId>org.cyclonedx</groupId>
         <artifactId>cyclonedx-maven-plugin</artifactId>
-        <version>2.8.0</version>
+        <version>${cyclonedx-maven-plugin.version}</version>
         <configuration>
           <projectType>application</projectType>
           <schemaVersion>1.5</schemaVersion>

src/main/java/tech/stackable/hadoop/HadoopConfigSingleton.java

Lines changed: 5 additions & 5 deletions
@@ -3,10 +3,10 @@
 import org.apache.hadoop.conf.Configuration;
 
 public enum HadoopConfigSingleton {
-    INSTANCE;
-    private final Configuration configuration = new Configuration();
+  INSTANCE;
+  private final Configuration configuration = new Configuration();
 
-    public Configuration getConfiguration() {
-        return this.configuration;
-    }
+  public Configuration getConfiguration() {
+    return this.configuration;
+  }
 }
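The diff above is a whitespace-only spotless reformat of the standard Java enum-singleton idiom: the JVM guarantees INSTANCE, and therefore the wrapped Configuration, is created exactly once. A usage sketch (the calling class and the config key queried are illustrative, not part of this diff):

import org.apache.hadoop.conf.Configuration;

public class ConfigSingletonUsageSketch {
  public static void main(String[] args) {
    // All components share one Configuration instead of re-parsing
    // the Hadoop XML config files on every lookup.
    Configuration conf = HadoopConfigSingleton.INSTANCE.getConfiguration();
    System.out.println(conf.get("fs.defaultFS", "file:///"));
  }
}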
src/main/java/tech/stackable/hadoop/OpaAllowQuery.java

Lines changed: 34 additions & 30 deletions

@@ -1,6 +1,10 @@
 package tech.stackable.hadoop;
 
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;
+import org.apache.hadoop.hdfs.server.namenode.INodeAttributes;
+import org.apache.hadoop.ipc.CallerContext;
 import org.apache.hadoop.security.UserGroupInformation;
 
 public class OpaAllowQuery {
@@ -11,47 +15,47 @@ public OpaAllowQuery(OpaAllowQueryInput input) {
   }
 
   /**
-   * Wrapper around {@link INodeAttributeProvider.AuthorizationContext}, which uses our custom wrapper around
-   * {@link UserGroupInformation}, {@link OpaQueryUgi}.
+   * Wrapper around {@link INodeAttributeProvider.AuthorizationContext}, which uses our custom
+   * wrapper around {@link UserGroupInformation}, {@link OpaQueryUgi}.
    */
   public static class OpaAllowQueryInput {
-    public java.lang.String fsOwner;
-    public java.lang.String supergroup;
+    public String fsOwner;
+    public String supergroup;
     // Wrapping this
     public OpaQueryUgi callerUgi;
-    public org.apache.hadoop.hdfs.server.namenode.INodeAttributes[] inodeAttrs;
-    public org.apache.hadoop.hdfs.server.namenode.INode[] inodes;
+    public INodeAttributes[] inodeAttrs;
+    public INode[] inodes;
     public byte[][] pathByNameArr;
     public int snapshotId;
-    public java.lang.String path;
+    public String path;
     public int ancestorIndex;
     public boolean doCheckOwner;
-    public org.apache.hadoop.fs.permission.FsAction ancestorAccess;
-    public org.apache.hadoop.fs.permission.FsAction parentAccess;
-    public org.apache.hadoop.fs.permission.FsAction access;
-    public org.apache.hadoop.fs.permission.FsAction subAccess;
+    public FsAction ancestorAccess;
+    public FsAction parentAccess;
+    public FsAction access;
+    public FsAction subAccess;
     public boolean ignoreEmptyDir;
-    public java.lang.String operationName;
-    public org.apache.hadoop.ipc.CallerContext callerContext;
+    public String operationName;
+    public CallerContext callerContext;
 
     public OpaAllowQueryInput(INodeAttributeProvider.AuthorizationContext context) {
-      this.fsOwner = context.getFsOwner();
-      this.supergroup = context.getSupergroup();
-      this.callerUgi = new OpaQueryUgi(context.getCallerUgi());
-      this.inodeAttrs = context.getInodeAttrs();
-      this.inodes = context.getInodes();
-      this.pathByNameArr = context.getPathByNameArr();
-      this.snapshotId = context.getSnapshotId();
-      this.path = context.getPath();
-      this.ancestorIndex = context.getAncestorIndex();
-      this.doCheckOwner = context.isDoCheckOwner();
-      this.ancestorAccess = context.getAncestorAccess();
-      this.parentAccess = context.getParentAccess();
-      this.access = context.getAccess();
-      this.subAccess = context.getSubAccess();
-      this.ignoreEmptyDir = context.isIgnoreEmptyDir();
-      this.operationName = context.getOperationName();
-      this.callerContext = context.getCallerContext();
+      fsOwner = context.getFsOwner();
+      supergroup = context.getSupergroup();
+      callerUgi = new OpaQueryUgi(context.getCallerUgi());
+      inodeAttrs = context.getInodeAttrs();
+      inodes = context.getInodes();
+      pathByNameArr = context.getPathByNameArr();
+      snapshotId = context.getSnapshotId();
+      path = context.getPath();
+      ancestorIndex = context.getAncestorIndex();
+      doCheckOwner = context.isDoCheckOwner();
+      ancestorAccess = context.getAncestorAccess();
+      parentAccess = context.getParentAccess();
+      access = context.getAccess();
+      subAccess = context.getSubAccess();
+      ignoreEmptyDir = context.isIgnoreEmptyDir();
+      operationName = context.getOperationName();
+      callerContext = context.getCallerContext();
     }
   }
 }
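These wrapper classes expose plain public fields so that Jackson can serialize the NameNode's authorization context straight into the JSON body of an OPA query, with no custom serializers (Jackson is implied by the JsonMappingException quoted in OpaQueryUgi below). A sketch of that step (the ObjectMapper setup and helper class are illustrative; the real request handling lives in StackableAccessControlEnforcer, which is not part of this diff):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;

public class OpaQuerySerializationSketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Wrap the AuthorizationContext and render it as the JSON payload
  // that would be POSTed to OPA.
  public static String toJson(INodeAttributeProvider.AuthorizationContext context)
      throws Exception {
    OpaAllowQuery query = new OpaAllowQuery(new OpaAllowQuery.OpaAllowQueryInput(context));
    return MAPPER.writeValueAsString(query);
  }
}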

src/main/java/tech/stackable/hadoop/OpaException.java

Lines changed: 8 additions & 21 deletions
@@ -1,9 +1,7 @@
 package tech.stackable.hadoop;
 
-import static tech.stackable.hadoop.StackableAccessControlEnforcer.EXTENDED_REQUESTS_PROP;
 import static tech.stackable.hadoop.StackableGroupMapper.OPA_MAPPING_URL_PROP;
 
-import java.net.URI;
 import java.net.http.HttpResponse;
 
 public abstract class OpaException extends RuntimeException {
@@ -15,32 +13,21 @@ protected OpaException(String message, Throwable cause) {
   public static final class UriMissing extends OpaException {
     public UriMissing(String configuration) {
       super(
-              "No Open Policy Agent URI provided (must be set in the configuration \""
-                      + configuration
-                      + "\")",
+          "No Open Policy Agent URI provided (must be set in the configuration \""
+              + configuration
+              + "\")",
           null);
     }
   }
 
   public static final class UriInvalid extends OpaException {
     public UriInvalid(String uri, Throwable cause) {
       super(
-          "Open Policy Agent URI is invalid (see configuration property \""
-              + OPA_MAPPING_URL_PROP
-              + "\"): "
-              + uri,
-          cause);
-    }
-  }
-
-  public static final class ExtendedRequestsConfigNotABoolean extends OpaException {
-    public ExtendedRequestsConfigNotABoolean(String extendedRequests, Throwable cause) {
-      super(
-          "The extended-requests property is not a boolean (see configuration property \""
-              + EXTENDED_REQUESTS_PROP
-              + "\"): "
-              + extendedRequests,
-          cause);
+          "Open Policy Agent URI is invalid (see configuration property \""
+              + OPA_MAPPING_URL_PROP
+              + "\"): "
+              + uri,
+          cause);
     }
   }

src/main/java/tech/stackable/hadoop/OpaQueryUgi.java

Lines changed: 33 additions & 32 deletions

@@ -1,43 +1,44 @@
 package tech.stackable.hadoop;
 
-import org.apache.hadoop.security.UserGroupInformation;
-
 import java.io.IOException;
 import java.util.List;
+import org.apache.hadoop.security.UserGroupInformation;
 
 public class OpaQueryUgi {
-    // Wrapping this
-    public OpaQueryUgi realUser;
-    public String userName;
-    public String shortUserName;
+  // Wrapping this
+  public OpaQueryUgi realUser;
+  public String userName;
+  public String shortUserName;
 
-    public String primaryGroup;
-    public List<String> groups;
+  public String primaryGroup;
+  public List<String> groups;
 
-    public UserGroupInformation.AuthenticationMethod authenticationMethod;
-    public UserGroupInformation.AuthenticationMethod realAuthenticationMethod;
+  public UserGroupInformation.AuthenticationMethod authenticationMethod;
+  public UserGroupInformation.AuthenticationMethod realAuthenticationMethod;
 
-    /**
-     * Wrapper around {@link UserGroupInformation}, which does not throw random errors during serialization when no primary
-     * group is known for the user.
-     * "Caused by: com.fasterxml.jackson.databind.JsonMappingException: Unexpected IOException (of type java.io.IOException): There is no primary group for UGI hive/[email protected] (auth:KERBEROS)"
-     */
-    public OpaQueryUgi(UserGroupInformation ugi) {
-        UserGroupInformation realUser = ugi.getRealUser();
-        if (realUser != null) {
-            this.realUser = new OpaQueryUgi(ugi.getRealUser());
-        } else {
-            this.realUser = null;
-        }
-        this.userName = ugi.getUserName();
-        this.shortUserName = ugi.getShortUserName();
-        try {
-            this.primaryGroup = ugi.getPrimaryGroupName();
-        } catch (IOException e) {
-            this.primaryGroup = null;
-        }
-        this.groups = ugi.getGroups();
-        this.authenticationMethod = ugi.getAuthenticationMethod();
-        this.realAuthenticationMethod = ugi.getRealAuthenticationMethod();
+  /**
+   * Wrapper around {@link UserGroupInformation}, which does not throw random errors during
+   * serialization when no primary group is known for the user. "Caused by:
+   * com.fasterxml.jackson.databind.JsonMappingException: Unexpected IOException (of type
+   * java.io.IOException): There is no primary group for UGI
+   * hive/[email protected] (auth:KERBEROS)"
+   */
+  public OpaQueryUgi(UserGroupInformation ugi) {
+    UserGroupInformation realUser = ugi.getRealUser();
+    if (realUser != null) {
+      this.realUser = new OpaQueryUgi(ugi.getRealUser());
+    } else {
+      this.realUser = null;
+    }
+    userName = ugi.getUserName();
+    shortUserName = ugi.getShortUserName();
+    try {
+      primaryGroup = ugi.getPrimaryGroupName();
+    } catch (IOException e) {
+      primaryGroup = null;
     }
+    groups = ugi.getGroups();
+    authenticationMethod = ugi.getAuthenticationMethod();
+    realAuthenticationMethod = ugi.getRealAuthenticationMethod();
+  }
 }
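The try/catch in the constructor above is exactly what the quoted JsonMappingException is about: UserGroupInformation.getPrimaryGroupName() throws an IOException when no group mapping exists for the user, and Jackson would otherwise surface that failure mid-serialization. A small sketch of the guarded behavior (the user name is made up):

import org.apache.hadoop.security.UserGroupInformation;

public class UgiWrapperSketch {
  public static void main(String[] args) {
    // A remote user for whom no local group mapping is configured.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
    OpaQueryUgi wrapped = new OpaQueryUgi(ugi);
    // Serializing `wrapped` now yields "primaryGroup": null instead of throwing.
    System.out.println(wrapped.userName + " / " + wrapped.primaryGroup);
  }
}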

src/main/java/tech/stackable/hadoop/OpaReducedAllowQuery.java

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,6 @@
 package tech.stackable.hadoop;
 
 import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;
-import org.apache.hadoop.security.UserGroupInformation;
 
 public class OpaReducedAllowQuery {
   public final OpaReducedAllowQueryInput input;
@@ -12,8 +11,8 @@ public OpaReducedAllowQuery(OpaReducedAllowQueryInput input) {
 
   /**
    * Similar to {@link OpaAllowQuery.OpaAllowQueryInput}, but this class only contains a subset of
-   * fields that should be sufficient for most use-cases, but offer a much better performance.
-   * See <a href="https://github.com/stackabletech/hdfs-utils/issues/48">this issue</a> for details.
+   * fields that should be sufficient for most use-cases, but offer a much better performance. See
+   * <a href="https://github.com/stackabletech/hdfs-utils/issues/48">this issue</a> for details.
    */
   public static class OpaReducedAllowQueryInput {
     public String fsOwner;

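Taken together, the two query types give the enforcer a choice of payload shape: the reduced subset by default, the full OpaAllowQuery only when extended-requests is enabled. A hedged sketch of that dispatch (the method is hypothetical, the actual logic lives in StackableAccessControlEnforcer, and it assumes OpaReducedAllowQueryInput offers a matching constructor, which this diff truncates before showing):

import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;

public class QueryShapeDispatchSketch {
  // Hypothetical: build the object to serialize as the OPA request body.
  public static Object buildQuery(
      INodeAttributeProvider.AuthorizationContext context, boolean extendedRequests) {
    if (extendedRequests) {
      // Old behavior: every AuthorizationContext field (larger, slower payload).
      return new OpaAllowQuery(new OpaAllowQuery.OpaAllowQueryInput(context));
    }
    // New default: the reduced field subset, per issue #48.
    return new OpaReducedAllowQuery(new OpaReducedAllowQuery.OpaReducedAllowQueryInput(context));
  }
}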