Skip to content

Commit ad12f69

Browse files
authored
HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench part2. (#7948)
* HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench part2. Signed-off-by: Shilun Fan <[email protected]>
1 parent 7cf1534 commit ad12f69

File tree

12 files changed

+172
-142
lines changed

12 files changed

+172
-142
lines changed

hadoop-tools/hadoop-compat-bench/pom.xml

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -40,11 +40,6 @@
4040
<artifactId>hadoop-hdfs</artifactId>
4141
<scope>provided</scope>
4242
</dependency>
43-
<dependency>
44-
<groupId>junit</groupId>
45-
<artifactId>junit</artifactId>
46-
<scope>compile</scope>
47-
</dependency>
4843

4944
<!-- For test -->
5045
<dependency>
@@ -72,7 +67,7 @@
7267
<dependency>
7368
<groupId>org.junit.jupiter</groupId>
7469
<artifactId>junit-jupiter-api</artifactId>
75-
<scope>test</scope>
70+
<scope>compile</scope>
7671
</dependency>
7772
<dependency>
7873
<groupId>org.junit.jupiter</groupId>

hadoop-tools/hadoop-compat-bench/src/main/java/org/apache/hadoop/fs/compat/cases/HdfsCompatAcl.java

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,11 +24,14 @@
2424
import org.apache.hadoop.fs.permission.AclEntryScope;
2525
import org.apache.hadoop.fs.permission.AclStatus;
2626
import org.apache.hadoop.fs.permission.FsAction;
27-
import org.junit.Assert;
2827

2928
import java.io.IOException;
3029
import java.util.List;
3130

31+
import static org.junit.jupiter.api.Assertions.assertEquals;
32+
import static org.junit.jupiter.api.Assertions.assertFalse;
33+
import static org.junit.jupiter.api.Assertions.assertTrue;
34+
3235
@HdfsCompatCaseGroup(name = "ACL")
3336
public class HdfsCompatAcl extends AbstractHdfsCompatCase {
3437
private static final String INIT_FILE_ACL =
@@ -63,10 +66,10 @@ public void modifyAclEntries() throws IOException {
6366
for (AclEntry acl : acls) {
6467
if ("foo".equals(acl.getName())) {
6568
++count;
66-
Assert.assertEquals(FsAction.NONE, acl.getPermission());
69+
assertEquals(FsAction.NONE, acl.getPermission());
6770
}
6871
}
69-
Assert.assertEquals(1, count);
72+
assertEquals(1, count);
7073
}
7174

7275
@HdfsCompatCase
@@ -76,45 +79,45 @@ public void removeAclEntries() throws IOException {
7679
entries = AclEntry.parseAclSpec("user:foo:---", true);
7780
fs().removeAclEntries(file, entries);
7881
List<AclEntry> acls = fs().getAclStatus(file).getEntries();
79-
Assert.assertTrue(acls.stream().noneMatch(e -> "foo".equals(e.getName())));
80-
Assert.assertTrue(acls.stream().anyMatch(e -> "bar".equals(e.getName())));
82+
assertTrue(acls.stream().noneMatch(e -> "foo".equals(e.getName())));
83+
assertTrue(acls.stream().anyMatch(e -> "bar".equals(e.getName())));
8184
}
8285

8386
@HdfsCompatCase
8487
public void removeDefaultAcl() throws IOException {
8588
fs().removeDefaultAcl(dir);
8689
List<AclEntry> acls = fs().getAclStatus(dir).getEntries();
87-
Assert.assertTrue(acls.stream().noneMatch(
90+
assertTrue(acls.stream().noneMatch(
8891
e -> (e.getScope() == AclEntryScope.DEFAULT)));
8992
}
9093

9194
@HdfsCompatCase
9295
public void removeAcl() throws IOException {
9396
fs().removeAcl(file);
9497
List<AclEntry> acls = fs().getAclStatus(file).getEntries();
95-
Assert.assertTrue(acls.stream().noneMatch(e -> "foo".equals(e.getName())));
98+
assertTrue(acls.stream().noneMatch(e -> "foo".equals(e.getName())));
9699
}
97100

98101
@HdfsCompatCase
99102
public void setAcl() throws IOException {
100103
List<AclEntry> acls = fs().getAclStatus(file).getEntries();
101-
Assert.assertTrue(acls.stream().anyMatch(e -> "foo".equals(e.getName())));
104+
assertTrue(acls.stream().anyMatch(e -> "foo".equals(e.getName())));
102105
}
103106

104107
@HdfsCompatCase
105108
public void getAclStatus() throws IOException {
106109
AclStatus status = fs().getAclStatus(dir);
107-
Assert.assertFalse(status.getOwner().isEmpty());
108-
Assert.assertFalse(status.getGroup().isEmpty());
110+
assertFalse(status.getOwner().isEmpty());
111+
assertFalse(status.getGroup().isEmpty());
109112
List<AclEntry> acls = status.getEntries();
110-
Assert.assertTrue(acls.stream().anyMatch(e ->
113+
assertTrue(acls.stream().anyMatch(e ->
111114
e.getScope() == AclEntryScope.DEFAULT));
112115

113116
status = fs().getAclStatus(file);
114-
Assert.assertFalse(status.getOwner().isEmpty());
115-
Assert.assertFalse(status.getGroup().isEmpty());
117+
assertFalse(status.getOwner().isEmpty());
118+
assertFalse(status.getGroup().isEmpty());
116119
acls = status.getEntries();
117-
Assert.assertTrue(acls.stream().anyMatch(e ->
120+
assertTrue(acls.stream().anyMatch(e ->
118121
e.getScope() == AclEntryScope.ACCESS));
119122
}
120123
}

hadoop-tools/hadoop-compat-bench/src/main/java/org/apache/hadoop/fs/compat/cases/HdfsCompatCreate.java

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,16 @@
2020
import org.apache.hadoop.fs.*;
2121
import org.apache.hadoop.fs.compat.common.*;
2222
import org.apache.hadoop.io.IOUtils;
23-
import org.junit.Assert;
2423

2524
import java.io.IOException;
2625
import java.nio.charset.StandardCharsets;
2726
import java.util.concurrent.CompletableFuture;
2827

28+
import static org.junit.jupiter.api.Assertions.assertEquals;
29+
import static org.junit.jupiter.api.Assertions.assertFalse;
30+
import static org.junit.jupiter.api.Assertions.assertTrue;
31+
import static org.junit.jupiter.api.Assertions.fail;
32+
2933
@HdfsCompatCaseGroup(name = "Create")
3034
public class HdfsCompatCreate extends AbstractHdfsCompatCase {
3135
private Path path;
@@ -43,15 +47,15 @@ public void cleanup() {
4347
@HdfsCompatCase
4448
public void mkdirs() throws IOException {
4549
fs().mkdirs(path);
46-
Assert.assertTrue(fs().exists(path));
50+
assertTrue(fs().exists(path));
4751
}
4852

4953
@HdfsCompatCase
5054
public void create() throws IOException {
5155
FSDataOutputStream out = null;
5256
try {
5357
out = fs().create(path, true);
54-
Assert.assertTrue(fs().exists(path));
58+
assertTrue(fs().exists(path));
5559
} finally {
5660
IOUtils.closeStream(out);
5761
}
@@ -62,15 +66,15 @@ public void createNonRecursive() {
6266
Path file = new Path(path, "file-no-parent");
6367
try {
6468
fs().createNonRecursive(file, true, 1024, (short) 1, 1048576, null);
65-
Assert.fail("Should fail since parent does not exist");
69+
fail("Should fail since parent does not exist");
6670
} catch (IOException ignored) {
6771
}
6872
}
6973

7074
@HdfsCompatCase
7175
public void createNewFile() throws IOException {
7276
HdfsCompatUtil.createFile(fs(), path, 0);
73-
Assert.assertFalse(fs().createNewFile(path));
77+
assertFalse(fs().createNewFile(path));
7478
}
7579

7680
@HdfsCompatCase
@@ -84,7 +88,7 @@ public void append() throws IOException {
8488
out.close();
8589
out = null;
8690
FileStatus fileStatus = fs().getFileStatus(path);
87-
Assert.assertEquals(128 + 64, fileStatus.getLen());
91+
assertEquals(128 + 64, fileStatus.getLen());
8892
} finally {
8993
IOUtils.closeStream(out);
9094
}
@@ -101,7 +105,7 @@ public void createFile() throws IOException {
101105
out.write("Hello World!".getBytes(StandardCharsets.UTF_8));
102106
out.close();
103107
out = null;
104-
Assert.assertTrue(fs().exists(file));
108+
assertTrue(fs().exists(file));
105109
} finally {
106110
IOUtils.closeStream(out);
107111
}
@@ -119,7 +123,7 @@ public void appendFile() throws IOException {
119123
out.close();
120124
out = null;
121125
FileStatus fileStatus = fs().getFileStatus(path);
122-
Assert.assertEquals(128 + 64, fileStatus.getLen());
126+
assertEquals(128 + 64, fileStatus.getLen());
123127
} finally {
124128
IOUtils.closeStream(out);
125129
}

hadoop-tools/hadoop-compat-bench/src/main/java/org/apache/hadoop/fs/compat/cases/HdfsCompatDirectory.java

Lines changed: 29 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,16 @@
1919

2020
import org.apache.hadoop.fs.*;
2121
import org.apache.hadoop.fs.compat.common.*;
22-
import org.junit.Assert;
2322

2423
import java.io.IOException;
2524
import java.util.ArrayList;
2625
import java.util.List;
2726

27+
import static org.junit.jupiter.api.Assertions.assertEquals;
28+
import static org.junit.jupiter.api.Assertions.assertFalse;
29+
import static org.junit.jupiter.api.Assertions.assertTrue;
30+
import static org.junit.jupiter.api.Assertions.assertNotNull;
31+
2832
@HdfsCompatCaseGroup(name = "Directory")
2933
public class HdfsCompatDirectory extends AbstractHdfsCompatCase {
3034
private static final int FILE_LEN = 128;
@@ -45,101 +49,101 @@ public void cleanup() throws IOException {
4549

4650
@HdfsCompatCase
4751
public void isDirectory() throws IOException {
48-
Assert.assertTrue(fs().isDirectory(dir));
52+
assertTrue(fs().isDirectory(dir));
4953
}
5054

5155
@HdfsCompatCase
5256
public void listStatus() throws IOException {
5357
FileStatus[] files = fs().listStatus(dir);
54-
Assert.assertNotNull(files);
55-
Assert.assertEquals(1, files.length);
56-
Assert.assertEquals(file.getName(), files[0].getPath().getName());
58+
assertNotNull(files);
59+
assertEquals(1, files.length);
60+
assertEquals(file.getName(), files[0].getPath().getName());
5761
}
5862

5963
@HdfsCompatCase
6064
public void globStatus() throws IOException {
6165
FileStatus[] files = fs().globStatus(new Path(dir, "*ile"));
62-
Assert.assertNotNull(files);
63-
Assert.assertEquals(1, files.length);
64-
Assert.assertEquals(file.getName(), files[0].getPath().getName());
66+
assertNotNull(files);
67+
assertEquals(1, files.length);
68+
assertEquals(file.getName(), files[0].getPath().getName());
6569
}
6670

6771
@HdfsCompatCase
6872
public void listLocatedStatus() throws IOException {
6973
RemoteIterator<LocatedFileStatus> locatedFileStatuses =
7074
fs().listLocatedStatus(dir);
71-
Assert.assertNotNull(locatedFileStatuses);
75+
assertNotNull(locatedFileStatuses);
7276
List<LocatedFileStatus> files = new ArrayList<>();
7377
while (locatedFileStatuses.hasNext()) {
7478
files.add(locatedFileStatuses.next());
7579
}
76-
Assert.assertEquals(1, files.size());
80+
assertEquals(1, files.size());
7781
LocatedFileStatus fileStatus = files.get(0);
78-
Assert.assertEquals(file.getName(), fileStatus.getPath().getName());
82+
assertEquals(file.getName(), fileStatus.getPath().getName());
7983
}
8084

8185
@HdfsCompatCase
8286
public void listStatusIterator() throws IOException {
8387
RemoteIterator<FileStatus> fileStatuses = fs().listStatusIterator(dir);
84-
Assert.assertNotNull(fileStatuses);
88+
assertNotNull(fileStatuses);
8589
List<FileStatus> files = new ArrayList<>();
8690
while (fileStatuses.hasNext()) {
8791
files.add(fileStatuses.next());
8892
}
89-
Assert.assertEquals(1, files.size());
93+
assertEquals(1, files.size());
9094
FileStatus fileStatus = files.get(0);
91-
Assert.assertEquals(file.getName(), fileStatus.getPath().getName());
95+
assertEquals(file.getName(), fileStatus.getPath().getName());
9296
}
9397

9498
@HdfsCompatCase
9599
public void listFiles() throws IOException {
96100
RemoteIterator<LocatedFileStatus> iter = fs().listFiles(dir, true);
97-
Assert.assertNotNull(iter);
101+
assertNotNull(iter);
98102
List<LocatedFileStatus> files = new ArrayList<>();
99103
while (iter.hasNext()) {
100104
files.add(iter.next());
101105
}
102-
Assert.assertEquals(1, files.size());
106+
assertEquals(1, files.size());
103107
}
104108

105109
@HdfsCompatCase
106110
public void listCorruptFileBlocks() throws IOException {
107111
RemoteIterator<Path> iter = fs().listCorruptFileBlocks(dir);
108-
Assert.assertNotNull(iter);
109-
Assert.assertFalse(iter.hasNext()); // No corrupted file
112+
assertNotNull(iter);
113+
assertFalse(iter.hasNext()); // No corrupted file
110114
}
111115

112116
@HdfsCompatCase
113117
public void getContentSummary() throws IOException {
114118
ContentSummary summary = fs().getContentSummary(dir);
115-
Assert.assertEquals(1, summary.getFileCount());
116-
Assert.assertEquals(1, summary.getDirectoryCount());
117-
Assert.assertEquals(FILE_LEN, summary.getLength());
119+
assertEquals(1, summary.getFileCount());
120+
assertEquals(1, summary.getDirectoryCount());
121+
assertEquals(FILE_LEN, summary.getLength());
118122
}
119123

120124
@HdfsCompatCase
121125
public void getUsed() throws IOException {
122126
long used = fs().getUsed(dir);
123-
Assert.assertTrue(used >= FILE_LEN);
127+
assertTrue(used >= FILE_LEN);
124128
}
125129

126130
@HdfsCompatCase
127131
public void getQuotaUsage() throws IOException {
128132
QuotaUsage usage = fs().getQuotaUsage(dir);
129-
Assert.assertEquals(2, usage.getFileAndDirectoryCount());
133+
assertEquals(2, usage.getFileAndDirectoryCount());
130134
}
131135

132136
@HdfsCompatCase
133137
public void setQuota() throws IOException {
134138
fs().setQuota(dir, 1048576L, 1073741824L);
135139
QuotaUsage usage = fs().getQuotaUsage(dir);
136-
Assert.assertEquals(1048576L, usage.getQuota());
140+
assertEquals(1048576L, usage.getQuota());
137141
}
138142

139143
@HdfsCompatCase
140144
public void setQuotaByStorageType() throws IOException {
141145
fs().setQuotaByStorageType(dir, StorageType.DISK, 1048576L);
142146
QuotaUsage usage = fs().getQuotaUsage(dir);
143-
Assert.assertEquals(1048576L, usage.getTypeQuota(StorageType.DISK));
147+
assertEquals(1048576L, usage.getTypeQuota(StorageType.DISK));
144148
}
145149
}

0 commit comments

Comments (0)