Skip to content

Commit ba14965

Browse files
committed
[SPARK-53236][CORE][EXAMPLE] Use Java ArrayList constructors instead of Lists.newArrayList in Java code
### What changes were proposed in this pull request?

This PR aims to use Java `ArrayList` constructors instead of `Lists.newArrayList` in Java code.

### Why are the changes needed?

Java native usage is simpler than the `Lists.newArrayList` wrapper.

```java
- List<Object> out = Lists.newArrayList();
+ List<Object> out = new ArrayList<>();
```

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass the CIs.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #51962 from dongjoon-hyun/SPARK-53236.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 7d96524 commit ba14965

File tree

10 files changed

+24
-20
lines changed

10 files changed

+24
-20
lines changed

common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -499,7 +499,9 @@ private KVStoreView<CustomType1> view() throws Exception {
499499

500500
private List<CustomType1> collect(KVStoreView<CustomType1> view) throws Exception {
501501
try (KVStoreIterator<CustomType1> iterator = view.closeableIterator()) {
502-
return Lists.newArrayList(iterator);
502+
List<CustomType1> list = new ArrayList<>();
503+
iterator.forEachRemaining(list::add);
504+
return list;
503505
}
504506
}
505507

common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,7 @@
2121
import java.io.IOException;
2222
import java.net.InetSocketAddress;
2323
import java.net.SocketAddress;
24+
import java.util.ArrayList;
2425
import java.util.List;
2526
import java.util.Objects;
2627
import java.util.Random;
@@ -30,7 +31,6 @@
3031
import com.codahale.metrics.MetricSet;
3132
import com.google.common.annotations.VisibleForTesting;
3233
import com.google.common.base.Throwables;
33-
import com.google.common.collect.Lists;
3434
import io.netty.bootstrap.Bootstrap;
3535
import io.netty.buffer.PooledByteBufAllocator;
3636
import io.netty.channel.Channel;
@@ -102,7 +102,7 @@ public TransportClientFactory(
102102
List<TransportClientBootstrap> clientBootstraps) {
103103
this.context = Objects.requireNonNull(context);
104104
this.conf = context.getConf();
105-
this.clientBootstraps = Lists.newArrayList(Objects.requireNonNull(clientBootstraps));
105+
this.clientBootstraps = new ArrayList<>(Objects.requireNonNull(clientBootstraps));
106106
this.connectionPool = new ConcurrentHashMap<>();
107107
this.numConnectionsPerPeer = conf.numConnectionsPerPeer();
108108
this.rand = new Random();

common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -19,13 +19,13 @@
1919

2020
import java.io.Closeable;
2121
import java.net.InetSocketAddress;
22+
import java.util.ArrayList;
2223
import java.util.List;
2324
import java.util.Objects;
2425
import java.util.concurrent.TimeUnit;
2526

2627
import com.codahale.metrics.Counter;
2728
import com.codahale.metrics.MetricSet;
28-
import com.google.common.collect.Lists;
2929
import io.netty.bootstrap.ServerBootstrap;
3030
import io.netty.buffer.PooledByteBufAllocator;
3131
import io.netty.channel.ChannelFuture;
@@ -76,7 +76,7 @@ public TransportServer(
7676
this.pooledAllocator = NettyUtils.createPooledByteBufAllocator(
7777
conf.preferDirectBufs(), true /* allowCache */, conf.serverThreads());
7878
}
79-
this.bootstraps = Lists.newArrayList(Objects.requireNonNull(bootstraps));
79+
this.bootstraps = new ArrayList<>(Objects.requireNonNull(bootstraps));
8080

8181
boolean shouldClose = true;
8282
try {

common/network-common/src/test/java/org/apache/spark/network/protocol/MergedBlockMetaSuccessSuite.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -21,9 +21,9 @@
2121
import java.io.File;
2222
import java.io.FileOutputStream;
2323
import java.nio.file.Files;
24+
import java.util.ArrayList;
2425
import java.util.List;
2526

26-
import com.google.common.collect.Lists;
2727
import io.netty.buffer.ByteBuf;
2828
import io.netty.buffer.ByteBufAllocator;
2929
import io.netty.buffer.Unpooled;
@@ -65,7 +65,7 @@ public void testMergedBlocksMetaEncodeDecode() throws Exception {
6565
MergedBlockMetaSuccess expectedMeta = new MergedBlockMetaSuccess(requestId, 2,
6666
new FileSegmentManagedBuffer(conf, chunkMetaFile, 0, chunkMetaFile.length()));
6767

68-
List<Object> out = Lists.newArrayList();
68+
List<Object> out = new ArrayList<>();
6969
ChannelHandlerContext context = mock(ChannelHandlerContext.class);
7070
when(context.alloc()).thenReturn(ByteBufAllocator.DEFAULT);
7171

common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,7 @@
1919

2020
import java.io.IOException;
2121
import java.nio.ByteBuffer;
22+
import java.util.ArrayList;
2223
import java.util.Arrays;
2324
import java.util.HashMap;
2425
import java.util.List;
@@ -27,7 +28,6 @@
2728
import java.util.concurrent.Future;
2829

2930
import com.codahale.metrics.MetricSet;
30-
import com.google.common.collect.Lists;
3131

3232
import org.apache.spark.internal.LogKeys;
3333
import org.apache.spark.internal.MDC;
@@ -82,7 +82,7 @@ public void init(String appId) {
8282
this.appId = appId;
8383
TransportContext context = new TransportContext(
8484
transportConf, new NoOpRpcHandler(), true, true);
85-
List<TransportClientBootstrap> bootstraps = Lists.newArrayList();
85+
List<TransportClientBootstrap> bootstraps = new ArrayList<>();
8686
if (authEnabled) {
8787
bootstraps.add(new AuthClientBootstrap(transportConf, appId, secretKeyHolder));
8888
}

common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ShuffleTransportContextSuite.java

Lines changed: 4 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -18,12 +18,11 @@
1818
package org.apache.spark.network.shuffle;
1919

2020
import java.io.IOException;
21+
import java.util.ArrayList;
2122
import java.util.HashMap;
2223
import java.util.List;
2324
import java.util.Map;
2425

25-
import com.google.common.collect.Lists;
26-
2726
import io.netty.buffer.ByteBuf;
2827
import io.netty.buffer.ByteBufAllocator;
2928
import io.netty.buffer.Unpooled;
@@ -74,7 +73,7 @@ ShuffleTransportContext createShuffleTransportContext(boolean separateFinalizeTh
7473
}
7574

7675
private ByteBuf getDecodableMessageBuf(Message req) throws Exception {
77-
List<Object> out = Lists.newArrayList();
76+
List<Object> out = new ArrayList<>();
7877
ChannelHandlerContext context = mock(ChannelHandlerContext.class);
7978
when(context.alloc()).thenReturn(ByteBufAllocator.DEFAULT);
8079
MessageEncoder.INSTANCE.encode(context, req, out);
@@ -118,7 +117,7 @@ public void testDecodeOfFinalizeShuffleMessage() throws Exception {
118117
try (ShuffleTransportContext shuffleTransportContext = createShuffleTransportContext(true)) {
119118
ShuffleTransportContext.ShuffleMessageDecoder decoder =
120119
(ShuffleTransportContext.ShuffleMessageDecoder) shuffleTransportContext.getDecoder();
121-
List<Object> out = Lists.newArrayList();
120+
List<Object> out = new ArrayList<>();
122121
decoder.decode(mock(ChannelHandlerContext.class), messageBuf, out);
123122

124123
Assertions.assertEquals(1, out.size());
@@ -137,7 +136,7 @@ public void testDecodeOfAnyOtherRpcMessage() throws Exception {
137136
try (ShuffleTransportContext shuffleTransportContext = createShuffleTransportContext(true)) {
138137
ShuffleTransportContext.ShuffleMessageDecoder decoder =
139138
(ShuffleTransportContext.ShuffleMessageDecoder) shuffleTransportContext.getDecoder();
140-
List<Object> out = Lists.newArrayList();
139+
List<Object> out = new ArrayList<>();
141140
decoder.decode(mock(ChannelHandlerContext.class), messageBuf, out);
142141

143142
Assertions.assertEquals(1, out.size());

common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -22,6 +22,7 @@
2222
import java.net.URL;
2323
import java.nio.charset.StandardCharsets;
2424
import java.nio.ByteBuffer;
25+
import java.util.ArrayList;
2526
import java.util.List;
2627
import java.util.Map;
2728
import java.util.Objects;
@@ -31,7 +32,6 @@
3132
import com.fasterxml.jackson.core.type.TypeReference;
3233
import com.fasterxml.jackson.databind.ObjectMapper;
3334
import com.google.common.annotations.VisibleForTesting;
34-
import com.google.common.collect.Lists;
3535
import org.apache.hadoop.conf.Configuration;
3636
import org.apache.hadoop.fs.FileSystem;
3737
import org.apache.hadoop.fs.Path;
@@ -290,7 +290,7 @@ protected void serviceInit(Configuration externalConf) throws Exception {
290290

291291
// If authentication is enabled, set up the shuffle server to use a
292292
// special RPC handler that filters out unauthenticated fetch requests
293-
List<TransportServerBootstrap> bootstraps = Lists.newArrayList();
293+
List<TransportServerBootstrap> bootstraps = new ArrayList<>();
294294
boolean authEnabled = _conf.getBoolean(SPARK_AUTHENTICATE_KEY, DEFAULT_SPARK_AUTHENTICATE);
295295
if (authEnabled) {
296296
secretManager = new ShuffleSecretManager();

core/src/test/java/test/org/apache/spark/JavaAPISuite.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -47,7 +47,6 @@
4747

4848
import com.google.common.collect.Iterables;
4949
import com.google.common.collect.Iterators;
50-
import com.google.common.collect.Lists;
5150
import com.google.common.base.Throwables;
5251
import org.apache.hadoop.fs.Path;
5352
import org.apache.hadoop.io.IntWritable;
@@ -333,7 +332,8 @@ public void foreachPartition() {
333332
public void toLocalIterator() {
334333
List<Integer> correct = Arrays.asList(1, 2, 3, 4);
335334
JavaRDD<Integer> rdd = sc.parallelize(correct);
336-
List<Integer> result = Lists.newArrayList(rdd.toLocalIterator());
335+
List<Integer> result = new ArrayList<>();
336+
rdd.toLocalIterator().forEachRemaining(result::add);
337337
assertEquals(correct, result);
338338
}
339339

dev/checkstyle.xml

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -261,6 +261,10 @@
261261
<property name="format" value="Preconditions\.checkNotNull"/>
262262
<property name="message" value="Use requireNonNull of java.util.Objects instead." />
263263
</module>
264+
<module name="RegexpSinglelineJava">
265+
<property name="format" value="Lists\.newArrayList"/>
266+
<property name="message" value="Use ArrayList constructor instead." />
267+
</module>
264268
<module name="RegexpSinglelineJava">
265269
<property name="format" value="ImmutableMap\.of"/>
266270
<property name="message" value="Use Map.of instead." />

examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,6 @@
1717
package org.apache.spark.examples.sql;
1818

1919
// $example on:schema_merging$
20-
import com.google.common.collect.Lists;
2120
import java.io.Serializable;
2221
import java.util.ArrayList;
2322
import java.util.Arrays;
@@ -534,7 +533,7 @@ private static void runXmlDatasetExample(SparkSession spark) {
534533
"<person>" +
535534
"<name>laglangyue</name><job>Developer</job><age>28</age>" +
536535
"</person>");
537-
Dataset<String> otherPeopleDataset = spark.createDataset(Lists.newArrayList(xmlData),
536+
Dataset<String> otherPeopleDataset = spark.createDataset(new ArrayList<>(xmlData),
538537
Encoders.STRING());
539538

540539
Dataset<Row> otherPeople = spark.read()

0 commit comments

Comments (0)