This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ba1496507044 [SPARK-53236][CORE][EXAMPLE] Use Java `ArrayList` constructors instead of `Lists.newArrayList` in Java code
ba1496507044 is described below

commit ba14965070448a09523a82577e3ed8be2431c00c
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Aug 10 20:16:28 2025 -0700

    [SPARK-53236][CORE][EXAMPLE] Use Java `ArrayList` constructors instead of `Lists.newArrayList` in Java code
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Java `ArrayList` constructors instead of `Lists.newArrayList` in Java code.
    
    ### Why are the changes needed?
    
    Using the native `ArrayList` constructor is simpler than Guava's `Lists.newArrayList` wrapper.
    
    ```java
    - List<Object> out = Lists.newArrayList();
    + List<Object> out = new ArrayList<>();
    ```
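    
    Call sites that pass an `Iterator` rather than a `Collection` have no matching `ArrayList` constructor, so those are rewritten with `forEachRemaining` instead. A minimal sketch of that pattern (the `IteratorToList`/`toList` names here are illustrative only, not part of this patch):
    
    ```java
    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;
    
    class IteratorToList {
      // Lists.newArrayList(Iterator) has no ArrayList-constructor
      // counterpart, so the iterator is drained explicitly.
      static <T> List<T> toList(Iterator<T> iterator) {
        List<T> list = new ArrayList<>();
        iterator.forEachRemaining(list::add);
        return list;
      }
    }
    ```
    
    A `RegexpSinglelineJava` rule is also added to `dev/checkstyle.xml` so future `Lists.newArrayList` usages fail the style check.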
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #51962 from dongjoon-hyun/SPARK-53236.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java | 4 +++-
 .../org/apache/spark/network/client/TransportClientFactory.java  | 4 ++--
 .../java/org/apache/spark/network/server/TransportServer.java    | 4 ++--
 .../spark/network/protocol/MergedBlockMetaSuccessSuite.java      | 4 ++--
 .../apache/spark/network/shuffle/ExternalBlockStoreClient.java   | 4 ++--
 .../spark/network/shuffle/ShuffleTransportContextSuite.java      | 9 ++++-----
 .../java/org/apache/spark/network/yarn/YarnShuffleService.java   | 4 ++--
 core/src/test/java/test/org/apache/spark/JavaAPISuite.java       | 4 ++--
 dev/checkstyle.xml                                               | 4 ++++
 .../org/apache/spark/examples/sql/JavaSQLDataSourceExample.java  | 3 +--
 10 files changed, 24 insertions(+), 20 deletions(-)

diff --git a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java
index 72c3690d1a18..dd99d4876763 100644
--- a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java
+++ b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java
@@ -499,7 +499,9 @@ public abstract class DBIteratorSuite {
 
  private List<CustomType1> collect(KVStoreView<CustomType1> view) throws Exception {
     try (KVStoreIterator<CustomType1> iterator = view.closeableIterator()) {
-      return Lists.newArrayList(iterator);
+      List<CustomType1> list = new ArrayList<>();
+      iterator.forEachRemaining(list::add);
+      return list;
     }
   }
 
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
index a6fedf5896c7..162fe8bd4866 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
@@ -21,6 +21,7 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 import java.util.Random;
@@ -30,7 +31,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import com.codahale.metrics.MetricSet;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
 import io.netty.bootstrap.Bootstrap;
 import io.netty.buffer.PooledByteBufAllocator;
 import io.netty.channel.Channel;
@@ -102,7 +102,7 @@ public class TransportClientFactory implements Closeable {
       List<TransportClientBootstrap> clientBootstraps) {
     this.context = Objects.requireNonNull(context);
     this.conf = context.getConf();
-    this.clientBootstraps = Lists.newArrayList(Objects.requireNonNull(clientBootstraps));
+    this.clientBootstraps = new ArrayList<>(Objects.requireNonNull(clientBootstraps));
     this.connectionPool = new ConcurrentHashMap<>();
     this.numConnectionsPerPeer = conf.numConnectionsPerPeer();
     this.rand = new Random();
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
index 1a1f4723f572..be5d9e03c45c 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
@@ -19,13 +19,13 @@ package org.apache.spark.network.server;
 
 import java.io.Closeable;
 import java.net.InetSocketAddress;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.MetricSet;
-import com.google.common.collect.Lists;
 import io.netty.bootstrap.ServerBootstrap;
 import io.netty.buffer.PooledByteBufAllocator;
 import io.netty.channel.ChannelFuture;
@@ -76,7 +76,7 @@ public class TransportServer implements Closeable {
       this.pooledAllocator = NettyUtils.createPooledByteBufAllocator(
            conf.preferDirectBufs(), true /* allowCache */, conf.serverThreads());
     }
-    this.bootstraps = Lists.newArrayList(Objects.requireNonNull(bootstraps));
+    this.bootstraps = new ArrayList<>(Objects.requireNonNull(bootstraps));
 
     boolean shouldClose = true;
     try {
diff --git a/common/network-common/src/test/java/org/apache/spark/network/protocol/MergedBlockMetaSuccessSuite.java b/common/network-common/src/test/java/org/apache/spark/network/protocol/MergedBlockMetaSuccessSuite.java
index a3750ce11172..41b84f389587 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/protocol/MergedBlockMetaSuccessSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/protocol/MergedBlockMetaSuccessSuite.java
@@ -21,9 +21,9 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.nio.file.Files;
+import java.util.ArrayList;
 import java.util.List;
 
-import com.google.common.collect.Lists;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.Unpooled;
@@ -65,7 +65,7 @@ public class MergedBlockMetaSuccessSuite {
    MergedBlockMetaSuccess expectedMeta = new MergedBlockMetaSuccess(requestId, 2,
      new FileSegmentManagedBuffer(conf, chunkMetaFile, 0, chunkMetaFile.length()));
 
-    List<Object> out = Lists.newArrayList();
+    List<Object> out = new ArrayList<>();
     ChannelHandlerContext context = mock(ChannelHandlerContext.class);
     when(context.alloc()).thenReturn(ByteBufAllocator.DEFAULT);
 
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java
index 808bfc2726c3..4fdd39c3471f 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockStoreClient.java
@@ -19,6 +19,7 @@ package org.apache.spark.network.shuffle;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -27,7 +28,6 @@ import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.Future;
 
 import com.codahale.metrics.MetricSet;
-import com.google.common.collect.Lists;
 
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
@@ -82,7 +82,7 @@ public class ExternalBlockStoreClient extends BlockStoreClient {
     this.appId = appId;
     TransportContext context = new TransportContext(
       transportConf, new NoOpRpcHandler(), true, true);
-    List<TransportClientBootstrap> bootstraps = Lists.newArrayList();
+    List<TransportClientBootstrap> bootstraps = new ArrayList<>();
     if (authEnabled) {
      bootstraps.add(new AuthClientBootstrap(transportConf, appId, secretKeyHolder));
     }
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ShuffleTransportContextSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ShuffleTransportContextSuite.java
index aef3bc51bcd4..bd9884e81ba9 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ShuffleTransportContextSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ShuffleTransportContextSuite.java
@@ -18,12 +18,11 @@
 package org.apache.spark.network.shuffle;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.Unpooled;
@@ -74,7 +73,7 @@ public class ShuffleTransportContextSuite {
   }
 
   private ByteBuf getDecodableMessageBuf(Message req) throws Exception {
-    List<Object> out = Lists.newArrayList();
+    List<Object> out = new ArrayList<>();
     ChannelHandlerContext context = mock(ChannelHandlerContext.class);
     when(context.alloc()).thenReturn(ByteBufAllocator.DEFAULT);
     MessageEncoder.INSTANCE.encode(context, req, out);
@@ -118,7 +117,7 @@ public class ShuffleTransportContextSuite {
    try (ShuffleTransportContext shuffleTransportContext = createShuffleTransportContext(true)) {
      ShuffleTransportContext.ShuffleMessageDecoder decoder =
        (ShuffleTransportContext.ShuffleMessageDecoder) shuffleTransportContext.getDecoder();
-      List<Object> out = Lists.newArrayList();
+      List<Object> out = new ArrayList<>();
       decoder.decode(mock(ChannelHandlerContext.class), messageBuf, out);
 
       Assertions.assertEquals(1, out.size());
@@ -137,7 +136,7 @@ public class ShuffleTransportContextSuite {
    try (ShuffleTransportContext shuffleTransportContext = createShuffleTransportContext(true)) {
      ShuffleTransportContext.ShuffleMessageDecoder decoder =
        (ShuffleTransportContext.ShuffleMessageDecoder) shuffleTransportContext.getDecoder();
-      List<Object> out = Lists.newArrayList();
+      List<Object> out = new ArrayList<>();
       decoder.decode(mock(ChannelHandlerContext.class), messageBuf, out);
 
       Assertions.assertEquals(1, out.size());
diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
index 06e1efbb87e5..b5718946252e 100644
--- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
+++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -31,7 +32,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -290,7 +290,7 @@ public class YarnShuffleService extends AuxiliaryService {
 
       // If authentication is enabled, set up the shuffle server to use a
       // special RPC handler that filters out unauthenticated fetch requests
-      List<TransportServerBootstrap> bootstraps = Lists.newArrayList();
+      List<TransportServerBootstrap> bootstraps = new ArrayList<>();
      boolean authEnabled = _conf.getBoolean(SPARK_AUTHENTICATE_KEY, DEFAULT_SPARK_AUTHENTICATE);
       if (authEnabled) {
         secretManager = new ShuffleSecretManager();
diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
index 1d8fe14d1c26..ac0d26edd193 100644
--- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
@@ -47,7 +47,6 @@ import scala.jdk.javaapi.CollectionConverters;
 
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
 import com.google.common.base.Throwables;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
@@ -333,7 +332,8 @@ public class JavaAPISuite implements Serializable {
   public void toLocalIterator() {
     List<Integer> correct = Arrays.asList(1, 2, 3, 4);
     JavaRDD<Integer> rdd = sc.parallelize(correct);
-    List<Integer> result = Lists.newArrayList(rdd.toLocalIterator());
+    List<Integer> result = new ArrayList<>();
+    rdd.toLocalIterator().forEachRemaining(result::add);
     assertEquals(correct, result);
   }
 
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index 7e47a395b1ac..1f877027f5b7 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -261,6 +261,10 @@
             <property name="format" value="Preconditions\.checkNotNull"/>
             <property name="message" value="Use requireNonNull of 
java.util.Objects instead." />
         </module>
+        <module name="RegexpSinglelineJava">
+            <property name="format" value="Lists\.newArrayList"/>
+            <property name="message" value="Use ArrayList constructor 
instead." />
+        </module>
         <module name="RegexpSinglelineJava">
             <property name="format" value="ImmutableMap\.of"/>
             <property name="message" value="Use Map.of instead." />
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
index efbed9e591ec..e7d52ce03e86 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
@@ -17,7 +17,6 @@
 package org.apache.spark.examples.sql;
 
 // $example on:schema_merging$
-import com.google.common.collect.Lists;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -534,7 +533,7 @@ public class JavaSQLDataSourceExample {
             "<person>" +
             "<name>laglangyue</name><job>Developer</job><age>28</age>" +
             "</person>");
-    Dataset<String> otherPeopleDataset = spark.createDataset(Lists.newArrayList(xmlData),
+    Dataset<String> otherPeopleDataset = spark.createDataset(new ArrayList<>(xmlData),
             Encoders.STRING());
 
     Dataset<Row> otherPeople = spark.read()


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
