This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e994667644f0 [SPARK-53228][CORE][SQL] Use Java `Map` constructors instead of `Maps.new*HashMap()`
e994667644f0 is described below

commit e994667644f0e1f724ebdcc1443a93b4f14751cf
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Aug 10 15:00:29 2025 +0800

    [SPARK-53228][CORE][SQL] Use Java `Map` constructors instead of `Maps.new*HashMap()`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Java `Map` constructors directly instead of `Maps.new*HashMap()`, which are straightforward wrappers.
    
    ### Why are the changes needed?
    
    Previously, Apache Spark used `HashMap`, `LinkedHashMap`, and `ConcurrentHashMap` via Guava's `Maps` API; this PR changes the code as follows.
    
      ```java
      - Map<String, String> properties = Maps.newHashMap();
      + Map<String, String> properties = new HashMap<>();
      ```
    
      ```java
      - LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
      + LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
      ```
    
      ```java
      - ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = Maps.newConcurrentMap();
      + ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = new ConcurrentHashMap<>();
      ```
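
    For context, Guava's `Maps.new*HashMap` factory methods are thin wrappers that mainly predate the Java 7 diamond operator; with `<>` available, the plain constructors are just as concise. A paraphrased sketch of the pattern (a simplification, not the verbatim Guava source):

      ```java
      // Roughly what com.google.common.collect.Maps.newHashMap() provides:
      // it adds no behavior beyond the plain constructor, so callers can
      // use `new HashMap<>()` directly.
      public static <K, V> HashMap<K, V> newHashMap() {
        return new HashMap<>();
      }
      ```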
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #51953 from dongjoon-hyun/SPARK-53228.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .../shuffle/ExternalShuffleBlockResolver.java      |  6 +++---
 .../shuffle/OneForOneBlockFetcherSuite.java        | 25 +++++++++++-----------
 .../network/shuffle/OneForOneBlockPusherSuite.java |  9 ++++----
 .../spark/sql/connector/catalog/TableInfo.java     |  4 ++--
 .../sql/connect/planner/SparkConnectPlanner.scala  | 11 +++++-----
 5 files changed, 26 insertions(+), 29 deletions(-)

diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
index e30dc69df51c..b3002833fce1 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
@@ -20,6 +20,7 @@ package org.apache.spark.network.shuffle;
 import java.io.*;
 import java.nio.charset.StandardCharsets;
 import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executor;
@@ -34,7 +35,6 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.cache.Weigher;
-import com.google.common.collect.Maps;
 
 import org.apache.spark.internal.SparkLogger;
 import org.apache.spark.internal.SparkLoggerFactory;
@@ -136,7 +136,7 @@ public class ExternalShuffleBlockResolver {
        MDC.of(LogKeys.SHUFFLE_DB_BACKEND_KEY, Constants.SHUFFLE_SERVICE_DB_BACKEND));
       executors = reloadRegisteredExecutors(db);
     } else {
-      executors = Maps.newConcurrentMap();
+      executors = new ConcurrentHashMap<>();
     }
     this.directoryCleaner = directoryCleaner;
   }
@@ -472,7 +472,7 @@ public class ExternalShuffleBlockResolver {
   @VisibleForTesting
   static ConcurrentMap<AppExecId, ExecutorShuffleInfo> reloadRegisteredExecutors(DB db)
       throws IOException {
-    ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = Maps.newConcurrentMap();
+    ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = new ConcurrentHashMap<>();
     if (db != null) {
       try (DBIterator itr = db.iterator()) {
         itr.seek(APP_KEY_PREFIX.getBytes(StandardCharsets.UTF_8));
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
index 7151d044105c..f127568c8a33 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
@@ -23,7 +23,6 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.google.common.collect.Maps;
 import io.netty.buffer.Unpooled;
 import org.junit.jupiter.api.Test;
 
@@ -57,7 +56,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testFetchOne() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[0])));
     String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
 
@@ -72,7 +71,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testUseOldProtocol() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[0])));
     String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
 
@@ -91,7 +90,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testFetchThreeShuffleBlocks() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[12])));
     blocks.put("shuffle_0_0_1", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[23])));
     blocks.put("shuffle_0_0_2", new 
NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[23])));
@@ -112,7 +111,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testBatchFetchThreeShuffleBlocks() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_0_3", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[58])));
     String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
 
@@ -129,7 +128,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testFetchThree() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
     blocks.put("b1", new NioManagedBuffer(ByteBuffer.wrap(new byte[23])));
     blocks.put("b2", new NettyManagedBuffer(Unpooled.wrappedBuffer(new 
byte[23])));
@@ -148,7 +147,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testFailure() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
     blocks.put("b1", null);
     blocks.put("b2", null);
@@ -168,7 +167,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testFailureAndSuccess() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
     blocks.put("b1", null);
     blocks.put("b2", new NioManagedBuffer(ByteBuffer.wrap(new byte[21])));
@@ -190,14 +189,14 @@ public class OneForOneBlockFetcherSuite {
   @Test
   public void testEmptyBlockFetch() {
     IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
-      () -> fetchBlocks(Maps.newLinkedHashMap(), new String[] {},
+      () -> fetchBlocks(new LinkedHashMap<>(), new String[] {},
         new OpenBlocks("app-id", "exec-id", new String[] {}), conf));
     assertEquals("Zero-sized blockIds array", e.getMessage());
   }
 
   @Test
   public void testFetchShuffleBlocksOrder() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[1])));
     blocks.put("shuffle_0_2_1", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[2])));
     blocks.put("shuffle_0_10_2", new 
NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[3])));
@@ -217,7 +216,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testBatchFetchShuffleBlocksOrder() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffle_0_0_1_2", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[1])));
     blocks.put("shuffle_0_2_2_3", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[2])));
     blocks.put("shuffle_0_10_3_4", new 
NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[3])));
@@ -237,7 +236,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testShuffleBlockChunksFetch() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffleChunk_0_0_0_0", new 
NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
     blocks.put("shuffleChunk_0_0_0_1", new 
NioManagedBuffer(ByteBuffer.wrap(new byte[23])));
     blocks.put("shuffleChunk_0_0_0_2",
@@ -255,7 +254,7 @@ public class OneForOneBlockFetcherSuite {
 
   @Test
   public void testShuffleBlockChunkFetchFailure() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shuffleChunk_0_0_0_0", new 
NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
     blocks.put("shuffleChunk_0_0_0_1", null);
     blocks.put("shuffleChunk_0_0_0_2",
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockPusherSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockPusherSuite.java
index 32c6a8cd37ea..345ac7546af4 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockPusherSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockPusherSuite.java
@@ -23,7 +23,6 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import com.google.common.collect.Maps;
 import io.netty.buffer.Unpooled;
 import org.junit.jupiter.api.Test;
 
@@ -47,7 +46,7 @@ public class OneForOneBlockPusherSuite {
 
   @Test
   public void testPushOne() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shufflePush_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[1])));
     String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
 
@@ -61,7 +60,7 @@ public class OneForOneBlockPusherSuite {
 
   @Test
   public void testPushThree() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shufflePush_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[12])));
     blocks.put("shufflePush_0_0_1_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[23])));
     blocks.put("shufflePush_0_0_2_0",
@@ -82,7 +81,7 @@ public class OneForOneBlockPusherSuite {
 
   @Test
   public void testServerFailures() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shufflePush_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[12])));
     blocks.put("shufflePush_0_0_1_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[0])));
     blocks.put("shufflePush_0_0_2_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[0])));
@@ -102,7 +101,7 @@ public class OneForOneBlockPusherSuite {
 
   @Test
   public void testHandlingRetriableFailures() {
-    LinkedHashMap<String, ManagedBuffer> blocks = Maps.newLinkedHashMap();
+    LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
     blocks.put("shufflePush_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[12])));
     blocks.put("shufflePush_0_0_1_0", null);
     blocks.put("shufflePush_0_0_2_0", new NioManagedBuffer(ByteBuffer.wrap(new 
byte[0])));
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableInfo.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableInfo.java
index fff5c9ebbd3d..a5b4e333afa8 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableInfo.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableInfo.java
@@ -16,10 +16,10 @@
  */
 package org.apache.spark.sql.connector.catalog;
 
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
 
-import com.google.common.collect.Maps;
 import org.apache.spark.sql.connector.catalog.constraints.Constraint;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.types.StructType;
@@ -62,7 +62,7 @@ public class TableInfo {
 
   public static class Builder {
     private Column[] columns;
-    private Map<String, String> properties = Maps.newHashMap();
+    private Map<String, String> properties = new HashMap<>();
     private Transform[] partitions = new Transform[0];
     private Constraint[] constraints = new Constraint[0];
 
diff --git a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index 4e2707a488e3..16066e74d17f 100644
--- a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++ b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -17,8 +17,7 @@
 
 package org.apache.spark.sql.connect.planner
 
-import java.util.Properties
-import java.util.UUID
+import java.util.{HashMap, Properties, UUID}
 
 import scala.collection.mutable
 import scala.jdk.CollectionConverters._
@@ -26,7 +25,7 @@ import scala.util.Try
 import scala.util.control.NonFatal
 
 import com.google.common.base.Throwables
-import com.google.common.collect.{Lists, Maps}
+import com.google.common.collect.Lists
 import com.google.protobuf.{Any => ProtoAny, ByteString}
 import io.grpc.{Context, Status, StatusRuntimeException}
 import io.grpc.stub.StreamObserver
@@ -1190,7 +1189,7 @@ class SparkConnectPlanner(
     SimplePythonFunction(
       command = fun.getCommand.toByteArray.toImmutableArraySeq,
       // Empty environment variables
-      envVars = Maps.newHashMap(),
+      envVars = new HashMap[String, String](),
       pythonIncludes = sessionHolder.artifactManager.getPythonIncludes.asJava,
       pythonExec = pythonExec,
       pythonVer = fun.getPythonVer,
@@ -1204,7 +1203,7 @@ class SparkConnectPlanner(
     SimplePythonFunction(
       command = ds.getCommand.toByteArray.toImmutableArraySeq,
       // Empty environment variables
-      envVars = Maps.newHashMap(),
+      envVars = new HashMap[String, String](),
       pythonIncludes = sessionHolder.artifactManager.getPythonIncludes.asJava,
       pythonExec = pythonExec,
       pythonVer = ds.getPythonVer,
@@ -2033,7 +2032,7 @@ class SparkConnectPlanner(
     SimplePythonFunction(
       command = fun.getCommand.toByteArray.toImmutableArraySeq,
       // Empty environment variables
-      envVars = Maps.newHashMap(),
+      envVars = new HashMap[String, String](),
       pythonExec = pythonExec,
       // Merge the user specified includes with the includes managed by the artifact manager.
       pythonIncludes = (fun.getAdditionalIncludesList.asScala.toSeq ++


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
