This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new dc15e1b  [SPARK-53990] Use Java `(Map|Set).of` instead of 
`Collections.(empty|singleton)(Map|Set)`
dc15e1b is described below

commit dc15e1b076b1ac3312bf16c6202d857e6046f6f2
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Oct 22 14:08:48 2025 -0700

    [SPARK-53990] Use Java `(Map|Set).of` instead of 
`Collections.(empty|singleton)(Map|Set)`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Java 9+ `Map.of` and `Set.of` APIs instead of the 
following.
    - `Collections.emptySet`
    - `Collections.emptyMap`
    - `Collections.singletonMap`
    - `Collections.singleton` (for sets)
    
    ### Why are the changes needed?
    
    New Java APIs are concise and more intuitive.
    
    ```java
    - return Collections.emptyMap();
    + return Map.of();
    ```
    
    ```java
    - Collections.singletonMap(getCurrentStateId() + 1, state),
    + Map.of(getCurrentStateId() + 1, state),
    ```
    
    In addition, this is aligned with
    - apache/spark#51942
    - apache/spark#51961
    - apache/spark#51954
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #401 from dongjoon-hyun/SPARK-53990.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../apache/spark/k8s/operator/status/ApplicationStatus.java   |  3 +--
 .../k8s/operator/reconciler/SparkAppResourceSpecFactory.java  |  9 ++++-----
 .../main/java/org/apache/spark/k8s/operator/utils/Utils.java  | 11 +++++------
 .../java/org/apache/spark/k8s/operator/SparkOperatorTest.java |  5 ++---
 .../k8s/operator/config/SparkOperatorConfManagerTest.java     |  6 +++---
 .../k8s/operator/metrics/healthcheck/SentinelManagerTest.java |  5 ++---
 .../apache/spark/k8s/operator/SparkClusterResourceSpec.java   | 10 +++++-----
 .../spark/k8s/operator/SparkAppSubmissionWorkerTest.java      | 11 +++++------
 .../spark/k8s/operator/SparkClusterSubmissionWorkerTest.java  |  5 ++---
 9 files changed, 29 insertions(+), 36 deletions(-)

diff --git 
a/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
 
b/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
index 3b98eb6..3d2d713 100644
--- 
a/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
+++ 
b/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
@@ -21,7 +21,6 @@ package org.apache.spark.k8s.operator.status;
 
 import static 
org.apache.spark.k8s.operator.Constants.EXCEED_MAX_RETRY_ATTEMPT_MESSAGE;
 
-import java.util.Collections;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -151,7 +150,7 @@ public class ApplicationStatus
               currentAttemptSummary.getAttemptInfo(), stateTransitionHistory);
       return new ApplicationStatus(
           state,
-          Collections.singletonMap(getCurrentStateId() + 1, state),
+          Map.of(getCurrentStateId() + 1, state),
           newPrevSummary,
           nextAttemptSummary);
     } else {
diff --git 
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
 
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
index a1c3353..bd7e33e 100644
--- 
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
+++ 
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
@@ -29,7 +29,6 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
@@ -172,10 +171,10 @@ public final class SparkAppResourceSpecFactory {
             createLocalFileForPodTemplateSpec(
                 app.getSpec().getDriverSpec().getPodTemplateSpec(),
                 app.getMetadata().getUid() + "-driver-");
-        return Collections.singletonMap(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, 
filePath);
+        return Map.of(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
       }
     }
-    return Collections.emptyMap();
+    return Map.of();
   }
 
   /**
@@ -196,10 +195,10 @@ public final class SparkAppResourceSpecFactory {
             createLocalFileForPodTemplateSpec(
                 app.getSpec().getExecutorSpec().getPodTemplateSpec(),
                 app.getMetadata().getUid() + "-executor-");
-        return Collections.singletonMap(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, 
filePath);
+        return Map.of(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
       }
     }
-    return Collections.emptyMap();
+    return Map.of();
   }
 
   /**
diff --git 
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java 
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
index 6272e8d..a6ca076 100644
--- 
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
+++ 
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
@@ -31,7 +31,6 @@ import static 
org.apache.spark.k8s.operator.config.SparkOperatorConf.SPARK_APP_S
 import static 
org.apache.spark.k8s.operator.config.SparkOperatorConf.SPARK_CLUSTER_STATUS_LISTENER_CLASS_NAMES;
 
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -62,10 +61,10 @@ public final class Utils {
    */
   public static Set<String> sanitizeCommaSeparatedStrAsSet(String str) {
     if (StringUtils.isBlank(str)) {
-      return Collections.emptySet();
+      return Set.of();
     }
     if ("*".equals(str)) {
-      return Collections.emptySet();
+      return Set.of();
     }
     return Arrays.stream(str.split(","))
         .map(String::trim)
@@ -240,15 +239,15 @@ public final class Utils {
     return resource -> {
       final var metadata = resource.getMetadata();
       if (metadata == null) {
-        return Collections.emptySet();
+        return Set.of();
       } else {
         final var map = metadata.getLabels();
         if (map == null) {
-          return Collections.emptySet();
+          return Set.of();
         }
         var name = map.get(nameKey);
         if (name == null) {
-          return Collections.emptySet();
+          return Set.of();
         }
         var namespace = resource.getMetadata().getNamespace();
         return Set.of(new ResourceID(name, namespace));
diff --git 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
index ca3b285..b8b5799 100644
--- 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
+++ 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
@@ -29,7 +29,6 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 
-import java.util.Collections;
 import java.util.Set;
 import java.util.function.Consumer;
 
@@ -84,7 +83,7 @@ class SparkOperatorTest {
       mockKubernetesClientFactory
           .when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
           .thenReturn(mockClient);
-      
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
+      
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
 
       SparkOperator sparkOperator = new SparkOperator();
       Assertions.assertEquals(1, 
sparkOperator.registeredSparkControllers.size());
@@ -182,7 +181,7 @@ class SparkOperatorTest {
       mockKubernetesClientFactory
           .when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
           .thenReturn(mockClient);
-      
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
+      
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
       SparkOperator sparkOperator = new SparkOperator();
       Set<String> updatedNamespaces = Set.of("namespace-1", "namespace-2");
       
Assertions.assertTrue(sparkOperator.updateWatchingNamespaces(updatedNamespaces));
diff --git 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
index a9ac740..7baf5e1 100644
--- 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
+++ 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
@@ -20,7 +20,7 @@
 package org.apache.spark.k8s.operator.config;
 
 import java.io.IOException;
-import java.util.Collections;
+import java.util.Map;
 
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
@@ -58,11 +58,11 @@ class SparkOperatorConfManagerTest {
       Assertions.assertEquals("bar", 
confManager.getInitialValue("spark.kubernetes.operator.foo"));
       Assertions.assertEquals("bar", 
confManager.getValue("spark.kubernetes.operator.foo"));
 
-      
confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", 
"barbar"));
+      confManager.refresh(Map.of("spark.kubernetes.operator.foo", "barbar"));
       Assertions.assertEquals("bar", 
confManager.getInitialValue("spark.kubernetes.operator.foo"));
       Assertions.assertEquals("barbar", 
confManager.getValue("spark.kubernetes.operator.foo"));
 
-      
confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", 
"barbarbar"));
+      confManager.refresh(Map.of("spark.kubernetes.operator.foo", 
"barbarbar"));
       Assertions.assertEquals("bar", 
confManager.getInitialValue("spark.kubernetes.operator.foo"));
       Assertions.assertEquals("barbarbar", 
confManager.getValue("spark.kubernetes.operator.foo"));
 
diff --git 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
index b9e7e06..26ef1c9 100644
--- 
a/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
+++ 
b/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
@@ -28,7 +28,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mockStatic;
 
 import java.time.Duration;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -73,7 +72,7 @@ class SentinelManagerTest {
   @BeforeAll
   static void beforeAll() {
     Map<String, String> overrideValue =
-        Collections.singletonMap(
+        Map.of(
             SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(),
             
Duration.ofSeconds(SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS).toString());
     SparkOperatorConfManager.INSTANCE.refresh(overrideValue);
@@ -99,7 +98,7 @@ class SentinelManagerTest {
   void testHandleSentinelResourceReconciliation() throws InterruptedException {
     // Reduce the SENTINEL_RESOURCE_RECONCILIATION_DELAY time to 0
     SparkOperatorConfManager.INSTANCE.refresh(
-        Collections.singletonMap(
+        Map.of(
             SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(), 
"10"));
 
     // Before Spark Reconciler Started
diff --git 
a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
 
b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
index efa5f45..5397c52 100644
--- 
a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
+++ 
b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
@@ -21,7 +21,7 @@ package org.apache.spark.k8s.operator;
 
 import static org.apache.spark.k8s.operator.Constants.*;
 
-import java.util.Collections;
+import java.util.Map;
 import java.util.Optional;
 
 import scala.Tuple2;
@@ -133,9 +133,9 @@ public class SparkClusterResourceSpec {
         .endMetadata()
         .withNewSpecLike(serviceSpec)
         .withClusterIP("None")
-        .addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, 
name))
+        .addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
         .addToSelector(
-            Collections.singletonMap(LABEL_SPARK_ROLE_NAME, 
LABEL_SPARK_ROLE_MASTER_VALUE))
+            Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE))
         .addNewPort()
         .withName("web")
         .withPort(8080)
@@ -176,9 +176,9 @@ public class SparkClusterResourceSpec {
         .endMetadata()
         .withNewSpecLike(serviceSpec)
         .withClusterIP("None")
-        .addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, 
name))
+        .addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
         .addToSelector(
-            Collections.singletonMap(LABEL_SPARK_ROLE_NAME, 
LABEL_SPARK_ROLE_WORKER_VALUE))
+            Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE))
         .addNewPort()
         .withName("web")
         .withPort(8081)
diff --git 
a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
 
b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
index 6d080cf..71351cf 100644
--- 
a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
+++ 
b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
@@ -29,7 +29,6 @@ import static org.mockito.Mockito.mockConstruction;
 import static org.mockito.Mockito.when;
 
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -120,7 +119,7 @@ class SparkAppSubmissionWorkerTest {
       when(mockSpec.getPyFiles()).thenReturn("foo");
 
       SparkAppSubmissionWorker submissionWorker = new 
SparkAppSubmissionWorker();
-      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Collections.emptyMap());
+      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Map.of());
       assertEquals(7, constructorArgs.get(conf).size());
 
       // validate main resources
@@ -146,7 +145,7 @@ class SparkAppSubmissionWorkerTest {
       when(mockSpec.getPyFiles()).thenReturn("main.py,lib.py");
 
       SparkAppSubmissionWorker submissionWorker = new 
SparkAppSubmissionWorker();
-      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Collections.emptyMap());
+      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Map.of());
       assertEquals(7, constructorArgs.get(conf).size());
       assertEquals(
           "lib.py", ((SparkConf) 
constructorArgs.get(conf).get(0)).get("spark.submit.pyFiles"));
@@ -173,7 +172,7 @@ class SparkAppSubmissionWorkerTest {
       when(mockSpec.getSparkRFiles()).thenReturn("foo");
 
       SparkAppSubmissionWorker submissionWorker = new 
SparkAppSubmissionWorker();
-      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Collections.emptyMap());
+      SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Map.of());
       assertEquals(7, constructorArgs.get(conf).size());
 
       // validate main resources
@@ -260,7 +259,7 @@ class SparkAppSubmissionWorkerTest {
     when(mockApp.getMetadata()).thenReturn(appMeta);
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Map.of());
     assertEquals("foo", conf.appId());
   }
 
@@ -282,7 +281,7 @@ class SparkAppSubmissionWorkerTest {
     when(mockRuntimeVersions.getSparkVersion()).thenReturn("dev");
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, 
Map.of());
     assertEquals("apache/spark:dev", 
conf.get("spark.kubernetes.container.image"));
     assertEquals("apache/spark:dev", 
conf.get("spark.kubernetes.driver.container.image"));
     assertEquals("apache/spark:dev", 
conf.get("spark.kubernetes.executor.container.image"));
diff --git 
a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
 
b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
index 3391755..694e02b 100644
--- 
a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
+++ 
b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
@@ -22,7 +22,6 @@ package org.apache.spark.k8s.operator;
 import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.*;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -70,7 +69,7 @@ class SparkClusterSubmissionWorkerTest {
   @Test
   void testGetResourceSpec() {
     SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
-    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, 
Collections.emptyMap());
+    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
     // SparkClusterResourceSpecTest will cover the detail information of easy 
resources
     assertNotNull(spec.getMasterService());
     assertNotNull(spec.getMasterStatefulSet());
@@ -81,7 +80,7 @@ class SparkClusterSubmissionWorkerTest {
   @Test
   void supportSparkVersionPlaceHolder() {
     SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
-    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, 
Collections.emptyMap());
+    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
     assertEquals(
         "apache/spark:dev",
         spec.getMasterStatefulSet()


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to