This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new c70a95f  [SPARK-54092] Use Java-friendly `KubernetesClientUtils` APIs
c70a95f is described below

commit c70a95f83dc8381b6c7096d1041d061dcb63f461
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Oct 30 09:07:33 2025 -0700

    [SPARK-54092] Use Java-friendly `KubernetesClientUtils` APIs
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Java-friendly `KubernetesClientUtils` APIs.
    
    ### Why are the changes needed?
    
    Apache Spark 4.1.0-preview3 introduced new Java-friendly APIs. We should take advantage of them.
    - https://github.com/apache/spark/pull/52542
    
    | Scala Version | New Java-friendly Version |
    | - | - |
    | `buildConfigMap` (Since 3.1.0) | `buildConfigMapJava` (Since 4.1.0) |
    | `buildKeyToPathObjects` (Since 3.1.0) | `buildKeyToPathObjectsJava` (Since 4.1.0) |
    | `buildSparkConfDirFilesMap` (Since 3.1.1) | `buildSparkConfDirFilesMapJava` (Since 4.1.0) |
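    
    For illustration, a minimal call-site sketch of the ergonomic difference
    (`configMapName`, `sparkConf`, `scalaProps`, and `javaProps` are placeholder
    names, not identifiers from this PR):
    
    ```java
    // Before: the Scala API returns scala.collection.immutable.Map, so Java
    // callers need Tuple2 and the $plus operator method to add an entry.
    scala.collection.immutable.Map<String, String> scalaMap =
        KubernetesClientUtils.buildSparkConfDirFilesMap(configMapName, sparkConf, scalaProps)
            .$plus(new scala.Tuple2<>("spark.kubernetes.namespace", "default"));
    
    // After: the Java-friendly API accepts and returns java.util collections,
    // so a plain HashMap copy and put() suffice.
    java.util.Map<String, String> javaMap = new java.util.HashMap<>(
        KubernetesClientUtils.buildSparkConfDirFilesMapJava(configMapName, sparkConf, javaProps));
    javaMap.put("spark.kubernetes.namespace", "default");
    ```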
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #410 from dongjoon-hyun/SPARK-54092.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../spark/k8s/operator/SparkAppResourceSpec.java   | 37 ++++++++--------------
 1 file changed, 13 insertions(+), 24 deletions(-)

diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppResourceSpec.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppResourceSpec.java
index 39ed3f0..51b305f 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppResourceSpec.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppResourceSpec.java
@@ -19,15 +19,10 @@
 
 package org.apache.spark.k8s.operator;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.stream.Collectors;
+import static scala.jdk.javaapi.CollectionConverters.asJava;
 
-import scala.Tuple2;
-import scala.collection.immutable.HashMap;
-import scala.collection.immutable.Map;
-import scala.jdk.CollectionConverters;
+import java.util.*;
+import java.util.stream.Collectors;
 
 import io.fabric8.kubernetes.api.model.Container;
 import io.fabric8.kubernetes.api.model.ContainerBuilder;
@@ -79,12 +74,13 @@ public class SparkAppResourceSpec {
       List<ConfigMapSpec> configMapSpecs) {
     this.kubernetesDriverConf = kubernetesDriverConf;
     String namespace = kubernetesDriverConf.sparkConf().get(Config.KUBERNETES_NAMESPACE().key());
-    Map<String, String> confFilesMap =
-        KubernetesClientUtils.buildSparkConfDirFilesMap(
+    Map<String, String> originalConfFilesMap =
+        KubernetesClientUtils.buildSparkConfDirFilesMapJava(
                 kubernetesDriverConf.configMapNameDriver(),
                 kubernetesDriverConf.sparkConf(),
-                kubernetesDriverSpec.systemProperties())
-            .$plus(new Tuple2<>(Config.KUBERNETES_NAMESPACE().key(), namespace));
+                asJava(kubernetesDriverSpec.systemProperties()));
+    Map<String, String> confFilesMap = new HashMap<>(originalConfFilesMap);
+    confFilesMap.put(Config.KUBERNETES_NAMESPACE().key(), namespace);
     SparkPod sparkPod = addConfigMap(kubernetesDriverSpec.pod(), confFilesMap);
     this.configuredPod =
         new PodBuilder(sparkPod.pod())
@@ -93,16 +89,12 @@ public class SparkAppResourceSpec {
             .endSpec()
             .build();
     this.driverPreResources =
-        new ArrayList<>(
-            CollectionConverters.SeqHasAsJava(kubernetesDriverSpec.driverPreKubernetesResources())
-                .asJava());
+        new ArrayList<>(asJava(kubernetesDriverSpec.driverPreKubernetesResources()));
     this.driverResources =
-        new ArrayList<>(
-            CollectionConverters.SeqHasAsJava(kubernetesDriverSpec.driverKubernetesResources())
-                .asJava());
+        new ArrayList<>(asJava(kubernetesDriverSpec.driverKubernetesResources()));
     this.driverResources.add(
-        KubernetesClientUtils.buildConfigMap(
-            kubernetesDriverConf.configMapNameDriver(), confFilesMap, new HashMap<>()));
+        KubernetesClientUtils.buildConfigMapJava(
+            kubernetesDriverConf.configMapNameDriver(), confFilesMap, Map.of()));
     this.driverPreResources.addAll(ConfigMapSpecUtils.buildConfigMaps(configMapSpecs));
     this.driverResources.addAll(configureDriverServerIngress(sparkPod, driverServiceIngressList));
     this.driverPreResources.forEach(r -> setNamespaceIfMissing(r, namespace));
@@ -147,10 +139,7 @@ public class SparkAppResourceSpec {
             .addNewVolume()
             .withName(Constants.SPARK_CONF_VOLUME_DRIVER())
             .withNewConfigMap()
-            .withItems(
-                CollectionConverters.SeqHasAsJava(
-                        KubernetesClientUtils.buildKeyToPathObjects(confFilesMap))
-                    .asJava())
+            .withItems(KubernetesClientUtils.buildKeyToPathObjectsJava(confFilesMap))
             .withName(kubernetesDriverConf.configMapNameDriver())
             .endConfigMap()
             .endVolume()


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
