This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 57a9fc9efd81 [SPARK-53832][K8S] Make `KubernetesClientUtils`
Java-friendly
57a9fc9efd81 is described below
commit 57a9fc9efd81b217cc771e71136a0ac289f8a29d
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Oct 8 00:09:00 2025 -0700
[SPARK-53832][K8S] Make `KubernetesClientUtils` Java-friendly
### What changes were proposed in this pull request?
This PR aims to make `KubernetesClientUtils` more Java-friendly by adding
three additional APIs. All the other public APIs are already Java-friendly.
| Scala Version | New Java-friendly Version |
| - | - |
| `buildConfigMap` (Since 3.1.0) | `buildConfigMapJava` (Since 4.1.0) |
| `buildKeyToPathObjects` (Since 3.1.0) | `buildKeyToPathObjectsJava`
(Since 4.1.0) |
| `buildSparkConfDirFilesMap` (Since 3.1.1) |
`buildSparkConfDirFilesMapJava` (Since 4.1.0) |
### Why are the changes needed?
Java-based downstream projects like `Apache Spark K8s Operator` can take
advantage of this improvement.
### Does this PR introduce _any_ user-facing change?
No behavior change.
### How was this patch tested?
Pass the CIs with the newly added test case.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52542 from dongjoon-hyun/SPARK-53832.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../deploy/k8s/submit/KubernetesClientUtils.scala | 28 ++++++++++++++++++++++
.../k8s/submit/KubernetesClientUtilsSuite.scala | 25 +++++++++++++++++++
2 files changed, 53 insertions(+)
diff --git
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala
index 729e45c3eb1d..2b7db0b2f09b 100644
---
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala
+++
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala
@@ -19,6 +19,7 @@ package org.apache.spark.deploy.k8s.submit
import java.io.{File, StringWriter}
import java.nio.charset.MalformedInputException
+import java.util.{List => JList, Map => JMap}
import java.util.Properties
import scala.collection.mutable
@@ -70,6 +71,18 @@ object KubernetesClientUtils extends Logging {
propertiesWriter.toString
}
+ /**
+ * Build, file -> 'file's content' map of all the selected files in
SPARK_CONF_DIR.
+ * (Java-friendly)
+ */
+ @Since("4.1.0")
+ def buildSparkConfDirFilesMapJava(
+ configMapName: String,
+ sparkConf: SparkConf,
+ resolvedPropertiesMap: JMap[String, String]): JMap[String, String] =
synchronized {
+ buildSparkConfDirFilesMap(configMapName, sparkConf,
resolvedPropertiesMap.asScala.toMap).asJava
+ }
+
/**
* Build, file -> 'file's content' map of all the selected files in
SPARK_CONF_DIR.
*/
@@ -89,6 +102,11 @@ object KubernetesClientUtils extends Logging {
}
}
+ @Since("4.1.0")
+ def buildKeyToPathObjectsJava(confFilesMap: JMap[String, String]):
JList[KeyToPath] = {
+ buildKeyToPathObjects(confFilesMap.asScala.toMap).asJava
+ }
+
@Since("3.1.0")
def buildKeyToPathObjects(confFilesMap: Map[String, String]): Seq[KeyToPath]
= {
confFilesMap.map {
@@ -98,6 +116,16 @@ object KubernetesClientUtils extends Logging {
}.toList.sortBy(x => x.getKey) // List is sorted to make mocking based
tests work
}
+ /**
+ * Build a ConfigMap that will hold the content for environment variable
SPARK_CONF_DIR
+ * on remote pods. (Java-friendly)
+ */
+ @Since("4.1.0")
+ def buildConfigMapJava(configMapName: String, confFileMap: JMap[String,
String],
+ withLabels: JMap[String, String]): ConfigMap = {
+ buildConfigMap(configMapName, confFileMap.asScala.toMap,
withLabels.asScala.toMap)
+ }
+
/**
* Build a Config Map that will hold the content for environment variable
SPARK_CONF_DIR
* on remote pods.
diff --git
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala
index 5ed9cc5e03b1..f6af0f7d6bf0 100644
---
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala
+++
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.deploy.k8s.submit
import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files
+import java.util.{HashMap => JHashMap}
import java.util.UUID
import scala.jdk.CollectionConverters._
@@ -103,4 +104,28 @@ class KubernetesClientUtilsSuite extends SparkFunSuite
with BeforeAndAfter {
.build()
assert(outputConfigMap === expectedConfigMap)
}
+
+ test("SPARK-53832: verify that configmap built as expected va Java-friendly
APIs") {
+ val configMapName = s"configmap-name-${UUID.randomUUID.toString}"
+ val configMapNameSpace = s"configmap-namespace-${UUID.randomUUID.toString}"
+ val properties = new JHashMap[String, String]()
+ properties.put(Config.KUBERNETES_NAMESPACE.key, configMapNameSpace)
+ val sparkConf =
+ testSetup(properties.asScala.toMap.map(f => f._1 ->
f._2.getBytes(StandardCharsets.UTF_8)))
+ val confFileMap =
+ KubernetesClientUtils.buildSparkConfDirFilesMapJava(configMapName,
sparkConf, properties)
+ val outputConfigMap =
+ KubernetesClientUtils.buildConfigMapJava(configMapName, confFileMap,
properties)
+ val expectedConfigMap =
+ new ConfigMapBuilder()
+ .withNewMetadata()
+ .withName(configMapName)
+ .withNamespace(configMapNameSpace)
+ .withLabels(properties)
+ .endMetadata()
+ .withImmutable(true)
+ .addToData(confFileMap)
+ .build()
+ assert(outputConfigMap === expectedConfigMap)
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]