This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 21bd61a  [SPARK-53825] Use Java `MessageDigest` instead of `org.apache.commons.codec`
21bd61a is described below

commit 21bd61a145d41385619763df63650eeb84312cac
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Oct 7 15:22:13 2025 -0700

    [SPARK-53825] Use Java `MessageDigest` instead of `org.apache.commons.codec`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use Java `MessageDigest` instead of `org.apache.commons.codec`.
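
    For illustration, a minimal sketch of the substitution (not the exact patch): the JDK's `MessageDigest` takes over the hashing that `DigestUtils.sha256` from commons-codec performed before.

    ```java
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    class Sha256Sketch {
      static byte[] sha256(String input) throws NoSuchAlgorithmException {
        // Previously: org.apache.commons.codec.digest.DigestUtils.sha256(input)
        // DigestUtils encodes the String as UTF-8, so the JDK equivalent does the same.
        return MessageDigest.getInstance("SHA-256")
            .digest(input.getBytes(StandardCharsets.UTF_8));
      }
    }
    ```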
    
    ### Why are the changes needed?
    
    To reduce third-party library dependencies.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    This patch had conflicts when merged, resolved by
    Committer: Dongjoon Hyun <[email protected]>
    
    Closes #373 from dongjoon-hyun/SPARK-53825.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../spark/k8s/operator/SparkAppSubmissionWorker.java  | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
index a9e11bb..7c5fb98 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
@@ -19,14 +19,17 @@
 
 package org.apache.spark.k8s.operator;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.math.BigInteger;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
 import java.util.List;
 import java.util.Map;
 
 import scala.Option;
 
 import io.fabric8.kubernetes.client.KubernetesClient;
-import org.apache.commons.codec.digest.DigestUtils;
 
 import org.apache.spark.SparkConf;
 import org.apache.spark.deploy.k8s.KubernetesDriverSpec;
@@ -67,6 +70,17 @@ public class SparkAppSubmissionWorker {
   /** Property name for the Spark master URL prefix. */
   public static final String MASTER_URL_PREFIX_PROPS_NAME = "spark.master.url.prefix";
 
+  /** SHA256 Message Digest when generating hash-based identifier. */
+  private static final ThreadLocal<MessageDigest> SHA_256_THREAD_LOCAL =
+      ThreadLocal.withInitial(
+          () -> {
+            try {
+              return MessageDigest.getInstance("SHA-256");
+            } catch (NoSuchAlgorithmException e) {
+              throw new UnsupportedOperationException(e);
+            }
+          });
+
   /**
    * Build secondary resource spec for given app with Spark developer API, with defaults / overrides
    * as:
@@ -201,8 +215,9 @@ public class SparkAppSubmissionWorker {
    * @return The generated hash-based ID.
    */
   public static String generateHashBasedId(final String prefix, final String... identifiers) {
+    final MessageDigest sha256 = SHA_256_THREAD_LOCAL.get();
     String sha256Hash =
-        new BigInteger(1, DigestUtils.sha256(String.join("/", identifiers)))
+        new BigInteger(1, sha256.digest(String.join("/", identifiers).getBytes(UTF_8)))
             .toString(DEFAULT_ENCODE_BASE);
     String truncatedIdentifiersHash =
         sha256Hash.substring(0, DEFAULT_HASH_BASED_IDENTIFIER_LENGTH_LIMIT);

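A `MessageDigest` instance is stateful and not thread-safe, which is presumably why the patch keeps one per thread via `ThreadLocal.withInitial` rather than sharing a single instance or calling `MessageDigest.getInstance` on every invocation. A standalone sketch of the same pattern follows; the class name, encode base, length limit, and prefix-joining format are placeholders for illustration, not the operator's actual `DEFAULT_ENCODE_BASE` / `DEFAULT_HASH_BASED_IDENTIFIER_LENGTH_LIMIT` constants or exact ID format.

```java
import static java.nio.charset.StandardCharsets.UTF_8;

import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class HashIdSketch {
  // One digest per thread: MessageDigest objects carry internal state across update/digest calls.
  private static final ThreadLocal<MessageDigest> SHA_256 =
      ThreadLocal.withInitial(
          () -> {
            try {
              return MessageDigest.getInstance("SHA-256");
            } catch (NoSuchAlgorithmException e) {
              // SHA-256 is required in every JDK, so this should not happen in practice.
              throw new UnsupportedOperationException(e);
            }
          });

  // Placeholder values for illustration only.
  private static final int ENCODE_BASE = 36;
  private static final int LENGTH_LIMIT = 8;

  static String generateHashBasedId(String prefix, String... identifiers) {
    String hash =
        new BigInteger(1, SHA_256.get().digest(String.join("/", identifiers).getBytes(UTF_8)))
            .toString(ENCODE_BASE);
    // How the prefix and truncated hash are joined here is illustrative only.
    return prefix + "-" + hash.substring(0, LENGTH_LIMIT);
  }

  public static void main(String[] args) {
    System.out.println(generateHashBasedId("spark", "default", "my-app"));
  }
}
```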

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
