This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 2839e3c  [SPARK-52657] Allow `spark.app.id`
2839e3c is described below

commit 2839e3c5438c7b1e897c0a21c6649cb6600bdebf
Author: Qi Tan <[email protected]>
AuthorDate: Wed Jul 2 18:42:17 2025 -0700

    [SPARK-52657] Allow `spark.app.id`
    
    ### What changes were proposed in this pull request?
    SparkAppDriverConf should use the `spark.app.id` value stored in effectiveSparkConf rather than the pre-computed id when creating the driver conf.
    
    ### Why are the changes needed?
    When a user sets `spark.app.id` in sparkConf, the operator should honor that value (see the sketch after the diff below).
    
    ### Does this PR introduce _any_ user-facing change?
    no
    
    ### How was this patch tested?
    unit test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    no
    
    Closes #271 from TQJADE/appid-bug-fix.
    
    Authored-by: Qi Tan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../spark/k8s/operator/SparkAppSubmissionWorker.java     |  2 +-
 .../spark/k8s/operator/SparkAppSubmissionWorkerTest.java | 16 ++++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
index acccb8c..1a3ebe2 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorker.java
@@ -126,7 +126,7 @@ public class SparkAppSubmissionWorker {
     effectiveSparkConf.setIfMissing("spark.app.id", appId);
     return SparkAppDriverConf.create(
         effectiveSparkConf,
-        appId,
+        effectiveSparkConf.getAppId(),
         primaryResource,
         applicationSpec.getMainClass(),
         applicationSpec.getDriverArgs().toArray(String[]::new),
diff --git a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
index b41f99a..b678d94 100644
--- a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
+++ b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
@@ -219,4 +219,20 @@ class SparkAppSubmissionWorkerTest {
     String appId = SparkAppSubmissionWorker.generateSparkAppId(mockApp);
     assertTrue(appId.length() <= DEFAULT_ID_LENGTH_LIMIT);
   }
+
+  @Test
+  void checkAppIdWhenUserSpecifiedInSparkConf() {
+    SparkApplication mockApp = mock(SparkApplication.class);
+    ApplicationSpec mockSpec = mock(ApplicationSpec.class);
+    Map<String, String> appProps = new HashMap<>();
+    appProps.put("spark.app.id", "foo");
+    ObjectMeta appMeta = new ObjectMetaBuilder().withName("app1").withNamespace("ns1").build();
+    when(mockSpec.getSparkConf()).thenReturn(appProps);
+    when(mockApp.getSpec()).thenReturn(mockSpec);
+    when(mockApp.getMetadata()).thenReturn(appMeta);
+
+    SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    assertEquals(conf.appId(), "foo");
+  }
 }
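
A minimal, hypothetical sketch (not part of the patch) of the behavior the fix relies on, using Spark's SparkConf (set, setIfMissing, getAppId): because setIfMissing keeps a user-supplied spark.app.id, the driver conf must read the id back from effectiveSparkConf instead of passing along the pre-computed one. Class and variable names below are made up for illustration.

    // Hypothetical illustration only; not part of the operator code base.
    import org.apache.spark.SparkConf;

    public class AppIdResolutionSketch {
      public static void main(String[] args) {
        String generatedAppId = "spark-1234567890"; // stands in for the operator's pre-computed id

        // User supplied spark.app.id: setIfMissing keeps "foo", so getAppId() returns "foo".
        SparkConf withUserId = new SparkConf(false).set("spark.app.id", "foo");
        withUserId.setIfMissing("spark.app.id", generatedAppId);
        System.out.println(withUserId.getAppId()); // foo

        // No user value: setIfMissing installs the generated id.
        SparkConf withoutUserId = new SparkConf(false);
        withoutUserId.setIfMissing("spark.app.id", generatedAppId);
        System.out.println(withoutUserId.getAppId()); // spark-1234567890
      }
    }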


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
