This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 241592c  [SPARK-53932] Add `FAIR` schedule examples with `Spark (Connect|Thrift)` servers
241592c is described below

commit 241592ccbe7d4cf2384be938071157ee161c269f
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Oct 15 23:27:03 2025 -0700

    [SPARK-53932] Add `FAIR` schedule examples with `Spark (Connect|Thrift)` servers
    
    ### What changes were proposed in this pull request?
    
    This PR aims to add `FAIR` schedule examples by
    - Updating the `Spark 4.1.0-preview2`-based `Spark Connect Server` example.
    - Adding a `Spark 4.1.0-preview2`-based `Spark Thrift Server` example.
    
    ### Why are the changes needed?
    
    To show `FAIR` schedule examples, which are beneficial in multi-tenant or concurrent job execution environments (a client-side sketch follows the screenshot below).
    
    <img width="566" height="267" alt="Screenshot 2025-10-15 at 19 52 31" src="https://github.com/user-attachments/assets/94968cc6-cee6-483d-8d57-924a0158a2b9" />
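
    For reference (not part of this commit), a minimal client-side sketch of using the FAIR-scheduled `Spark Connect Server` example above. The Service name `spark-connect-server-preview` and port `15002` are assumptions; adjust them to the Service that the operator exposes in your cluster.

    ```python
    from pyspark.sql import SparkSession

    # Connect to the Spark Connect server started from the example above.
    # The endpoint below is an assumption, not something created by this commit.
    spark = (
        SparkSession.builder
        .remote("sc://spark-connect-server-preview:15002")
        .getOrCreate()
    )

    # With spark.scheduler.mode=FAIR on the server, jobs from concurrent
    # sessions can share executors instead of strictly queueing FIFO.
    spark.range(1_000_000).selectExpr("sum(id) AS total").show()
    ```

    For the `Spark Thrift Server` example, a JDBC session can pick a FAIR scheduler pool with `SET spark.scheduler.pool=<pool name>;`, as described in Spark's job scheduling documentation.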
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change because these are examples.
    
    ### How was this patch tested?
    
    Manual review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #393 from dongjoon-hyun/SPARK-53932.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 examples/spark-connect-server-preview.yaml                |  3 +++
 ...rver-preview.yaml => spark-thrift-server-preview.yaml} | 15 ++++++++++-----
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/examples/spark-connect-server-preview.yaml b/examples/spark-connect-server-preview.yaml
index d4cb934..29e89b8 100644
--- a/examples/spark-connect-server-preview.yaml
+++ b/examples/spark-connect-server-preview.yaml
@@ -25,6 +25,9 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
+    spark.kubernetes.driver.pod.excludedFeatureSteps: "org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
+    spark.kubernetes.executor.podNamePrefix: "spark-connect-server-preview"
+    spark.scheduler.mode: "FAIR"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
diff --git a/examples/spark-connect-server-preview.yaml b/examples/spark-thrift-server-preview.yaml
similarity index 73%
copy from examples/spark-connect-server-preview.yaml
copy to examples/spark-thrift-server-preview.yaml
index d4cb934..9fda127 100644
--- a/examples/spark-connect-server-preview.yaml
+++ b/examples/spark-thrift-server-preview.yaml
@@ -15,17 +15,22 @@
 apiVersion: spark.apache.org/v1
 kind: SparkApplication
 metadata:
-  name: spark-connect-server-preview
+  name: spark-thrift-server-preview
 spec:
-  mainClass: "org.apache.spark.sql.connect.service.SparkConnectServer"
+  mainClass: "org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
   sparkConf:
     spark.dynamicAllocation.enabled: "true"
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
-  applicationTolerations:
-    resourceRetainPolicy: OnFailure
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-java21-scala"
+    spark.kubernetes.driver.pod.excludedFeatureSteps: "org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
+    spark.kubernetes.executor.podNamePrefix: "spark-thrift-server-preview"
+    spark.scheduler.mode: "FAIR"
   runtimeVersions:
     sparkVersion: "4.1.0-preview2"
+  applicationTolerations:
+    restartConfig:
+      restartPolicy: Always
+      maxRestartAttempts: 9223372036854775807


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
