This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch gh-pages
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/gh-pages by this push:
     new 3f136b0  Sync examples with main branch for Apache Spark 4.1.0
3f136b0 is described below

commit 3f136b00bd0144202bc71acf7f737a660d242293
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon Dec 22 09:56:58 2025 +0900

    Sync examples with main branch for Apache Spark 4.1.0
---
 pi.yaml                              | 5 ++---
 prod-cluster-with-three-workers.yaml | 4 ++--
 spark-connect-server.yaml            | 7 ++++---
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/pi.yaml b/pi.yaml
index 684d377..a4cc7cc 100644
--- a/pi.yaml
+++ b/pi.yaml
@@ -24,10 +24,9 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.driver.pod.excludedFeatureSteps: "org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    scalaVersion: "2.13"
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/prod-cluster-with-three-workers.yaml b/prod-cluster-with-three-workers.yaml
index 1957a6e..a3a6531 100644
--- a/prod-cluster-with-three-workers.yaml
+++ b/prod-cluster-with-three-workers.yaml
@@ -18,14 +18,14 @@ metadata:
   name: prod
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
       minWorkers: 3
       maxWorkers: 3
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "Prod Spark Cluster"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/spark-connect-server.yaml b/spark-connect-server.yaml
index 9d05531..5ab877d 100644
--- a/spark-connect-server.yaml
+++ b/spark-connect-server.yaml
@@ -24,10 +24,11 @@ spec:
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
     spark.kubernetes.driver.pod.excludedFeatureSteps: "org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
+    spark.kubernetes.executor.podNamePrefix: "spark-connect-server"
+    spark.scheduler.mode: "FAIR"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    scalaVersion: "2.13"
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
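
Note: assuming the gh-pages publishing step substitutes the {{SPARK_VERSION}} placeholder with the released version (4.1.0 here) -- the substitution mechanism itself is not part of this diff -- the updated pi.yaml settings would render roughly as the sketch below. The "-scala" image tag suffix is carried over from the diff as-is.

    spec:
      sparkConf:
        spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
        # {{SPARK_VERSION}} assumed to be rendered at publish time
        spark.kubernetes.container.image: "apache/spark:4.1.0-scala"
      runtimeVersions:
        sparkVersion: "4.1.0"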


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
