This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 2d46012  [SPARK-54765] Make `README.md` and `examples` up-to-date with 4.1.0
2d46012 is described below

commit 2d46012e4e0e54ebe3b605e6afdf2ce41e08c721
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Fri Dec 19 00:15:26 2025 -0800

    [SPARK-54765] Make `README.md` and `examples` up-to-date with 4.1.0
    
    ### What changes were proposed in this pull request?
    
    This PR aims to make `README.md` and `examples` up-to-date with 4.1.0 (except the `Apache Iceberg` example).
    
    ### Why are the changes needed?
    
    To provide the latest updates on top of Apache Spark 4.1.0 images.
    
    In addition, `pi-java21.yaml` is adjusted and renamed to `pi-java17.yaml` because `pi.yaml` already uses Java 21 by default since Apache Spark 4.1.0.
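
    The default Java version can be spot-checked against the published images
    (a sketch; it assumes Docker is available locally and uses the image tags
    that appear in this repository's examples):

    # image tags assumed from the examples in this repository
    $ docker run --rm apache/spark:4.1.0-scala java -version
    $ docker run --rm apache/spark:4.1.0-scala-java17 java -version

    The first command should report Java 21 and the second Java 17, matching
    the renamed `pi-java17.yaml` example.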
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Manual review.
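
    For instance, a single example manifest can be verified end-to-end as
    follows (a sketch; it assumes a running operator, the default `spark`
    service account, and manual substitution of the `{{SPARK_VERSION}}`
    placeholder, since the actual substitution tooling may differ):

    # manual placeholder substitution (assumed; actual tooling may differ)
    $ sed 's/{{SPARK_VERSION}}/4.1.0/' examples/pi-java17.yaml | kubectl apply -f -
    $ kubectl get sparkapp pi-java17
    $ kubectl delete sparkapp pi-java17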
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #435 from dongjoon-hyun/SPARK-54765.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 README.md                                          | 31 +++++++++++-----------
 examples/cluster-on-yunikorn.yaml                  |  4 +--
 examples/cluster-with-hpa-template.yaml            |  4 +--
 examples/cluster-with-hpa.yaml                     |  4 +--
 examples/cluster-with-template.yaml                |  4 +--
 examples/dfs-read-write.yaml                       |  6 ++---
 examples/{pi-scala.yaml => pi-java17.yaml}         |  6 ++---
 examples/pi-java21.yaml                            | 31 ----------------------
 examples/pi-on-yunikorn.yaml                       |  4 +--
 examples/pi-python.yaml                            |  4 +--
 examples/pi-statefulset.yaml                       |  4 +--
 examples/pi-v1alpha1.yaml                          |  4 +--
 examples/pi-v1beta1.yaml                           |  4 +--
 examples/pi-with-driver-timeout.yaml               |  4 +--
 examples/pi-with-one-pod.yaml                      |  4 +--
 examples/pi-with-spark-connect-plugin.yaml         |  4 +--
 examples/pi-with-template.yaml                     |  4 +--
 examples/prod-cluster-with-three-workers.yaml      |  4 +--
 examples/qa-cluster-with-one-worker.yaml           |  4 +--
 .../spark-connect-server-with-spark-cluster.yaml   |  4 +--
 examples/sql.yaml                                  |  4 +--
 examples/stream-word-count.yaml                    |  6 ++---
 22 files changed, 58 insertions(+), 90 deletions(-)

diff --git a/README.md b/README.md
index 6538665..83a04e5 100644
--- a/README.md
+++ b/README.md
@@ -75,21 +75,21 @@ $ kubectl port-forward prod-master-0 6066 &
 $ ./examples/submit-pi-to-prod.sh
 {
   "action" : "CreateSubmissionResponse",
-  "message" : "Driver successfully submitted as driver-20250628212324-0000",
-  "serverSparkVersion" : "4.0.0",
-  "submissionId" : "driver-20250628212324-0000",
+  "message" : "Driver successfully submitted as driver-20251219002524-0000",
+  "serverSparkVersion" : "4.1.0",
+  "submissionId" : "driver-20251219002524-0000",
   "success" : true
 }
 
-$ curl http://localhost:6066/v1/submissions/status/driver-20250628212324-0000/
+$ curl http://localhost:6066/v1/submissions/status/driver-20251219002524-0000/
 {
   "action" : "SubmissionStatusResponse",
   "driverState" : "FINISHED",
-  "serverSparkVersion" : "4.0.0",
-  "submissionId" : "driver-20250628212324-0000",
+  "serverSparkVersion" : "4.1.0",
+  "submissionId" : "driver-20251219002524-0000",
   "success" : true,
-  "workerHostPort" : "10.1.0.88:34643",
-  "workerId" : "worker-20250628212306-10.1.0.88-34643"
+  "workerHostPort" : "10.1.0.190:46501",
+  "workerId" : "worker-20251219002506-10.1.0.190-46501"
 }
 
 $ kubectl delete sparkcluster prod
@@ -119,16 +119,15 @@ $ kubectl describe pod pi-on-yunikorn-0-driver
 Events:
   Type    Reason             Age   From      Message
   ----    ------             ----  ----      -------
-  Normal  Scheduling         14s   yunikorn  default/pi-on-yunikorn-0-driver is queued and waiting for allocation
-  Normal  Scheduled          14s   yunikorn  Successfully assigned default/pi-on-yunikorn-0-driver to node docker-desktop
-  Normal  PodBindSuccessful  14s   yunikorn  Pod default/pi-on-yunikorn-0-driver is successfully bound to node docker-desktop
-  Normal  TaskCompleted      6s    yunikorn  Task default/pi-on-yunikorn-0-driver is completed
-  Normal  Pulled             13s   kubelet   Container image "apache/spark:4.0.0" already present on machine
-  Normal  Created            13s   kubelet   Created container spark-kubernetes-driver
-  Normal  Started            13s   kubelet   Started container spark-kubernetes-driver
+  Normal  Scheduling         1s    yunikorn  default/pi-on-yunikorn-0-driver is queued and waiting for allocation
+  Normal  Scheduled          1s    yunikorn  Successfully assigned default/pi-on-yunikorn-0-driver to node docker-desktop
+  Normal  PodBindSuccessful  1s    yunikorn  Pod default/pi-on-yunikorn-0-driver is successfully bound to node docker-desktop
+  Normal  Pulled             0s    kubelet   Container image "apache/spark:4.1.0-scala" already present on machine
+  Normal  Created            0s    kubelet   Created container: spark-kubernetes-driver
+  Normal  Started            0s    kubelet   Started container spark-kubernetes-driver
 
 $ kubectl delete sparkapp pi-on-yunikorn
-sparkapplication.spark.apache.org "pi-on-yunikorn" deleted
+sparkapplication.spark.apache.org "pi-on-yunikorn" deleted from default namespace
 ```
 
 ## Clean Up
diff --git a/examples/cluster-on-yunikorn.yaml b/examples/cluster-on-yunikorn.yaml
index fa5c882..7b0cc7d 100644
--- a/examples/cluster-on-yunikorn.yaml
+++ b/examples/cluster-on-yunikorn.yaml
@@ -18,14 +18,14 @@ metadata:
   name: cluster-on-yunikorn
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
       minWorkers: 1
       maxWorkers: 1
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.scheduler.name: "yunikorn"
     spark.master.ui.title: "Spark Cluster on YuniKorn Scheduler"
     spark.master.rest.enabled: "true"
diff --git a/examples/cluster-with-hpa-template.yaml b/examples/cluster-with-hpa-template.yaml
index 73f7a20..8cf6304 100644
--- a/examples/cluster-with-hpa-template.yaml
+++ b/examples/cluster-with-hpa-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
@@ -58,7 +58,7 @@ spec:
             value: 1
             periodSeconds: 1200
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "Cluster with HorizontalPodAutoscaler Template"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/cluster-with-hpa.yaml b/examples/cluster-with-hpa.yaml
index 8f5c164..4bb3d48 100644
--- a/examples/cluster-with-hpa.yaml
+++ b/examples/cluster-with-hpa.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-hpa
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
@@ -38,7 +38,7 @@ spec:
                 cpu: "3"
                 memory: "3Gi"
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "Cluster with HorizontalPodAutoscaler"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/cluster-with-template.yaml b/examples/cluster-with-template.yaml
index 4627171..1388ec6 100644
--- a/examples/cluster-with-template.yaml
+++ b/examples/cluster-with-template.yaml
@@ -18,7 +18,7 @@ metadata:
   name: cluster-with-template
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
@@ -93,7 +93,7 @@ spec:
       annotations:
         customAnnotation: "annotation"
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "Spark Cluster with Template"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/dfs-read-write.yaml b/examples/dfs-read-write.yaml
index 5a6b47d..b3bc209 100644
--- a/examples/dfs-read-write.yaml
+++ b/examples/dfs-read-write.yaml
@@ -25,14 +25,14 @@ spec:
   driverArgs: [ "/opt/spark/RELEASE", "s3a://data/" ]
   sparkConf:
     spark.logConf: "true"
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
+    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
     spark.jars.ivy: "/tmp/.ivy2.5.2"
     spark.driver.memory: "2g"
     spark.dynamicAllocation.enabled: "true"
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.hadoop.fs.defaultFS: "s3a://data"
    spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
     spark.hadoop.fs.s3a.path.style.access: "true"
@@ -41,4 +41,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-scala.yaml b/examples/pi-java17.yaml
similarity index 90%
rename from examples/pi-scala.yaml
rename to examples/pi-java17.yaml
index d6e1b2d..9392619 100644
--- a/examples/pi-scala.yaml
+++ b/examples/pi-java17.yaml
@@ -15,7 +15,7 @@
 apiVersion: spark.apache.org/v1
 kind: SparkApplication
 metadata:
-  name: pi-scala
+  name: pi-java17
 spec:
   mainClass: "org.apache.spark.examples.SparkPi"
   jars: "local:///opt/spark/examples/jars/spark-examples.jar"
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala-java17"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-java21.yaml b/examples/pi-java21.yaml
deleted file mode 100644
index 14f9301..0000000
--- a/examples/pi-java21.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: pi-java21
-spec:
-  mainClass: "org.apache.spark.examples.SparkPi"
-  jars: "local:///opt/spark/examples/jars/spark-examples.jar"
-  sparkConf:
-    spark.dynamicAllocation.enabled: "true"
-    spark.dynamicAllocation.shuffleTracking.enabled: "true"
-    spark.dynamicAllocation.maxExecutors: "3"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
-  applicationTolerations:
-    resourceRetainPolicy: OnFailure
-  runtimeVersions:
-    sparkVersion: "4.0.1"
diff --git a/examples/pi-on-yunikorn.yaml b/examples/pi-on-yunikorn.yaml
index d0c35c4..f3d498b 100644
--- a/examples/pi-on-yunikorn.yaml
+++ b/examples/pi-on-yunikorn.yaml
@@ -25,7 +25,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.scheduler.name: "yunikorn"
     spark.kubernetes.driver.label.queue: "root.default"
     spark.kubernetes.executor.label.queue: "root.default"
@@ -34,4 +34,4 @@ spec:
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-python.yaml b/examples/pi-python.yaml
index 0eafc93..e313810 100644
--- a/examples/pi-python.yaml
+++ b/examples/pi-python.yaml
@@ -23,8 +23,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-python3"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-statefulset.yaml b/examples/pi-statefulset.yaml
index e7ac510..62bef4c 100644
--- a/examples/pi-statefulset.yaml
+++ b/examples/pi-statefulset.yaml
@@ -24,8 +24,8 @@ spec:
     spark.executor.instances: "3"
     spark.kubernetes.allocation.pods.allocator: "statefulset"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-v1alpha1.yaml b/examples/pi-v1alpha1.yaml
index 49d1690..6cf25d2 100644
--- a/examples/pi-v1alpha1.yaml
+++ b/examples/pi-v1alpha1.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-v1beta1.yaml b/examples/pi-v1beta1.yaml
index 5cb2c2c..34c199c 100644
--- a/examples/pi-v1beta1.yaml
+++ b/examples/pi-v1beta1.yaml
@@ -24,8 +24,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-with-driver-timeout.yaml b/examples/pi-with-driver-timeout.yaml
index a36e94d..e3d2155 100644
--- a/examples/pi-with-driver-timeout.yaml
+++ b/examples/pi-with-driver-timeout.yaml
@@ -27,8 +27,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-with-one-pod.yaml b/examples/pi-with-one-pod.yaml
index 4a9cc00..ac55eab 100644
--- a/examples/pi-with-one-pod.yaml
+++ b/examples/pi-with-one-pod.yaml
@@ -24,6 +24,6 @@ spec:
     spark.kubernetes.driver.request.cores: "5"
     spark.kubernetes.driver.limit.cores: "5"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-with-spark-connect-plugin.yaml b/examples/pi-with-spark-connect-plugin.yaml
index c18e65a..c7ce99d 100644
--- a/examples/pi-with-spark-connect-plugin.yaml
+++ b/examples/pi-with-spark-connect-plugin.yaml
@@ -26,8 +26,8 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi-with-template.yaml b/examples/pi-with-template.yaml
index 8845d6f..a6d4daa 100644
--- a/examples/pi-with-template.yaml
+++ b/examples/pi-with-template.yaml
@@ -24,7 +24,7 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   driverSpec:
@@ -38,4 +38,4 @@ spec:
         priorityClassName: system-cluster-critical
         terminationGracePeriodSeconds: 0
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/prod-cluster-with-three-workers.yaml b/examples/prod-cluster-with-three-workers.yaml
index 1957a6e..a3a6531 100644
--- a/examples/prod-cluster-with-three-workers.yaml
+++ b/examples/prod-cluster-with-three-workers.yaml
@@ -18,14 +18,14 @@ metadata:
   name: prod
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
       minWorkers: 3
       maxWorkers: 3
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "Prod Spark Cluster"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/qa-cluster-with-one-worker.yaml b/examples/qa-cluster-with-one-worker.yaml
index e9a4cb8..1ffbe14 100644
--- a/examples/qa-cluster-with-one-worker.yaml
+++ b/examples/qa-cluster-with-one-worker.yaml
@@ -18,14 +18,14 @@ metadata:
   name: qa
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
       minWorkers: 1
       maxWorkers: 1
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.master.ui.title: "QA Spark Cluster"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/examples/spark-connect-server-with-spark-cluster.yaml b/examples/spark-connect-server-with-spark-cluster.yaml
index 00fca54..d7b41cb 100644
--- a/examples/spark-connect-server-with-spark-cluster.yaml
+++ b/examples/spark-connect-server-with-spark-cluster.yaml
@@ -24,7 +24,7 @@ spec:
     spark.executor.cores: "1"
     spark.cores.max: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.ui.reverseProxy: "true"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/sql.yaml b/examples/sql.yaml
index 57bcd24..0fc1e93 100644
--- a/examples/sql.yaml
+++ b/examples/sql.yaml
@@ -25,6 +25,6 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/stream-word-count.yaml b/examples/stream-word-count.yaml
index 9d2cd06..6b2821a 100644
--- a/examples/stream-word-count.yaml
+++ b/examples/stream-word-count.yaml
@@ -21,13 +21,13 @@ spec:
   jars: "local:///opt/spark/examples/jars/spark-examples.jar"
   driverArgs: [ "s3a://ingest" ]
   sparkConf:
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
+    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
     spark.jars.ivy: "/tmp/.ivy2.5.2"
     spark.dynamicAllocation.enabled: "true"
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.log.level: "WARN"
     spark.eventLog.enabled: "true"
     spark.eventLog.dir: "s3a://spark-events/"
@@ -37,4 +37,4 @@ spec:
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
