This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new c1849a3  [SPARK-54764] Use 4.1.0 instead of 4.0.1 for tests and benchmark
c1849a3 is described below

commit c1849a365d13e547aec8a5572acba5737ea0ba13
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Dec 18 16:30:38 2025 -0800

    [SPARK-54764] Use 4.1.0 instead of 4.0.1 for tests and benchmark
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use 4.1.0 instead of 4.0.1 for tests and benchmark.
    
    ### Why are the changes needed?
    
    To have test coverage for the latest 4.1.0 release.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #434 from dongjoon-hyun/SPARK-54764.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 tests/benchmark/sparkapps.sh                                      | 4 ++--
 .../resource-retain-duration/spark-example-retain-duration.yaml   | 4 ++--
 tests/e2e/spark-versions/chainsaw-test.yaml                       | 8 ++++----
 tests/e2e/state-transition/spark-cluster-example-succeeded.yaml   | 4 ++--
 tests/e2e/state-transition/spark-example-succeeded.yaml           | 5 ++---
 tests/e2e/watched-namespaces/spark-example.yaml                   | 5 ++---
 6 files changed, 14 insertions(+), 16 deletions(-)
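
For context on the diff below: the hard-coded apache/spark:4.0.1-* container image tags in the test manifests are replaced with a {{SPARK_VERSION}} placeholder. Assuming that placeholder is filled in from the SPARK_VERSION binding declared in tests/e2e/spark-versions/chainsaw-test.yaml (an assumption about the test harness, not something this commit states), a rendered manifest would look roughly like:

    # Illustrative sketch only, not part of the commit.
    # Assumes {{SPARK_VERSION}} resolves to the 4.1.0 binding from chainsaw-test.yaml.
    sparkConf:
      spark.kubernetes.container.image: "apache/spark:4.1.0-scala"
    runtimeVersions:
      sparkVersion: "4.1.0"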

diff --git a/tests/benchmark/sparkapps.sh b/tests/benchmark/sparkapps.sh
index e9f76e0..d72b89d 100755
--- a/tests/benchmark/sparkapps.sh
+++ b/tests/benchmark/sparkapps.sh
@@ -41,12 +41,12 @@ spec:
     spark.driver.memory: "256m"
     spark.driver.memoryOverhead: "0m"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.driver.master: "local[1]"
     spark.kubernetes.driver.pod.excludedFeatureSteps: "org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.driver.request.cores: "100m"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
 ---
 EOF
 done
diff --git a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
index 1a410f5..8a83f7a 100644
--- a/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
+++ b/tests/e2e/resource-retain-duration/spark-example-retain-duration.yaml
@@ -29,7 +29,7 @@ spec:
     ttlAfterStopMillis: 30000
   sparkConf:
     spark.executor.instances: "1"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
   runtimeVersions:
-    sparkVersion: 4.0.1
+    sparkVersion: 4.1.0
diff --git a/tests/e2e/spark-versions/chainsaw-test.yaml b/tests/e2e/spark-versions/chainsaw-test.yaml
index 36e8279..ac29035 100644
--- a/tests/e2e/spark-versions/chainsaw-test.yaml
+++ b/tests/e2e/spark-versions/chainsaw-test.yaml
@@ -23,13 +23,13 @@ spec:
   scenarios:
   - bindings:
       - name: "SPARK_VERSION"
-        value: "4.0.1"
+        value: "4.1.0"
       - name: "SCALA_VERSION"
         value: "2.13"
       - name: "JAVA_VERSION"
         value: "17"
       - name: "IMAGE"
-        value: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
+        value: "apache/spark:4.1.0-scala-java17"
   - bindings:
       - name: "SPARK_VERSION"
         value: "3.5.7"
@@ -41,13 +41,13 @@ spec:
         value: 'apache/spark:3.5.7-scala2.12-java17-ubuntu'
   - bindings:
       - name: "SPARK_VERSION"
-        value: "4.0.1"
+        value: "4.1.0"
       - name: "SCALA_VERSION"
         value: "2.13"
       - name: "JAVA_VERSION"
         value: "21"
       - name: "IMAGE"
-        value: 'apache/spark:4.0.1-java21-scala'
+        value: 'apache/spark:4.1.0-scala'
   steps:
     - name: install-spark-application
       try:
diff --git a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
index 92d2145..07a6eed 100644
--- a/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-cluster-example-succeeded.yaml
@@ -19,14 +19,14 @@ metadata:
   namespace: default
 spec:
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 1
       minWorkers: 1
       maxWorkers: 1
   sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
     spark.master.ui.title: "Spark Cluster E2E Test"
     spark.master.rest.enabled: "true"
     spark.master.rest.host: "0.0.0.0"
diff --git a/tests/e2e/state-transition/spark-example-succeeded.yaml b/tests/e2e/state-transition/spark-example-succeeded.yaml
index 9fe1d56..531bc8d 100644
--- a/tests/e2e/state-transition/spark-example-succeeded.yaml
+++ b/tests/e2e/state-transition/spark-example-succeeded.yaml
@@ -25,8 +25,7 @@ spec:
   jars: "local:///opt/spark/examples/jars/spark-examples.jar"
   sparkConf:
     spark.executor.instances: "1"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
   runtimeVersions:
-    sparkVersion: 4.0.1
-    scalaVersion: "2.13"
+    sparkVersion: 4.1.0
diff --git a/tests/e2e/watched-namespaces/spark-example.yaml b/tests/e2e/watched-namespaces/spark-example.yaml
index edaefe6..eec80a7 100644
--- a/tests/e2e/watched-namespaces/spark-example.yaml
+++ b/tests/e2e/watched-namespaces/spark-example.yaml
@@ -25,10 +25,9 @@ spec:
   jars: "local:///opt/spark/examples/jars/spark-examples.jar"
   sparkConf:
     spark.executor.instances: "1"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-scala2.13-java17-ubuntu"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.driver.request.cores: "0.5"
     spark.kubernetes.executor.request.cores: "0.5"
   runtimeVersions:
-    sparkVersion: 4.0.1
-    scalaVersion: "2.13"
+    sparkVersion: 4.1.0
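
Before running the benchmark script or the e2e suites against this change, one optional sanity check is to confirm that the 4.1.0 image tags referenced above can be pulled. A minimal sketch, assuming Docker is available and that these tags are published on Docker Hub (which this commit does not itself verify):

    # Optional pre-flight check; tag availability is an assumption, not verified here.
    docker pull apache/spark:4.1.0
    docker pull apache/spark:4.1.0-scala
    docker pull apache/spark:4.1.0-scala-java17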

