This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 6b091e0  [SPARK-54755] Promote all preview examples with 4.1.0
6b091e0 is described below

commit 6b091e0f08c1e2e125eed44bc976789124e1e5c4
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Dec 18 08:56:52 2025 -0800

    [SPARK-54755] Promote all preview examples with 4.1.0
    
    ### What changes were proposed in this pull request?
    
    This PR aims to promote all preview examples with 4.1.0.
    
    ### Why are the changes needed?
    
    Apache Spark 4.1.0 is released officially.
    
    - https://github.com/apache/spark/releases/tag/v4.1.0
    - https://spark.apache.org/docs/4.1.0/
    - https://dist.apache.org/repos/dist/release/spark/spark-4.1.0/
    
    In general, this PR does 3 steps.
    1. Removes the `-preview` suffix from the SparkApp or SparkCluster names.
    2. Uses `4.1.0`-based image instead of `4.1.0-preview4`-based one.
    3. Renames files to replace the corresponding 4.0.1-based examples.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Manual review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #432 from dongjoon-hyun/SPARK-54755.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 examples/cluster-java21.yaml                       | 32 ----------------
 examples/{cluster-preview.yaml => cluster.yaml}    |  4 +-
 examples/pi-preview-with-eventlog.yaml             | 40 --------------------
 examples/pi-preview.yaml                           | 32 ----------------
 examples/pi-with-eventlog.yaml                     |  7 ++--
 examples/pi.yaml                                   |  4 +-
 examples/spark-connect-server-preview.yaml         | 34 -----------------
 examples/spark-connect-server.yaml                 |  7 +++-
 examples/spark-history-server-preview.yaml         | 43 ----------------------
 examples/spark-history-server.yaml                 |  6 +--
 examples/spark-thrift-server-preview.yaml          | 36 ------------------
 examples/spark-thrift-server.yaml                  |  7 +++-
 .../{word-count-preview.yaml => word-count.yaml}   |  4 +-
 13 files changed, 23 insertions(+), 233 deletions(-)

diff --git a/examples/cluster-java21.yaml b/examples/cluster-java21.yaml
deleted file mode 100644
index b81fbc3..0000000
--- a/examples/cluster-java21.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkCluster
-metadata:
-  name: cluster-java21
-spec:
-  runtimeVersions:
-    sparkVersion: "4.0.1"
-  clusterTolerations:
-    instanceConfig:
-      initWorkers: 3
-      minWorkers: 3
-      maxWorkers: 3
-  sparkConf:
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21"
-    spark.master.ui.title: "Prod Spark Cluster (Java 21)"
-    spark.master.rest.enabled: "true"
-    spark.master.rest.host: "0.0.0.0"
-    spark.ui.reverseProxy: "true"
diff --git a/examples/cluster-preview.yaml b/examples/cluster.yaml
similarity index 95%
rename from examples/cluster-preview.yaml
rename to examples/cluster.yaml
index 2cbfa91..830fb4c 100644
--- a/examples/cluster-preview.yaml
+++ b/examples/cluster.yaml
@@ -15,10 +15,10 @@
 apiVersion: spark.apache.org/v1
 kind: SparkCluster
 metadata:
-  name: cluster-preview
+  name: cluster
 spec:
   runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
+    sparkVersion: "4.1.0"
   clusterTolerations:
     instanceConfig:
       initWorkers: 3
diff --git a/examples/pi-preview-with-eventlog.yaml 
b/examples/pi-preview-with-eventlog.yaml
deleted file mode 100644
index b99b35d..0000000
--- a/examples/pi-preview-with-eventlog.yaml
+++ /dev/null
@@ -1,40 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: pi-preview-with-eventlog
-spec:
-  mainClass: "org.apache.spark.examples.SparkPi"
-  jars: "local:///opt/spark/examples/jars/spark-examples.jar"
-  sparkConf:
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
-    spark.jars.ivy: "/tmp/.ivy2.5.2"
-    spark.dynamicAllocation.enabled: "true"
-    spark.dynamicAllocation.shuffleTracking.enabled: "true"
-    spark.dynamicAllocation.maxExecutors: "3"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
-    spark.eventLog.enabled: "true"
-    spark.eventLog.dir: "s3a://spark-events/"
-    spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
-    spark.hadoop.fs.s3a.path.style.access: "true"
-    spark.hadoop.fs.s3a.access.key: "test"
-    spark.hadoop.fs.s3a.secret.key: "test"
-    spark.driver.memory: "2g"
-  applicationTolerations:
-    resourceRetainPolicy: OnFailure
-  runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
diff --git a/examples/pi-preview.yaml b/examples/pi-preview.yaml
deleted file mode 100644
index b1e55bb..0000000
--- a/examples/pi-preview.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: pi-preview
-spec:
-  mainClass: "org.apache.spark.examples.SparkPi"
-  jars: "local:///opt/spark/examples/jars/spark-examples.jar"
-  sparkConf:
-    spark.dynamicAllocation.enabled: "true"
-    spark.dynamicAllocation.shuffleTracking.enabled: "true"
-    spark.dynamicAllocation.maxExecutors: "3"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
-  applicationTolerations:
-    resourceRetainPolicy: OnFailure
-  runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
diff --git a/examples/pi-with-eventlog.yaml b/examples/pi-with-eventlog.yaml
index 8c0995c..b2dda11 100644
--- a/examples/pi-with-eventlog.yaml
+++ b/examples/pi-with-eventlog.yaml
@@ -20,20 +20,21 @@ spec:
   mainClass: "org.apache.spark.examples.SparkPi"
   jars: "local:///opt/spark/examples/jars/spark-examples.jar"
   sparkConf:
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
+    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
     spark.jars.ivy: "/tmp/.ivy2.5.2"
     spark.dynamicAllocation.enabled: "true"
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.eventLog.enabled: "true"
     spark.eventLog.dir: "s3a://spark-events/"
     spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
     spark.hadoop.fs.s3a.path.style.access: "true"
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
+    spark.driver.memory: "2g"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/pi.yaml b/examples/pi.yaml
index abfb530..a4cc7cc 100644
--- a/examples/pi.yaml
+++ b/examples/pi.yaml
@@ -24,9 +24,9 @@ spec:
     spark.dynamicAllocation.shuffleTracking.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/spark-connect-server-preview.yaml 
b/examples/spark-connect-server-preview.yaml
deleted file mode 100644
index 02db224..0000000
--- a/examples/spark-connect-server-preview.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: spark-connect-server-preview
-spec:
-  mainClass: "org.apache.spark.sql.connect.service.SparkConnectServer"
-  sparkConf:
-    spark.dynamicAllocation.enabled: "true"
-    spark.dynamicAllocation.shuffleTracking.enabled: "true"
-    spark.dynamicAllocation.minExecutors: "3"
-    spark.dynamicAllocation.maxExecutors: "3"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
-    spark.kubernetes.executor.podNamePrefix: "spark-connect-server-preview"
-    spark.scheduler.mode: "FAIR"
-  applicationTolerations:
-    resourceRetainPolicy: OnFailure
-  runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
diff --git a/examples/spark-connect-server.yaml 
b/examples/spark-connect-server.yaml
index e9613f2..5ab877d 100644
--- a/examples/spark-connect-server.yaml
+++ b/examples/spark-connect-server.yaml
@@ -24,8 +24,11 @@ spec:
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
+    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
+    spark.kubernetes.executor.podNamePrefix: "spark-connect-server"
+    spark.scheduler.mode: "FAIR"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
diff --git a/examples/spark-history-server-preview.yaml 
b/examples/spark-history-server-preview.yaml
deleted file mode 100644
index d090ba7..0000000
--- a/examples/spark-history-server-preview.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: spark-history-server
-spec:
-  mainClass: "org.apache.spark.deploy.history.HistoryServer"
-  sparkConf:
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
-    spark.jars.ivy: "/tmp/.ivy2.5.2"
-    spark.driver.memory: "2g"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
-    spark.ui.port: "18080"
-    spark.history.fs.logDirectory: "s3a://spark-events"
-    spark.history.fs.cleaner.enabled: "true"
-    spark.history.fs.cleaner.maxAge: "30d"
-    spark.history.fs.cleaner.maxNum: "100"
-    spark.history.fs.eventLog.rolling.maxFilesToRetain: "10"
-    spark.hadoop.fs.defaultFS: "s3a://spark-events"
-    spark.hadoop.fs.s3a.endpoint: "http://localstack:4566"
-    spark.hadoop.fs.s3a.path.style.access: "true"
-    spark.hadoop.fs.s3a.access.key: "test"
-    spark.hadoop.fs.s3a.secret.key: "test"
-  runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
-  applicationTolerations:
-    restartConfig:
-      restartPolicy: Always
-      maxRestartAttempts: 9223372036854775807
diff --git a/examples/spark-history-server.yaml 
b/examples/spark-history-server.yaml
index 77a051f..2754198 100644
--- a/examples/spark-history-server.yaml
+++ b/examples/spark-history-server.yaml
@@ -19,11 +19,11 @@ metadata:
 spec:
   mainClass: "org.apache.spark.deploy.history.HistoryServer"
   sparkConf:
-    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.1"
+    spark.jars.packages: "org.apache.hadoop:hadoop-aws:3.4.2"
     spark.jars.ivy: "/tmp/.ivy2.5.2"
     spark.driver.memory: "2g"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.ui.port: "18080"
     spark.history.fs.logDirectory: "s3a://spark-events"
     spark.history.fs.cleaner.enabled: "true"
@@ -36,7 +36,7 @@ spec:
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/spark-thrift-server-preview.yaml 
b/examples/spark-thrift-server-preview.yaml
deleted file mode 100644
index 4c730b7..0000000
--- a/examples/spark-thrift-server-preview.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-apiVersion: spark.apache.org/v1
-kind: SparkApplication
-metadata:
-  name: spark-thrift-server-preview
-spec:
-  mainClass: "org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-  sparkConf:
-    spark.dynamicAllocation.enabled: "true"
-    spark.dynamicAllocation.shuffleTracking.enabled: "true"
-    spark.dynamicAllocation.minExecutors: "3"
-    spark.dynamicAllocation.maxExecutors: "3"
-    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
-    spark.kubernetes.executor.podNamePrefix: "spark-thrift-server-preview"
-    spark.scheduler.mode: "FAIR"
-  runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
-  applicationTolerations:
-    restartConfig:
-      restartPolicy: Always
-      maxRestartAttempts: 9223372036854775807
diff --git a/examples/spark-thrift-server.yaml 
b/examples/spark-thrift-server.yaml
index c8450aa..5844c80 100644
--- a/examples/spark-thrift-server.yaml
+++ b/examples/spark-thrift-server.yaml
@@ -24,9 +24,12 @@ spec:
     spark.dynamicAllocation.minExecutors: "3"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: "apache/spark:4.0.1-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
+    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
+    spark.kubernetes.executor.podNamePrefix: "spark-thrift-server"
+    spark.scheduler.mode: "FAIR"
   runtimeVersions:
-    sparkVersion: "4.0.1"
+    sparkVersion: "4.1.0"
   applicationTolerations:
     restartConfig:
       restartPolicy: Always
diff --git a/examples/word-count-preview.yaml b/examples/word-count.yaml
similarity index 96%
rename from examples/word-count-preview.yaml
rename to examples/word-count.yaml
index cc51771..8496d79 100644
--- a/examples/word-count-preview.yaml
+++ b/examples/word-count.yaml
@@ -24,9 +24,9 @@ spec:
     spark.dynamicAllocation.enabled: "true"
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
-    spark.kubernetes.container.image: 
"apache/spark:{{SPARK_VERSION}}-java21-scala"
+    spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
     spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:
-    sparkVersion: "4.1.0-preview4"
+    sparkVersion: "4.1.0"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to