This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 080b656  [SPARK-55680] Remove 
`spark.kubernetes.driver.pod.excludedFeatureSteps` config from examples
080b656 is described below

commit 080b6568da2e391a3f470a8334924d649efc55ee
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Wed Feb 25 03:26:02 2026 -0800

    [SPARK-55680] Remove `spark.kubernetes.driver.pod.excludedFeatureSteps` 
config from examples
    
    ### What changes were proposed in this pull request?
    
    This PR removes unnecessary 
`spark.kubernetes.driver.pod.excludedFeatureSteps=org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep`
 config.
    
    ### Why are the changes needed?
    
    Since SPARK-55676, `Apache Spark K8s Operator` doesn't need to exclude 
`KerberosConfDriverFeatureStep` because Apache Hadoop 3.4.3 supports Java 25.
    - #521
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change.
    
    ### How was this patch tested?
    
    Manual tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Generated-by: `Gemini 3.1 Pro (High)` on `Antigravity`
    
    Closes #522 from dongjoon-hyun/SPARK-55680.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 examples/pi-with-comet.yaml                        | 1 -
 examples/pi-with-instanceconfig.yaml               | 1 -
 examples/pi-with-on-demand-pvc.yaml                | 1 -
 examples/pi.yaml                                   | 1 -
 examples/spark-connect-server-iceberg.yaml         | 1 -
 examples/spark-connect-server.yaml                 | 1 -
 examples/spark-history-server-with-jws-filter.yaml | 1 -
 examples/spark-history-server.yaml                 | 1 -
 examples/spark-thrift-server.yaml                  | 1 -
 examples/word-count-celeborn.yaml                  | 1 -
 examples/word-count.yaml                           | 1 -
 11 files changed, 11 deletions(-)

diff --git a/examples/pi-with-comet.yaml b/examples/pi-with-comet.yaml
index 614642f..73c6459 100644
--- a/examples/pi-with-comet.yaml
+++ b/examples/pi-with-comet.yaml
@@ -31,7 +31,6 @@ spec:
     spark.executor.extraClassPath: "local:///comet/comet.jar"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:3.5.8-java17"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.memory.offHeap.enabled: "true"
     spark.memory.offHeap.size: "1g"
     spark.plugins: "org.apache.spark.CometPlugin"
diff --git a/examples/pi-with-instanceconfig.yaml 
b/examples/pi-with-instanceconfig.yaml
index a326b6e..8c75fbe 100644
--- a/examples/pi-with-instanceconfig.yaml
+++ b/examples/pi-with-instanceconfig.yaml
@@ -24,7 +24,6 @@ spec:
     spark.executor.instances: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     instanceConfig:
       maxExecutors: 3
diff --git a/examples/pi-with-on-demand-pvc.yaml 
b/examples/pi-with-on-demand-pvc.yaml
index 8cc8ae3..f25df33 100644
--- a/examples/pi-with-on-demand-pvc.yaml
+++ b/examples/pi-with-on-demand-pvc.yaml
@@ -26,7 +26,6 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:
 "/data"
     
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:
 "false"
     
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:
 "OnDemand"
diff --git a/examples/pi.yaml b/examples/pi.yaml
index a70e3aa..d0849a3 100644
--- a/examples/pi.yaml
+++ b/examples/pi.yaml
@@ -25,7 +25,6 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
     ttlAfterStopMillis: 10000
diff --git a/examples/spark-connect-server-iceberg.yaml 
b/examples/spark-connect-server-iceberg.yaml
index c9e3353..b6d0e88 100644
--- a/examples/spark-connect-server-iceberg.yaml
+++ b/examples/spark-connect-server-iceberg.yaml
@@ -31,7 +31,6 @@ spec:
     spark.jars.packages: 
"org.apache.hadoop:hadoop-aws:3.4.1,org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.1"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:4.0.2"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.executor.podNamePrefix: "spark-connect-server-iceberg"
     spark.scheduler.mode: "FAIR"
     spark.sql.catalog.s3.type: "hadoop"
diff --git a/examples/spark-connect-server.yaml 
b/examples/spark-connect-server.yaml
index aa64c12..aa23b3a 100644
--- a/examples/spark-connect-server.yaml
+++ b/examples/spark-connect-server.yaml
@@ -25,7 +25,6 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.executor.podNamePrefix: "spark-connect-server"
     spark.scheduler.mode: "FAIR"
   applicationTolerations:
diff --git a/examples/spark-history-server-with-jws-filter.yaml 
b/examples/spark-history-server-with-jws-filter.yaml
index 0746924..3826394 100644
--- a/examples/spark-history-server-with-jws-filter.yaml
+++ b/examples/spark-history-server-with-jws-filter.yaml
@@ -35,7 +35,6 @@ spec:
     spark.hadoop.fs.s3a.path.style.access: "true"
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     # JWS Filter Configuration
     spark.ui.filters: "org.apache.spark.ui.JWSFilter"
     spark.org.apache.spark.ui.JWSFilter.param.secretKey: 
"VmlzaXQgaHR0cHM6Ly9zcGFyay5hcGFjaGUub3JnIHRvIGRvd25sb2FkIEFwYWNoZSBTcGFyay4="
diff --git a/examples/spark-history-server.yaml 
b/examples/spark-history-server.yaml
index b3df1ab..97295b8 100644
--- a/examples/spark-history-server.yaml
+++ b/examples/spark-history-server.yaml
@@ -35,7 +35,6 @@ spec:
     spark.hadoop.fs.s3a.path.style.access: "true"
     spark.hadoop.fs.s3a.access.key: "test"
     spark.hadoop.fs.s3a.secret.key: "test"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   runtimeVersions:
     sparkVersion: "4.1.1"
   applicationTolerations:
diff --git a/examples/spark-thrift-server.yaml 
b/examples/spark-thrift-server.yaml
index c3e3a1f..b14a7f2 100644
--- a/examples/spark-thrift-server.yaml
+++ b/examples/spark-thrift-server.yaml
@@ -25,7 +25,6 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.executor.podNamePrefix: "spark-thrift-server"
     spark.scheduler.mode: "FAIR"
   runtimeVersions:
diff --git a/examples/word-count-celeborn.yaml 
b/examples/word-count-celeborn.yaml
index 86e9956..c05b7e8 100644
--- a/examples/word-count-celeborn.yaml
+++ b/examples/word-count-celeborn.yaml
@@ -28,7 +28,6 @@ spec:
     spark.kubernetes.container.image: "apache/spark:4.0.2-scala"
     spark.kubernetes.driver.limit.cores: "5"
     spark.kubernetes.driver.master: "local[10]"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
     spark.kubernetes.driver.request.cores: "5"
     spark.shuffle.manager: 
"org.apache.spark.shuffle.celeborn.SparkShuffleManager"
     spark.shuffle.service.enabled: "false"
diff --git a/examples/word-count.yaml b/examples/word-count.yaml
index b3af71a..783c02f 100644
--- a/examples/word-count.yaml
+++ b/examples/word-count.yaml
@@ -25,7 +25,6 @@ spec:
     spark.dynamicAllocation.maxExecutors: "3"
     spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
     spark.kubernetes.container.image: "apache/spark:{{SPARK_VERSION}}-scala"
-    spark.kubernetes.driver.pod.excludedFeatureSteps: 
"org.apache.spark.deploy.k8s.features.KerberosConfDriverFeatureStep"
   applicationTolerations:
     resourceRetainPolicy: OnFailure
   runtimeVersions:


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to