This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 8b1ee1caea86 [SPARK-54189][K8S] Remove `spark.kubernetes.pyspark.pythonVersion`
8b1ee1caea86 is described below
commit 8b1ee1caea8680c18fa283fb72269f08b7fc1795
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Nov 4 23:20:15 2025 -0800
[SPARK-54189][K8S] Remove `spark.kubernetes.pyspark.pythonVersion`
### What changes were proposed in this pull request?
This PR aims to remove the no-op `spark.kubernetes.pyspark.pythonVersion` configuration.
### Why are the changes needed?
`spark.kubernetes.pyspark.pythonVersion` was deprecated in Apache Spark 3.1.0 when Python 2 support was dropped.
Any value other than `3` fails validation automatically, so the configuration is technically a no-op and can be removed safely.
https://github.com/apache/spark/blob/84a3fa07ed5f7281282287f830ab6deefae5ccb9/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala#L598
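For reference, the replacement settings are the standard interpreter configurations mentioned in the removed docs. Below is a minimal, illustrative sketch (not part of this patch) of setting them from a Scala `SparkSession` builder; the `/usr/bin/python3` path is only an example and assumes the container image ships that interpreter.
```scala
import org.apache.spark.sql.SparkSession

// Point Spark at the desired Python interpreter directly instead of
// relying on the removed spark.kubernetes.pyspark.pythonVersion setting.
// The interpreter path below is a placeholder.
val spark = SparkSession.builder()
  .appName("pyspark-interpreter-config")
  .config("spark.pyspark.python", "/usr/bin/python3")        // executors
  .config("spark.pyspark.driver.python", "/usr/bin/python3") // driver
  .getOrCreate()
```
Equivalently, these keys can be passed via `--conf` on `spark-submit`, or the `PYSPARK_PYTHON` and `PYSPARK_DRIVER_PYTHON` environment variables can be exported.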
### Does this PR introduce _any_ user-facing change?
No behavior change.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52889 from dongjoon-hyun/SPARK-54189.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
docs/running-on-kubernetes.md | 11 -----------
.../scala/org/apache/spark/deploy/k8s/Config.scala | 18 +-----------------
.../deploy/k8s/features/DriverCommandFeatureStep.scala | 10 ----------
3 files changed, 1 insertion(+), 38 deletions(-)
diff --git a/docs/running-on-kubernetes.md b/docs/running-on-kubernetes.md
index 68cd9a78d0f3..bdc2a1a156b0 100644
--- a/docs/running-on-kubernetes.md
+++ b/docs/running-on-kubernetes.md
@@ -1284,17 +1284,6 @@ See the [configuration page](configuration.html) for information on Spark config
</td>
<td>2.4.0</td>
</tr>
-<tr>
- <td><code>spark.kubernetes.pyspark.pythonVersion</code></td>
- <td><code>"3"</code></td>
- <td>
- This sets the major Python version of the docker image used to run the driver and executor containers.
- It can be only "3". This configuration was deprecated from Spark 3.1.0, and is effectively no-op.
- Users should set 'spark.pyspark.python' and 'spark.pyspark.driver.python' configurations or
- 'PYSPARK_PYTHON' and 'PYSPARK_DRIVER_PYTHON' environment variables.
- </td>
- <td>2.4.0</td>
-</tr>
<tr>
<td><code>spark.kubernetes.kerberos.krb5.path</code></td>
<td><code>(none)</code></td>
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
index 0ae9b4a302fb..fafff5046b9d 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
@@ -21,7 +21,7 @@ import java.util.concurrent.TimeUnit
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.{ConfigBuilder, PYSPARK_DRIVER_PYTHON, PYSPARK_PYTHON}
+import org.apache.spark.internal.config.ConfigBuilder
private[spark] object Config extends Logging {
@@ -587,22 +587,6 @@ private[spark] object Config extends Logging {
"Ensure that memory overhead is non-negative")
.createWithDefault(0.1)
- val PYSPARK_MAJOR_PYTHON_VERSION =
- ConfigBuilder("spark.kubernetes.pyspark.pythonVersion")
- .doc(
- s"(Deprecated since Spark 3.1, please set '${PYSPARK_PYTHON.key}' and
" +
- s"'${PYSPARK_DRIVER_PYTHON.key}' configurations or $ENV_PYSPARK_PYTHON
and " +
- s"$ENV_PYSPARK_DRIVER_PYTHON environment variables instead.)")
- .version("2.4.0")
- .stringConf
- .checkValue("3" == _,
- "Python 2 was dropped from Spark 3.1, and only 3 is allowed in " +
- "this configuration. Note that this configuration was deprecated in
Spark 3.1. " +
- s"Please set '${PYSPARK_PYTHON.key}' and
'${PYSPARK_DRIVER_PYTHON.key}' " +
- s"configurations or $ENV_PYSPARK_PYTHON and
$ENV_PYSPARK_DRIVER_PYTHON environment " +
- "variables instead.")
- .createOptional
-
val KUBERNETES_KERBEROS_KRB5_FILE =
ConfigBuilder("spark.kubernetes.kerberos.krb5.path")
.doc("Specify the local location of the krb5.conf file to be mounted on
the driver " +
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala
index f15f5bc566b4..0574fa4868f3 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala
@@ -25,7 +25,6 @@ import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.internal.Logging
-import org.apache.spark.internal.LogKeys.{CONFIG, CONFIG2, CONFIG3, CONFIG4, CONFIG5}
import org.apache.spark.internal.config.{PYSPARK_DRIVER_PYTHON, PYSPARK_PYTHON}
import org.apache.spark.launcher.SparkLauncher
@@ -77,15 +76,6 @@ private[spark] class DriverCommandFeatureStep(conf: KubernetesDriverConf)
private[spark] def environmentVariables: Map[String, String] = sys.env
private def configureForPython(pod: SparkPod, res: String): SparkPod = {
- if (conf.get(PYSPARK_MAJOR_PYTHON_VERSION).isDefined) {
- logWarning(
- log"${MDC(CONFIG, PYSPARK_MAJOR_PYTHON_VERSION.key)} was deprecated
in Spark 3.1. " +
- log"Please set '${MDC(CONFIG2, PYSPARK_PYTHON.key)}' and " +
- log"'${MDC(CONFIG3, PYSPARK_DRIVER_PYTHON.key)}' " +
- log"configurations or ${MDC(CONFIG4, ENV_PYSPARK_PYTHON)} and " +
- log"${MDC(CONFIG5, ENV_PYSPARK_DRIVER_PYTHON)} environment
variables instead.")
- }
-
val pythonEnvs = {
KubernetesUtils.buildEnvVars(
Seq(
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]