This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 7214029db67b [SPARK-53165][CORE] Add `SparkExitCode.CLASS_NOT_FOUND`
7214029db67b is described below

commit 7214029db67b7f8f633e51c1b6c13fad916863f3
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Wed Aug 6 22:38:04 2025 -0700

    [SPARK-53165][CORE] Add `SparkExitCode.CLASS_NOT_FOUND`

    ### What changes were proposed in this pull request?

    This PR aims to promote the existing exit code, 101, to an official `SparkExitCode` constant in Apache Spark 4.1.0.

    ### Why are the changes needed?

    `SparkSubmit` has been exposing exit code `101` on `ClassNotFoundException` or `NoClassDefFoundError`. We should register it in the `SparkExitCode` object for consistency.

    https://github.com/apache/spark/blob/46fd5258a16523637f7ac5fa7ece16f626816454/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala#L1083

    ### Does this PR introduce _any_ user-facing change?

    No behavior change.

    ### How was this patch tested?

    Pass the CIs.

    ### Was this patch authored or co-authored using generative AI tooling?

    No.

    Closes #51892 from dongjoon-hyun/SPARK-53165.

    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 6 ++----
 core/src/main/scala/org/apache/spark/util/SparkExitCode.scala | 3 +++
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 7029980cdb44..1cd63849de63 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -996,14 +996,14 @@ private[spark] class SparkSubmit extends Logging {
           // TODO(SPARK-42375): Should point out the user-facing page here instead.
           logInfo("You need to specify Spark Connect jars with --jars or --packages.")
         }
-        throw new SparkUserAppException(CLASS_NOT_FOUND_EXIT_STATUS)
+        throw new SparkUserAppException(SparkExitCode.CLASS_NOT_FOUND)
       case e: NoClassDefFoundError =>
         logError(log"Failed to load ${MDC(LogKeys.CLASS_NAME, childMainClass)}", e)
         if (e.getMessage.contains("org/apache/hadoop/hive")) {
           logInfo("Failed to load hive class.")
           logInfo("You need to build Spark with -Phive and -Phive-thriftserver.")
         }
-        throw new SparkUserAppException(CLASS_NOT_FOUND_EXIT_STATUS)
+        throw new SparkUserAppException(SparkExitCode.CLASS_NOT_FOUND)
     }
 
     val app: SparkApplication = if (classOf[SparkApplication].isAssignableFrom(mainClass)) {
@@ -1080,8 +1080,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
   private val SPARKR_PACKAGE_ARCHIVE = "sparkr.zip"
   private val R_PACKAGE_ARCHIVE = "rpkg.zip"
 
-  private val CLASS_NOT_FOUND_EXIT_STATUS = 101
-
   // Following constants are visible for testing.
   private[deploy] val YARN_CLUSTER_SUBMIT_CLASS =
     "org.apache.spark.deploy.yarn.YarnClusterApplication"
diff --git a/core/src/main/scala/org/apache/spark/util/SparkExitCode.scala b/core/src/main/scala/org/apache/spark/util/SparkExitCode.scala
index e8f8788243cd..0ffc2afd9635 100644
--- a/core/src/main/scala/org/apache/spark/util/SparkExitCode.scala
+++ b/core/src/main/scala/org/apache/spark/util/SparkExitCode.scala
@@ -45,6 +45,9 @@ private[spark] object SparkExitCode {
      OutOfMemoryError. */
   val OOM = 52
 
+  /** Exit due to ClassNotFoundException or NoClassDefFoundError. */
+  val CLASS_NOT_FOUND = 101
+
   /** Exit because the driver is running over the given threshold. */
   val DRIVER_TIMEOUT = 124
 
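For readers skimming the change, the following is a minimal, self-contained Scala sketch of the pattern this commit formalizes. It is not Spark's actual implementation: `ExitCodeSketch`, `loadMainClass`, and the class name are invented for illustration, and `SparkExitCode`/`SparkUserAppException` here are simplified stand-ins for the real Spark types. The idea is that a named exit-code constant travels inside an exception from the point of failure to a top-level handler, which turns it into the process exit status.

```scala
// Minimal sketch, NOT Spark's actual code: a named exit code rides inside
// an exception from the failure site to the top-level handler.
object ExitCodeSketch {

  object SparkExitCode {
    // Mirrors the constant promoted by this commit.
    val CLASS_NOT_FOUND = 101
  }

  // An exception that carries the intended process exit status.
  final case class SparkUserAppException(exitCode: Int)
      extends RuntimeException(s"User application exited with $exitCode")

  // Translate class-loading failures into the well-known code instead of
  // letting a raw stack trace decide the JVM's exit status.
  def loadMainClass(name: String): Class[_] =
    try Class.forName(name)
    catch {
      case _: ClassNotFoundException | _: NoClassDefFoundError =>
        throw SparkUserAppException(SparkExitCode.CLASS_NOT_FOUND)
    }

  def main(args: Array[String]): Unit =
    try {
      loadMainClass("com.example.MissingMain") // hypothetical missing class
    } catch {
      // The carried code becomes the exit status, so a wrapping script can
      // test for 101 by name rather than a magic number.
      case SparkUserAppException(code) => sys.exit(code)
    }
}
```

Running the sketch and then checking `$?` in a shell prints 101, matching what a script wrapping spark-submit already observes when the user's main class cannot be loaded; per the commit message, promoting the constant changes only where the number is defined, not the observable exit status.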