This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 566980fba1c [SPARK-38462][CORE] Add error class INTERNAL_ERROR_EXECUTOR
566980fba1c is described below

commit 566980fba1c36fc41ffef9be41382c6bbeec9f55
Author: Bo Zhang <[email protected]>
AuthorDate: Sat May 6 14:14:13 2023 +0300

    [SPARK-38462][CORE] Add error class INTERNAL_ERROR_EXECUTOR
    
    ### What changes were proposed in this pull request?
    This change adds an error class INTERNAL_ERROR_EXECUTOR and changes exceptions created in the executor package to use that error class (a short usage sketch follows the commit message below).
    
    ### Why are the changes needed?
    This is to move the exceptions created in the org.apache.spark.executor package onto an error class.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. Exceptions created in the org.apache.spark.executor package will now carry the error class INTERNAL_ERROR_EXECUTOR.
    
    ### How was this patch tested?
    Existing unit tests.
    
    Closes #41048 from bozhang2820/spark-38462.
    
    Authored-by: Bo Zhang <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
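    
    Before the diff itself, a minimal Scala sketch of the pattern this commit applies. SparkException.internalError and the "EXECUTOR" category are taken from the diff below; the getErrorClass assertion is an assumption about how that category maps to the new INTERNAL_ERROR_EXECUTOR entry in error-classes.json, and the sample message values are made up.
    
        import org.apache.spark.SparkException
    
        val errMsg = "Managed memory leak detected; size = 1024 bytes, task 0.0"
    
        // Before this commit: a plain exception with no error class attached.
        val before = new SparkException(errMsg)
    
        // After this commit: an internal error tagged with the EXECUTOR category,
        // assumed to resolve to the INTERNAL_ERROR_EXECUTOR error class.
        val after = SparkException.internalError(errMsg, category = "EXECUTOR")
        assert(after.getErrorClass == "INTERNAL_ERROR_EXECUTOR")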
---
 core/src/main/resources/error/error-classes.json                  | 6 ++++++
 core/src/main/scala/org/apache/spark/executor/Executor.scala      | 8 ++++----
 .../scala/org/apache/spark/executor/ProcfsMetricsGetter.scala     | 3 ++-
 3 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 88740ca66ff..8b0d98c7e3d 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -801,6 +801,12 @@
     ],
     "sqlState" : "XX000"
   },
+  "INTERNAL_ERROR_EXECUTOR" : {
+    "message" : [
+      "<message>"
+    ],
+    "sqlState" : "XX000"
+  },
   "INTERVAL_ARITHMETIC_OVERFLOW" : {
     "message" : [
       "<message>.<alternative>"
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 4ea11d753e1..5d623b22abd 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -561,7 +561,7 @@ private[spark] class Executor(
           if (freedMemory > 0 && !threwException) {
             val errMsg = s"Managed memory leak detected; size = $freedMemory 
bytes, $taskName"
             if (conf.get(UNSAFE_EXCEPTION_ON_MEMORY_LEAK)) {
-              throw new SparkException(errMsg)
+              throw SparkException.internalError(errMsg, category = "EXECUTOR")
             } else {
               logWarning(errMsg)
             }
@@ -572,7 +572,7 @@ private[spark] class Executor(
               s"${releasedLocks.size} block locks were not released by 
$taskName\n" +
                 releasedLocks.mkString("[", ", ", "]")
             if (conf.get(STORAGE_EXCEPTION_PIN_LEAK)) {
-              throw new SparkException(errMsg)
+              throw SparkException.internalError(errMsg, category = "EXECUTOR")
             } else {
               logInfo(errMsg)
             }
@@ -934,9 +934,9 @@ private[spark] class Executor(
           } else {
            // In non-local-mode, the exception thrown here will bubble up to the uncaught exception
             // handler and cause the executor JVM to exit.
-            throw new SparkException(
+            throw SparkException.internalError(
               s"Killing executor JVM because killed task $taskId could not be 
stopped within " +
-                s"$killTimeoutMs ms.")
+                s"$killTimeoutMs ms.", category = "EXECUTOR")
           }
         }
       } finally {
diff --git a/core/src/main/scala/org/apache/spark/executor/ProcfsMetricsGetter.scala b/core/src/main/scala/org/apache/spark/executor/ProcfsMetricsGetter.scala
index 2c9ccbc5d86..5448d7da6d6 100644
--- a/core/src/main/scala/org/apache/spark/executor/ProcfsMetricsGetter.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ProcfsMetricsGetter.scala
@@ -148,7 +148,8 @@ private[spark] class ProcfsMetricsGetter(procfsDir: String = "/proc/") extends L
       if (exitCode != 0 && exitCode > 2) {
         val cmd = builder.command().toArray.mkString(" ")
         logWarning(s"Process $cmd exited with code $exitCode and stderr: 
$errorString")
-        throw new SparkException(s"Process $cmd exited with code $exitCode")
+        throw SparkException.internalError(msg = s"Process $cmd exited with code $exitCode",
+          category = "EXECUTOR")
       }
       childPidsInInt
     } catch {
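
For context (not part of the commit): the <message> placeholder in the new JSON entry is filled with the original exception text. A sketch of the assumed user-facing result, following Spark's "[ERROR_CLASS] message" convention; the "ps" command and exit code here are hypothetical:

    val e = SparkException.internalError(msg = "Process ps exited with code 3",
      category = "EXECUTOR")
    // Assumed rendering:
    // e.getMessage == "[INTERNAL_ERROR_EXECUTOR] Process ps exited with code 3"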


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
