This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new abe785379335 [SPARK-53789][SQL][CONNECT] Canonicalize error condition CANNOT_MODIFY_STATIC_CONFIG
abe785379335 is described below

commit abe785379335792b9edd548dd365ebe5ed5e9230
Author: Cheng Pan <[email protected]>
AuthorDate: Wed Oct 15 12:32:52 2025 +0800

    [SPARK-53789][SQL][CONNECT] Canonicalize error condition CANNOT_MODIFY_STATIC_CONFIG
    
    ### What changes were proposed in this pull request?
    
    Introduce `CANNOT_MODIFY_STATIC_CONFIG` and:
    
    1. Migrate the error condition `_LEGACY_ERROR_TEMP_3050` to `CANNOT_MODIFY_STATIC_CONFIG`.
    2. Migrate `cannotModifyValueOfStaticConfigError` from `CANNOT_MODIFY_CONFIG` (whose suggestion directs users to the DDL migration guide) to `CANNOT_MODIFY_STATIC_CONFIG`, because the DDL migration guide does not help in this case. See the sketch below.
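    
    As an illustrative sketch of the behavior change (assuming an active `SparkSession` named `spark`; not part of the patch itself):
    
    ```scala
    // Attempting to modify a static SQL config at runtime:
    spark.sql("SET spark.sql.globalTempDatabase=10")
    // Before: throws AnalysisException with condition CANNOT_MODIFY_CONFIG,
    //         whose message directs users to the DDL migration guide.
    // After:  throws AnalysisException with condition CANNOT_MODIFY_STATIC_CONFIG:
    //         Cannot modify the value of the static Spark config: "spark.sql.globalTempDatabase".
    ```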
    
    ### Why are the changes needed?
    
    To make the error message more consistent.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the error condition and message for modifying a static config change as described above.
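    
    For example, based on the error definitions in this patch, the message for setting a static config changes from the legacy template "Cannot modify the value of a static config: <k>" to "Cannot modify the value of the static Spark config: <key>."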
    
    ### How was this patch tested?
    
    Pass GHA.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52506 from pan3793/SPARK-53789.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 common/utils/src/main/resources/error/error-conditions.json | 11 ++++++-----
 .../org/apache/spark/sql/errors/CompilationErrors.scala     | 12 ++++++++++++
 .../scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala |  6 ++----
 .../apache/spark/sql/errors/QueryCompilationErrors.scala    | 13 -------------
 .../scala/org/apache/spark/sql/connect/test/SQLHelper.scala |  7 ++-----
 .../scala/org/apache/spark/sql/internal/SQLConfSuite.scala  | 12 ++++++------
 .../sql/hive/thriftserver/HiveThriftServer2Suites.scala     |  2 +-
 .../org/apache/spark/sql/hive/execution/SQLQuerySuite.scala |  7 +++----
 8 files changed, 32 insertions(+), 38 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 5f1d3d16d379..52d9a0bce7ec 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -440,6 +440,12 @@
     ],
     "sqlState" : "46110"
   },
+  "CANNOT_MODIFY_STATIC_CONFIG" : {
+    "message" : [
+      "Cannot modify the value of the static Spark config: <key>."
+    ],
+    "sqlState" : "46110"
+  },
   "CANNOT_PARSE_DECIMAL" : {
     "message" : [
       "Cannot parse decimal. Please ensure that the input is a valid number 
with optional decimal point or comma separators."
@@ -9185,11 +9191,6 @@
       "Failed to get block <blockId>, which is not a shuffle block"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3050" : {
-    "message" : [
-      "Cannot modify the value of a static config: <k>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3052" : {
     "message" : [
       "Unexpected resolved action: <other>"
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
index 617cab4b2a39..6a275b9ad0c1 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
@@ -131,6 +131,18 @@ private[sql] trait CompilationErrors extends DataTypeErrorsBase {
       errorClass = "SPECIFY_CLUSTER_BY_WITH_BUCKETING_IS_NOT_ALLOWED",
       messageParameters = Map.empty)
   }
+
+  def cannotModifyValueOfStaticConfigError(key: String): Throwable = {
+    new AnalysisException(
+      errorClass = "CANNOT_MODIFY_STATIC_CONFIG",
+      messageParameters = Map("key" -> toSQLConf(key)))
+  }
+
+  def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
+    new AnalysisException(
+      errorClass = "CANNOT_MODIFY_CONFIG",
+      messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
+  }
 }
 
 private[sql] object CompilationErrors extends CompilationErrors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
index bd0455d76a86..c5c68e902a2d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.errors.CompilationErrors
 import org.apache.spark.sql.internal.SQLConf
 
 /**
@@ -47,9 +47,7 @@ trait SQLConfHelper {
     }
     keys.lazyZip(values).foreach { (k, v) =>
       if (SQLConf.isStaticConfigKey(k)) {
-        throw new AnalysisException(
-          errorClass = "_LEGACY_ERROR_TEMP_3050",
-          messageParameters = Map("k" -> k))
+        throw CompilationErrors.cannotModifyValueOfStaticConfigError(k)
       }
       conf.setConfString(k, v)
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 091273aa946e..7d79c5d5d642 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3475,19 +3475,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "config" -> SQLConf.DATAFRAME_PIVOT_MAX_VALUES.key))
   }
 
-  def cannotModifyValueOfStaticConfigError(key: String): Throwable = {
-    new AnalysisException(
-      errorClass = "CANNOT_MODIFY_CONFIG",
-      messageParameters = Map("key" -> toSQLConf(key), "docroot" -> 
SPARK_DOC_ROOT)
-    )
-  }
-
-  def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
-    new AnalysisException(
-      errorClass = "CANNOT_MODIFY_CONFIG",
-      messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
-  }
-
   def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1327",
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
index f23221f1a46c..b8d1062c3b3b 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
@@ -21,9 +21,9 @@ import java.util.UUID
 
 import org.scalatest.Assertions.fail
 
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connect.{DataFrame, SparkSession, SQLImplicits}
+import org.apache.spark.sql.errors.CompilationErrors
 import org.apache.spark.util.{SparkErrorUtils, SparkFileUtils}
 
 trait SQLHelper {
@@ -59,11 +59,8 @@ trait SQLHelper {
       if (spark.conf.isModifiable(k)) {
         spark.conf.set(k, v)
       } else {
-        throw new AnalysisException(
-          errorClass = "_LEGACY_ERROR_TEMP_3050",
-          messageParameters = Map("k" -> k))
+        throw CompilationErrors.cannotModifyValueOfStaticConfigError(k)
       }
-
     }
     try f
     finally {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 74a39322480d..829774789efb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -236,8 +236,8 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
     // static sql configs
     checkError(
       exception = intercept[AnalysisException](sql(s"RESET 
${StaticSQLConf.WAREHOUSE_PATH.key}")),
-      condition = "CANNOT_MODIFY_CONFIG",
-      parameters = Map("key" -> "\"spark.sql.warehouse.dir\"", "docroot" -> 
SPARK_DOC_ROOT))
+      condition = "CANNOT_MODIFY_STATIC_CONFIG",
+      parameters = Map("key" -> "\"spark.sql.warehouse.dir\""))
 
   }
 
@@ -348,13 +348,13 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
   test("cannot set/unset static SQL conf") {
     checkError(
       exception = intercept[AnalysisException](sql(s"SET 
${GLOBAL_TEMP_DATABASE.key}=10")),
-      condition = "CANNOT_MODIFY_CONFIG",
-      parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" 
-> SPARK_DOC_ROOT)
+      condition = "CANNOT_MODIFY_STATIC_CONFIG",
+      parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"")
     )
     checkError(
       exception = intercept[AnalysisException](spark.conf.unset(GLOBAL_TEMP_DATABASE.key)),
-      condition = "CANNOT_MODIFY_CONFIG",
-      parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" 
-> SPARK_DOC_ROOT)
+      condition = "CANNOT_MODIFY_STATIC_CONFIG",
+      parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"")
     )
   }
 
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 44008481ff1e..9db2beaf1e7a 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -1063,7 +1063,7 @@ class SingleSessionSuite extends HiveThriftServer2TestBase {
         statement.executeQuery("SET 
spark.sql.hive.thriftServer.singleSession=false")
       }.getMessage
       assert(e.contains(
-        "CANNOT_MODIFY_CONFIG"))
+        "CANNOT_MODIFY_STATIC_CONFIG"))
     }
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index ae53691d4931..e6c0f0c651e4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -25,7 +25,7 @@ import java.util.{Locale, Set}
 
 import org.apache.hadoop.fs.{FileSystem, Path}
 
-import org.apache.spark.{SPARK_DOC_ROOT, SparkException, TestUtils}
+import org.apache.spark.{SparkException, TestUtils}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -2464,9 +2464,8 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHi
       "spark.sql.hive.metastore.barrierPrefixes").foreach { key =>
       checkError(
         exception = intercept[AnalysisException](sql(s"set $key=abc")),
-        condition = "CANNOT_MODIFY_CONFIG",
-        parameters = Map(
-          "key" -> toSQLConf(key), "docroot" -> SPARK_DOC_ROOT)
+        condition = "CANNOT_MODIFY_STATIC_CONFIG",
+        parameters = Map("key" -> toSQLConf(key))
       )
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
