This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a90b6157fc6f [SPARK-54526][SQL] Rename _LEGACY_ERROR_TEMP_1133 and add sqlState
a90b6157fc6f is described below

commit a90b6157fc6fb66246b12b1627bc5378f6a41e33
Author: Ubuntu <[email protected]>
AuthorDate: Sat Nov 29 01:01:19 2025 +0800

    [SPARK-54526][SQL] Rename _LEGACY_ERROR_TEMP_1133 and add sqlState
    
    ### What changes were proposed in this pull request?
    
    I rename _LEGACY_ERROR_TEMP_1133 to a more understandable name (USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH) and add a sqlState.
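
    A minimal sketch (not part of this PR) of how a caller that matched on the old identifier could adapt, assuming the getCondition/getSqlState accessors on SparkThrowable; `doRead` is a hypothetical placeholder for a read that supplies a schema the source cannot honor:

    ```scala
    import org.apache.spark.sql.AnalysisException

    object SchemaMismatchHandling {
      def classifySchemaMismatch(doRead: () => Unit): Unit = {
        try {
          doRead()
        } catch {
          case e: AnalysisException
              if e.getCondition == "USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH" =>
            // Previously this surfaced as "_LEGACY_ERROR_TEMP_1133" with no SQLSTATE.
            println(s"schema mismatch (SQLSTATE ${e.getSqlState}): ${e.getMessage}")
        }
      }
    }
    ```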
    
    ### Why are the changes needed?
    
    This is a frequently hit error that is not yet classified under a proper error condition.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    This is a small refactoring; the existing tests already cover it.
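
    A minimal sketch (not part of this PR) of the kind of assertion such error tests perform, assuming test code living under package org.apache.spark.sql (QueryCompilationErrors is private[sql]) and the getCondition/getSqlState accessors on SparkThrowable:

    ```scala
    package org.apache.spark.sql

    import org.apache.spark.SparkThrowable
    import org.apache.spark.sql.errors.QueryCompilationErrors
    import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

    object SchemaMismatchConditionCheck {
      def main(args: Array[String]): Unit = {
        val userSchema = StructType(Seq(StructField("id", IntegerType)))
        val actualSchema = StructType(Seq(StructField("id", StringType)))

        // Build the error through the helper touched by this PR and inspect its metadata.
        val e = QueryCompilationErrors
          .userSpecifiedSchemaMismatchActualSchemaError(userSchema, actualSchema)
          .asInstanceOf[SparkThrowable]

        assert(e.getCondition == "USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH")
        assert(e.getSqlState == "42K03")
      }
    }
    ```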
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #53198 from aleksandr-chernousov-db/le_1133.
    
    Authored-by: Ubuntu <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../src/main/resources/error/error-conditions.json      | 17 +++++++++--------
 .../spark/sql/errors/QueryCompilationErrors.scala       |  2 +-
 2 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 0731fbec7231..352fae5aca82 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -7345,6 +7345,15 @@
     ],
     "sqlState" : "P0001"
   },
+  "USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH" : {
+    "message" : [
+      "The user-specified schema doesn't match the actual schema:",
+      "user-specified: <schema>, actual: <actualSchema>. If you're using",
+      "DataFrameReader.schema API or creating a table, please do not specify 
the schema.",
+      "Or if you're scanning an existed table, please drop it and re-create 
it."
+    ],
+    "sqlState" : "42K03"
+  },
   "USER_SPECIFIED_AND_INFERRED_SCHEMA_NOT_COMPATIBLE" : {
     "message" : [
       "Table '<tableName>' has a user-specified schema that is incompatible 
with the schema",
@@ -7927,14 +7936,6 @@
       "A schema needs to be specified when using <className>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1133" : {
-    "message" : [
-      "The user-specified schema doesn't match the actual schema:",
-      "user-specified: <schema>, actual: <actualSchema>. If you're using",
-      "DataFrameReader.schema API or creating a table, please do not specify 
the schema.",
-      "Or if you're scanning an existed table, please drop it and re-create 
it."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1134" : {
     "message" : [
       "Unable to infer schema for <format> at <fileCatalog>. It must be 
specified manually."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 6ec16b8eaa3a..309dedb963cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1726,7 +1726,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
   def userSpecifiedSchemaMismatchActualSchemaError(
       schema: StructType, actualSchema: StructType): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1133",
+      errorClass = "USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH",
       messageParameters = Map(
         "schema" -> schema.toDDL,
         "actualSchema" -> actualSchema.toDDL))


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
