This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 7d0b921c465f [SPARK-42841][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_2005
7d0b921c465f is described below

commit 7d0b921c465f7932cb76bb5f130d43d933664dec
Author: xingyue <[email protected]>
AuthorDate: Tue Jun 17 18:53:01 2025 +0800

    [SPARK-42841][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_2005
    
    ### What changes were proposed in this pull request?
    See https://issues.apache.org/jira/browse/SPARK-42841: replace _LEGACY_ERROR_TEMP_2005 with the meaningful name DATATYPE_CANNOT_ORDER. Some points:
    - To ensure full UT coverage, the overload taking a `DataType` is refactored to delegate to the overload taking a `String` (see the sketch below).
    - sqlState '0A000' means "feature not supported", i.e. the NOT_SUPPORTED/CANNOT family of errors.
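
    A minimal sketch (not part of this commit) of how the renamed condition surfaces; it mirrors the new UT below, assuming the standard `SparkThrowable.getCondition` accessor:

    ```scala
    import org.apache.spark.SparkIllegalArgumentException
    import org.apache.spark.sql.catalyst.types.PhysicalDataType
    import org.apache.spark.sql.types.CalendarIntervalType

    try {
      // CalendarIntervalType defines no ordering, so this throws.
      PhysicalDataType.ordering(CalendarIntervalType)
    } catch {
      case e: SparkIllegalArgumentException =>
        // Formerly reported under "_LEGACY_ERROR_TEMP_2005".
        assert(e.getCondition == "DATATYPE_CANNOT_ORDER")
        // Renders as: "Type PhysicalCalendarIntervalType does not support
        // ordered operations." (SQLSTATE 0A000)
    }
    ```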
    
    ### Why are the changes needed?
    See https://issues.apache.org/jira/browse/SPARK-42841.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    UT added in QueryExecutionErrorsSuite.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #51110 from yuexing/SPARK-42841-2005.
    
    Lead-authored-by: xingyue <[email protected]>
    Co-authored-by: Yue <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 .../utils/src/main/resources/error/error-conditions.json | 11 ++++++-----
 .../apache/spark/sql/errors/QueryExecutionErrors.scala   |  6 ++----
 .../spark/sql/errors/QueryExecutionErrorsSuite.scala     | 16 ++++++++++++++++
 3 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index c91106b05d99..5def48196cf3 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -927,6 +927,12 @@
     ],
     "sqlState" : "KD011"
   },
+  "DATATYPE_CANNOT_ORDER" : {
+    "message" : [
+      "Type <dataType> does not support ordered operations."
+    ],
+    "sqlState" : "0A000"
+  },
   "DATATYPE_MISMATCH" : {
     "message" : [
       "Cannot resolve <sqlExpr> due to data type mismatch:"
@@ -7870,11 +7876,6 @@
       "Unsuccessful try to zip maps with <size> unique keys due to exceeding 
the array size limit <maxRoundedArrayLength>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2005" : {
-    "message" : [
-      "Type <dataType> does not support ordered operations."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2017" : {
     "message" : [
       "not resolved."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 83ac4c8a4896..68d4fe690007 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -363,15 +363,13 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
 
   def orderedOperationUnsupportedByDataTypeError(
       dataType: DataType): SparkIllegalArgumentException = {
-    new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2005",
-      messageParameters = Map("dataType" -> dataType.toString()))
+    orderedOperationUnsupportedByDataTypeError(dataType.toString())
   }
 
   def orderedOperationUnsupportedByDataTypeError(
       dataType: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2005",
+      errorClass = "DATATYPE_CANNOT_ORDER",
       messageParameters = Map("dataType" -> dataType))
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 46f2502582c3..633104d02d00 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -1275,6 +1275,22 @@ class QueryExecutionErrorsSuite
       sql("ALTER TABLE t SET LOCATION '/mister/spark'")
     }
   }
+
+  test("SPARK-42841: SQL query with unsupported data types for ordering") {
+    import org.apache.spark.sql.catalyst.types.PhysicalDataType
+    import org.apache.spark.sql.types.CalendarIntervalType
+
+    // Test PhysicalDataType.ordering() with CalendarIntervalType
+    // It's hard to write a SQL test that passes argument verification but fails
+    // ordering verification, so we test the error directly.
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        PhysicalDataType.ordering(CalendarIntervalType)
+      },
+      condition = "DATATYPE_CANNOT_ORDER",
+      parameters = Map("dataType" -> "PhysicalCalendarIntervalType"))
+  }
+
 }
 
 class FakeFileSystemSetPermission extends LocalFileSystem {

