This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e5508443f66 [SPARK-41729][CORE][SQL] Rename `_LEGACY_ERROR_TEMP_0011` to `UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES`
e5508443f66 is described below
commit e5508443f66d92fe5106bcdf7f2a868164c62c9c
Author: yangjie01 <[email protected]>
AuthorDate: Wed Dec 28 11:36:47 2022 +0500
[SPARK-41729][CORE][SQL] Rename `_LEGACY_ERROR_TEMP_0011` to `UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES`
### What changes were proposed in this pull request?
In the PR, I propose to assign the name
`UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES` to the error class
`_LEGACY_ERROR_TEMP_0011`.
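For illustration, here is a minimal sketch of how the renamed error class surfaces to a user. It is a hypothetical driver program, not part of this patch: the object name, the `local[1]` master, and the temp view `r` are assumptions, and the query mirrors the ones exercised in `ErrorParserSuite`/`PlanParserSuite` below. It assumes `ParseException` exposes `getErrorClass` via `SparkThrowable`.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.parser.ParseException

object CombinationClausesExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("demo").getOrCreate()
    // Hypothetical temp view matching the table name used in the parser tests.
    spark.range(10).toDF("q").createOrReplaceTempView("r")
    try {
      // ORDER BY and CLUSTER BY cannot be combined in one query block,
      // so this query is rejected while parsing.
      spark.sql("SELECT * FROM r ORDER BY q CLUSTER BY q").collect()
    } catch {
      case e: ParseException =>
        // After this change the reported error class is
        // UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES
        // instead of _LEGACY_ERROR_TEMP_0011.
        println(e.getErrorClass)
    } finally {
      spark.stop()
    }
  }
}
```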
### Why are the changes needed?
Proper names of error classes should improve user experience with Spark SQL.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GitHub Actions (GA).
Closes #39235 from LuciferYang/SPARK-41729.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 10 +++++-----
.../scala/org/apache/spark/sql/errors/QueryParsingErrors.scala | 2 +-
.../apache/spark/sql/catalyst/parser/ErrorParserSuite.scala | 2 +-
.../org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala | 8 ++++----
4 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 2f144251e5d..21b7c467b64 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1331,6 +1331,11 @@
           "Catalog <catalogName> does not support <operation>."
         ]
       },
+      "COMBINATION_QUERY_RESULT_CLAUSES" : {
+        "message" : [
+          "Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY."
+        ]
+      },
       "DESC_TABLE_COLUMN_PARTITION" : {
         "message" : [
           "DESC TABLE COLUMN for a specific partition."
@@ -1645,11 +1650,6 @@
       "There must be at least one WHEN clause in a MERGE statement."
     ]
   },
-  "_LEGACY_ERROR_TEMP_0011" : {
-    "message" : [
-      "Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY is not supported."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_0012" : {
     "message" : [
       "DISTRIBUTE BY is not supported."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 773a79a3f3f..ef59dfa5517 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -78,7 +78,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def combinationQueryResultClausesUnsupportedError(ctx: QueryOrganizationContext): Throwable = {
-    new ParseException(errorClass = "_LEGACY_ERROR_TEMP_0011", ctx)
+    new ParseException(errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES", ctx)
   }
 
   def distributeByUnsupportedError(ctx: QueryOrganizationContext): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
index a985992abba..7cf853b0812 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
@@ -34,7 +34,7 @@ class ErrorParserSuite extends AnalysisTest {
   test("semantic errors") {
     checkError(
       exception = parseException("select *\nfrom r\norder by q\ncluster by q"),
-      errorClass = "_LEGACY_ERROR_TEMP_0011",
+      errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES",
       parameters = Map.empty,
       context = ExpectedContext(fragment = "order by q\ncluster by q", start = 16, stop = 38))
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 035e6231178..c25f218fe1b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -376,7 +376,7 @@ class PlanParserSuite extends AnalysisTest {
     val sql1 = s"$baseSql order by a sort by a"
     checkError(
       exception = parseException(sql1),
-      errorClass = "_LEGACY_ERROR_TEMP_0011",
+      errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES",
       parameters = Map.empty,
       context = ExpectedContext(
         fragment = "order by a sort by a",
@@ -386,7 +386,7 @@ class PlanParserSuite extends AnalysisTest {
     val sql2 = s"$baseSql cluster by a distribute by a"
     checkError(
       exception = parseException(sql2),
-      errorClass = "_LEGACY_ERROR_TEMP_0011",
+      errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES",
       parameters = Map.empty,
       context = ExpectedContext(
         fragment = "cluster by a distribute by a",
@@ -396,7 +396,7 @@ class PlanParserSuite extends AnalysisTest {
     val sql3 = s"$baseSql order by a cluster by a"
     checkError(
       exception = parseException(sql3),
-      errorClass = "_LEGACY_ERROR_TEMP_0011",
+      errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES",
       parameters = Map.empty,
       context = ExpectedContext(
         fragment = "order by a cluster by a",
@@ -406,7 +406,7 @@ class PlanParserSuite extends AnalysisTest {
     val sql4 = s"$baseSql order by a distribute by a"
     checkError(
       exception = parseException(sql4),
-      errorClass = "_LEGACY_ERROR_TEMP_0011",
+      errorClass = "UNSUPPORTED_FEATURE.COMBINATION_QUERY_RESULT_CLAUSES",
       parameters = Map.empty,
       context = ExpectedContext(
         fragment = "order by a distribute by a",