This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 332efb2eabb2 [SPARK-49636][SQL] Remove the ANSI config suggestion in
INVALID_ARRAY_INDEX and INVALID_ARRAY_INDEX_IN_ELEMENT_AT
332efb2eabb2 is described below
commit 332efb2eabb2b9383cfcfb0bf633089f38cdb398
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Wed Dec 18 17:58:13 2024 +0300
[SPARK-49636][SQL] Remove the ANSI config suggestion in INVALID_ARRAY_INDEX
and INVALID_ARRAY_INDEX_IN_ELEMENT_AT
### What changes were proposed in this pull request?
Removal of the ANSI turn-off suggestion from the INVALID_ARRAY_INDEX and
INVALID_ARRAY_INDEX_IN_ELEMENT_AT error messages.
### Why are the changes needed?
Now that Spark 4.0.0 has ANSI mode enabled by default, we want
to keep suggestions of this kind to a minimum. The good thing is that both
errors have other suggestions which are sufficient.
### Does this PR introduce _any_ user-facing change?
Yes, error message has changed.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #49213 from mihailom-db/array_index.
Authored-by: Mihailo Milosevic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
common/utils/src/main/resources/error/error-conditions.json | 4 ++--
.../scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 6 ++----
sql/core/src/test/resources/sql-tests/results/array.sql.out | 7 -------
.../apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 8 ++++----
4 files changed, 8 insertions(+), 17 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 55265fa6dfc9..8c2fc8a038b0 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2131,13 +2131,13 @@
},
"INVALID_ARRAY_INDEX" : {
"message" : [
- "The index <indexValue> is out of bounds. The array has <arraySize>
elements. Use the SQL function `get()` to tolerate accessing element at invalid
index and return NULL instead. If necessary set <ansiConfig> to \"false\" to
bypass this error."
+ "The index <indexValue> is out of bounds. The array has <arraySize>
elements. Use the SQL function `get()` to tolerate accessing element at invalid
index and return NULL instead."
],
"sqlState" : "22003"
},
"INVALID_ARRAY_INDEX_IN_ELEMENT_AT" : {
"message" : [
- "The index <indexValue> is out of bounds. The array has <arraySize>
elements. Use `try_element_at` to tolerate accessing element at invalid index
and return NULL instead. If necessary set <ansiConfig> to \"false\" to bypass
this error."
+ "The index <indexValue> is out of bounds. The array has <arraySize>
elements. Use `try_element_at` to tolerate accessing element at invalid index
and return NULL instead."
],
"sqlState" : "22003"
},
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 95e2f30bbf2d..86137fc1c3c0 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -225,8 +225,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
errorClass = "INVALID_ARRAY_INDEX",
messageParameters = Map(
"indexValue" -> toSQLValue(index, IntegerType),
- "arraySize" -> toSQLValue(numElements, IntegerType),
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ "arraySize" -> toSQLValue(numElements, IntegerType)),
context = getQueryContext(context),
summary = getSummary(context))
}
@@ -239,8 +238,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
messageParameters = Map(
"indexValue" -> toSQLValue(index, IntegerType),
- "arraySize" -> toSQLValue(numElements, IntegerType),
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ "arraySize" -> toSQLValue(numElements, IntegerType)),
context = getQueryContext(context),
summary = getSummary(context))
}
diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out
b/sql/core/src/test/resources/sql-tests/results/array.sql.out
index 7394e428091c..b5dabfb47f43 100644
--- a/sql/core/src/test/resources/sql-tests/results/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out
@@ -185,7 +185,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "3",
"indexValue" : "5"
},
@@ -209,7 +208,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "3",
"indexValue" : "-5"
},
@@ -252,7 +250,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "2",
"indexValue" : "4"
},
@@ -276,7 +273,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "2",
"indexValue" : "0"
},
@@ -300,7 +296,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "2",
"indexValue" : "-1"
},
@@ -356,7 +351,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "3",
"indexValue" : "5"
},
@@ -380,7 +374,6 @@ org.apache.spark.SparkArrayIndexOutOfBoundsException
"errorClass" : "INVALID_ARRAY_INDEX",
"sqlState" : "22003",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\"",
"arraySize" : "3",
"indexValue" : "-1"
},
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index f07d2d6620f7..a9ff7c308c15 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -145,7 +145,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
sql("select array(1, 2, 3, 4, 5)[8]").collect()
},
condition = "INVALID_ARRAY_INDEX",
- parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig"
-> ansiConf),
+ parameters = Map("indexValue" -> "8", "arraySize" -> "5"),
context = ExpectedContext(fragment = "array(1, 2, 3, 4, 5)[8]", start =
7, stop = 29))
checkError(
@@ -153,7 +153,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
OneRowRelation().select(lit(Array(1, 2, 3, 4, 5))(8)).collect()
},
condition = "INVALID_ARRAY_INDEX",
- parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig"
-> ansiConf),
+ parameters = Map("indexValue" -> "8", "arraySize" -> "5"),
context = ExpectedContext(
fragment = "apply",
callSitePattern = getCurrentClassCallSitePattern))
@@ -165,7 +165,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
sql("select element_at(array(1, 2, 3, 4, 5), 8)").collect()
},
condition = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
- parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig"
-> ansiConf),
+ parameters = Map("indexValue" -> "8", "arraySize" -> "5"),
context = ExpectedContext(
fragment = "element_at(array(1, 2, 3, 4, 5), 8)",
start = 7,
@@ -176,7 +176,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
OneRowRelation().select(element_at(lit(Array(1, 2, 3, 4, 5)),
8)).collect()
},
condition = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
- parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig"
-> ansiConf),
+ parameters = Map("indexValue" -> "8", "arraySize" -> "5"),
context =
ExpectedContext(fragment = "element_at", callSitePattern =
getCurrentClassCallSitePattern))
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]