This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4d3bc8f5b55 [SPARK-41580][SQL] Assign name to _LEGACY_ERROR_TEMP_2137
4d3bc8f5b55 is described below
commit 4d3bc8f5b55969f7c954991239ff43f9faba1346
Author: itholic <[email protected]>
AuthorDate: Thu Jan 5 10:58:14 2023 +0500
[SPARK-41580][SQL] Assign name to _LEGACY_ERROR_TEMP_2137
### What changes were proposed in this pull request?
This PR proposes to assign a name to _LEGACY_ERROR_TEMP_2137,
"INVALID_JSON_ROOT_FIELD".
### Why are the changes needed?
We should assign a proper name to _LEGACY_ERROR_TEMP_*
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39305 from itholic/LEGACY_2137.
Authored-by: itholic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 10 +++++-----
.../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +-
.../spark/sql/execution/datasources/json/JsonSuite.scala | 14 +++++++++++---
3 files changed, 17 insertions(+), 9 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 12f4b0f9c37..29cafdcc1b6 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -760,6 +760,11 @@
"The identifier <ident> is invalid. Please, consider quoting it with
back-quotes as `<ident>`."
]
},
+ "INVALID_JSON_ROOT_FIELD" : {
+ "message" : [
+ "Cannot convert JSON root field to target Spark type."
+ ]
+ },
"INVALID_JSON_SCHEMA_MAP_TYPE" : {
"message" : [
"Input schema <jsonSchema> can only contain STRING as a key type for a
MAP."
@@ -4110,11 +4115,6 @@
"Failed to parse an empty string for data type <dataType>"
]
},
- "_LEGACY_ERROR_TEMP_2137" : {
- "message" : [
- "Root converter returned null"
- ]
- },
"_LEGACY_ERROR_TEMP_2138" : {
"message" : [
"Cannot have circular references in bean class, but got the circular
reference of class <clazz>"
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 227e86994f5..0c92d56ed04 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1457,7 +1457,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase {
def rootConverterReturnNullError(): SparkRuntimeException = {
new SparkRuntimeException(
- errorClass = "_LEGACY_ERROR_TEMP_2137",
+ errorClass = "INVALID_JSON_ROOT_FIELD",
messageParameters = Map.empty)
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 0d2c98316e7..a4b7df9af42 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -25,11 +25,12 @@ import java.time.{Duration, Instant, LocalDate,
LocalDateTime, Period, ZoneId}
import java.util.Locale
import com.fasterxml.jackson.core.JsonFactory
+import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.fs.{Path, PathFilter}
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress.GzipCodec
-import org.apache.spark.{SparkConf, SparkException, SparkUpgradeException,
TestUtils}
+import org.apache.spark.{SparkConf, SparkException, SparkRuntimeException,
SparkUpgradeException, TestUtils}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{functions => F, _}
import org.apache.spark.sql.catalyst.json._
@@ -3192,10 +3193,17 @@ abstract class JsonSuite
}
test("SPARK-36379: proceed parsing with root nulls in permissive mode") {
- assert(intercept[SparkException] {
+ val exception = intercept[SparkException] {
spark.read.option("mode", "failfast")
.schema("a string").json(Seq("""[{"a": "str"},
null]""").toDS).collect()
- }.getMessage.contains("Malformed records are detected"))
+ }
+ assert(exception.getMessage.contains("Malformed records are detected"))
+
+ checkError(
+ exception =
ExceptionUtils.getRootCause(exception).asInstanceOf[SparkRuntimeException],
+ errorClass = "INVALID_JSON_ROOT_FIELD",
+ parameters = Map.empty
+ )
// Permissive modes should proceed parsing malformed records (null).
// Here, since an array fails to parse in the middle, we will return one
row.
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]