This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 76d7c857078 [SPARK-41576][SQL] Assign name to _LEGACY_ERROR_TEMP_2051
76d7c857078 is described below
commit 76d7c8570788c773720c6e143e496647dfe9ebe0
Author: itholic <[email protected]>
AuthorDate: Thu Jan 5 10:47:46 2023 +0500
[SPARK-41576][SQL] Assign name to _LEGACY_ERROR_TEMP_2051
### What changes were proposed in this pull request?
This PR proposes to assign the name "DATA_SOURCE_NOT_FOUND" to the legacy error class _LEGACY_ERROR_TEMP_2051.
### Why are the changes needed?
We should assign proper names to the _LEGACY_ERROR_TEMP_* error classes.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
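The suites touched by this patch can presumably also be run individually with the usual sbt wildcard pattern, for example:
```
./build/sbt "sql/testOnly *DDLSuite"
./build/sbt "sql/testOnly *ResolvedDataSourceSuite"
```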
Closes #39281 from itholic/LEGACY_2051.
Authored-by: itholic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 10 +++++-----
.../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 4 ++--
.../apache/spark/sql/execution/datasources/DataSource.scala | 2 +-
.../org/apache/spark/sql/execution/command/DDLSuite.scala | 12 ++++++++----
.../apache/spark/sql/sources/ResolvedDataSourceSuite.scala | 9 +++++++--
5 files changed, 23 insertions(+), 14 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 120925f5254..12f4b0f9c37 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -441,6 +441,11 @@
],
"sqlState" : "42000"
},
+ "DATA_SOURCE_NOT_FOUND" : {
+ "message" : [
+ "Failed to find the data source: <provider>. Please find packages at
`https://spark.apache.org/third-party-projects.html`."
+ ]
+ },
"DATETIME_OVERFLOW" : {
"message" : [
"Datetime operation overflow: <operation>."
@@ -3696,11 +3701,6 @@
"Expected exactly one path to be specified, but got: <paths>"
]
},
- "_LEGACY_ERROR_TEMP_2051" : {
- "message" : [
- "Failed to find data source: <provider>. Please find packages at
https://spark.apache.org/third-party-projects.html"
- ]
- },
"_LEGACY_ERROR_TEMP_2052" : {
"message" : [
"<className> was removed in Spark 2.0. Please check if your library is
compatible with Spark 2.0"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 44a1972272f..227e86994f5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -731,10 +731,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
messageParameters = Map("paths" -> allPaths.mkString(", ")))
}
- def failedToFindDataSourceError(
+ def dataSourceNotFoundError(
provider: String, error: Throwable): SparkClassNotFoundException = {
new SparkClassNotFoundException(
- errorClass = "_LEGACY_ERROR_TEMP_2051",
+ errorClass = "DATA_SOURCE_NOT_FOUND",
messageParameters = Map("provider" -> provider),
cause = error)
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index edbdd6bbc67..9bb5191dc01 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -643,7 +643,7 @@ object DataSource extends Logging {
} else if (provider1.toLowerCase(Locale.ROOT) == "kafka") {
throw QueryCompilationErrors.failedToFindKafkaDataSourceError(provider1)
} else {
- throw QueryExecutionErrors.failedToFindDataSourceError(provider1, error)
+ throw QueryExecutionErrors.dataSourceNotFoundError(provider1, error)
}
}
} catch {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 6cc37a41210..f5d17b142e2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -24,7 +24,7 @@ import java.util.Locale
import org.apache.hadoop.fs.{Path, RawLocalFileSystem}
import org.apache.hadoop.fs.permission.{AclEntry, AclStatus}
-import org.apache.spark.{SparkException, SparkFiles, SparkRuntimeException}
+import org.apache.spark.{SparkClassNotFoundException, SparkException, SparkFiles, SparkRuntimeException}
import org.apache.spark.internal.config
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, TableIdentifier}
@@ -2040,10 +2040,14 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
val table2 = catalog.getTableMetadata(TableIdentifier("t2"))
assert(table2.provider == Some("hive"))
- val e1 = intercept[ClassNotFoundException] {
+ val e1 = intercept[SparkClassNotFoundException] {
sql("CREATE TABLE t3 LIKE s USING unknown")
- }.getMessage
- assert(e1.contains("Failed to find data source"))
+ }
+ checkError(
+ exception = e1,
+ errorClass = "DATA_SOURCE_NOT_FOUND",
+ parameters = Map("provider" -> "unknown")
+ )
withGlobalTempView("src") {
val globalTempDB = spark.sharedState.globalTempViewManager.database
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/ResolvedDataSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/ResolvedDataSourceSuite.scala
index 818a66eb436..5d1d0389303 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/ResolvedDataSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/ResolvedDataSourceSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.sources
import java.time.ZoneId
+import org.apache.spark.SparkClassNotFoundException
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.datasources.DataSource
@@ -96,9 +97,13 @@ class ResolvedDataSourceSuite extends SharedSparkSession {
}
test("error message for unknown data sources") {
- val error = intercept[ClassNotFoundException] {
+ val error = intercept[SparkClassNotFoundException] {
getProvidingClass("asfdwefasdfasdf")
}
- assert(error.getMessage.contains("Failed to find data source: asfdwefasdfasdf."))
+ checkError(
+ exception = error,
+ errorClass = "DATA_SOURCE_NOT_FOUND",
+ parameters = Map("provider" -> "asfdwefasdfasdf")
+ )
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]