This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new ce073960422a [SPARK-50751][SQL] Assign appropriate error condition for
`_LEGACY_ERROR_TEMP_1305`: `UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG`
ce073960422a is described below
commit ce073960422ab204e6df1b8dee58526f9e86ad7f
Author: Haejoon Lee <[email protected]>
AuthorDate: Tue Jan 21 09:58:04 2025 +0200
[SPARK-50751][SQL] Assign appropriate error condition for
`_LEGACY_ERROR_TEMP_1305`: `UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG`
### What changes were proposed in this pull request?
This PR proposes to assign an appropriate error condition for
`_LEGACY_ERROR_TEMP_1305`: `UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG` and
improve its error message
### Why are the changes needed?
To improve the error message by assigning a proper error condition and
SQLSTATE
### Does this PR introduce _any_ user-facing change?
No, only the user-facing error message is improved
### How was this patch tested?
Updated the existing tests
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #49395 from itholic/LEGACY_1305.
Authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
common/utils/src/main/resources/error/error-conditions.json | 11 ++++++-----
.../org/apache/spark/sql/errors/QueryCompilationErrors.scala | 9 ++++++---
.../main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala | 2 +-
.../execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala | 8 +++++---
4 files changed, 18 insertions(+), 12 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 2506c20453ae..556b00dd6a49 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -6007,6 +6007,12 @@
},
"sqlState" : "0A000"
},
+ "UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG" : {
+ "message" : [
+ "The table change <change> is not supported for the JDBC catalog on
table <tableName>. Supported changes include: AddColumn, RenameColumn,
DeleteColumn, UpdateColumnType, UpdateColumnNullability."
+ ],
+ "sqlState" : "42000"
+ },
"UNSUPPORTED_TYPED_LITERAL" : {
"message" : [
"Literals of the type <unsupportedType> are not supported. Supported
types are <supportedTypes>."
@@ -7175,11 +7181,6 @@
"Unable to find the column `<colName>` given [<actualColumns>]."
]
},
- "_LEGACY_ERROR_TEMP_1305" : {
- "message" : [
- "Unsupported TableChange <change> in JDBC catalog."
- ]
- },
"_LEGACY_ERROR_TEMP_1306" : {
"message" : [
"There is a 'path' or 'paths' option set and load() is called with path
parameters. Either remove the path option if it's the same as the path
parameter, or add it to the load() parameter if you do want to read multiple
paths. To ignore this check, set '<config>' to 'true'."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index e6f8f0d73d7d..2fda7894c0fb 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3317,10 +3317,13 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
new NoSuchTableException(ident)
}
- def unsupportedTableChangeInJDBCCatalogError(change: TableChange): Throwable
= {
+ def unsupportedTableChangeInJDBCCatalogError(
+ change: TableChange, tableName: String): Throwable = {
+ val sanitizedTableName = tableName.replaceAll("\"", "")
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1305",
- messageParameters = Map("change" -> change.toString))
+ errorClass = "UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG",
+ messageParameters = Map(
+ "change" -> change.toString, "tableName" ->
toSQLId(sanitizedTableName)))
}
def pathOptionNotSetCorrectlyWhenReadingError(): Throwable = {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 694e60102852..69121e33c592 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -627,7 +627,7 @@ abstract class JdbcDialect extends Serializable with
Logging {
val name = updateNull.fieldNames
updateClause += getUpdateColumnNullabilityQuery(tableName, name(0),
updateNull.nullable())
case _ =>
- throw
QueryCompilationErrors.unsupportedTableChangeInJDBCCatalogError(change)
+ throw
QueryCompilationErrors.unsupportedTableChangeInJDBCCatalogError(change,
tableName)
}
}
updateClause.result()
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
index 580034ff7b0e..f6b63d7939a9 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
@@ -444,9 +444,11 @@ class JDBCTableCatalogSuite extends QueryTest with
SharedSparkSession {
exception = intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName ALTER COLUMN ID COMMENT 'test'")
},
- condition = "_LEGACY_ERROR_TEMP_1305",
- parameters = Map("change" ->
-
"org.apache.spark.sql.connector.catalog.TableChange\\$UpdateColumnComment.*"),
+ condition = "UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG",
+ parameters = Map(
+ "change" ->
"org.apache.spark.sql.connector.catalog.TableChange\\$UpdateColumnComment.*",
+ "tableName" -> "`test`.`alt_table`"
+ ),
matchPVals = true)
// Update comment for not existing column
val sqlText = s"ALTER TABLE $tableName ALTER COLUMN bad_column COMMENT
'test'"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]