This is an automated email from the ASF dual-hosted git repository.
dtenedor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 0444c02d3ba4 [SPARK-55155] Fix SET CATALOG to use special chars and
backticks in the identifier name
0444c02d3ba4 is described below
commit 0444c02d3ba48acff79443f1a842c28e625ee43f
Author: tangrizzly <[email protected]>
AuthorDate: Wed Feb 25 09:27:36 2026 -0800
[SPARK-55155] Fix SET CATALOG to use special chars and backticks in the
identifier name
### What changes were proposed in this pull request?
This PR fixes the handling of catalog names with special characters in the
`SET CATALOG` command. The changes modify `ResolveSetCatalogCommand` to
properly wrap identifier parts with backticks and escape any existing backticks
by doubling them when converting `SET CATALOG` commands to string literals.
### Why are the changes needed?
A previous change in https://github.com/apache/spark/pull/53941 extended
the `SET CATALOG` command to accept foldable expressions. As part of that
change, identifiers quoted with backticks are resolved to
`Literal("some-identifier")` with the backticks stripped. This behavior breaks
`SET CATALOG` when catalog names contain special characters such as %, @, -, $,
#, or backticks.
This fix wraps identifiers in explicit backticks and escapes any existing
backticks, ensuring the catalog names are resolved correctly during analysis.
### Does this PR introduce _any_ user-facing change?
Yes. Users can continue using `SET CATALOG` with catalog names that contain
special characters when properly quoted with backticks:
```sql
SET CATALOG `te%st-c$a#t` -- works with special characters
SET CATALOG `test``quote` -- works with backticks in the name (escape by doubling)
```
### How was this patch tested?
DataSourceV2SQLSuite:
- SET CATALOG with special characters with backticks in identifier
- SET CATALOG with backtick character in identifier
### Was this patch authored or co-authored using generative AI tooling?
Generated-by: Claude Code (Claude Sonnet 4.5 - claude-sonnet-4-5-20250929)
Closes #54076 from
tangrizzly/SPARK-55155-set-catalog-expression-fix-special-char.
Authored-by: tangrizzly <[email protected]>
Signed-off-by: Daniel Tenedorio <[email protected]>
---
.../analysis/ResolveSetCatalogCommand.scala | 7 +++++--
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 21 +++++++++++++++++++++
2 files changed, 26 insertions(+), 2 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSetCatalogCommand.scala
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSetCatalogCommand.scala
index 7f557fe625f5..1e9c0dcfb2a4 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSetCatalogCommand.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSetCatalogCommand.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.util.QuotingUtils
import org.apache.spark.sql.execution.command.SetCatalogCommand
/**
@@ -32,8 +33,10 @@ object ResolveSetCatalogCommand extends Rule[LogicalPlan] {
case cmd @ SetCatalogCommand(expr) =>
val resolvedExpr = expr match {
case UnresolvedAttribute(nameParts) =>
- // Convert `SET CATALOG foo` into Literal("foo").
- Literal(nameParts.mkString("."))
+ // Convert `SET CATALOG foo` into Literal("`foo`").
+ // Wrap each identifier part with backticks to handle
+ // special characters.
+ Literal(QuotingUtils.quoteNameParts(nameParts))
case other =>
// Other expressions (identifier(), CAST, CONCAT, etc.) are resolved
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 50b3bebaeb77..589e3ebffdef 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -3159,6 +3159,27 @@ class DataSourceV2SQLSuiteV1Filter
queryContext = Array(ExpectedContext(fragment = "3.14", start = 12, stop
= 15)))
}
+ test("SET CATALOG with special characters with backticks in identifier") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ // Test catalog name with special characters like %, @, -, $, #
+ val catalogName = "te%s@t-c$a#t"
+ registerCatalog(catalogName, classOf[InMemoryCatalog])
+ // Use backtick-quoted identifier
+ sql(s"SET CATALOG `$catalogName`")
+ assertCurrentCatalog(catalogName)
+ }
+
+ test("SET CATALOG with backtick character in identifier") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ val catalogName = "test`quote"
+ registerCatalog(catalogName, classOf[InMemoryCatalog])
+ // Use double backticks to escape the backtick in the name
+ sql("SET CATALOG `test``quote`")
+ assertCurrentCatalog(catalogName)
+ }
+
test("SPARK-35973: ShowCatalogs") {
val schema = new StructType()
.add("catalog", StringType, nullable = false)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]