This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 3ac31b1b6eaf [SPARK-48574][SQL] Fix support for StructTypes with
collations
3ac31b1b6eaf is described below
commit 3ac31b1b6eaf9c1a45859f4238a7f7e2c4ffb9dc
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Wed Jun 19 16:07:59 2024 +0800
[SPARK-48574][SQL] Fix support for StructTypes with collations
### What changes were proposed in this pull request?
Fix for ExtractValue expression
### Why are the changes needed?
This fix is needed in case we change default collation.
### Does this PR introduce _any_ user-facing change?
Yes, it fixes a problem.
### How was this patch tested?
Added tests in `CollationSQLExpressionsSuite.scala`
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #46997 from mihailom-db/SPARK-48574.
Authored-by: Mihailo Milosevic <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
---
.../catalyst/expressions/complexTypeExtractors.scala | 4 ++--
.../spark/sql/CollationSQLExpressionsSuite.scala | 19 +++++++++++++++++++
2 files changed, 21 insertions(+), 2 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index a801d0367080..ff94322efdaa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -51,12 +51,12 @@ object ExtractValue {
resolver: Resolver): Expression = {
(child.dataType, extraction) match {
- case (StructType(fields), NonNullLiteral(v, StringType)) =>
+ case (StructType(fields), NonNullLiteral(v, _: StringType)) =>
val fieldName = v.toString
val ordinal = findField(fields, fieldName, resolver)
GetStructField(child, ordinal, Some(fieldName))
-      case (ArrayType(StructType(fields), containsNull), NonNullLiteral(v, StringType)) =>
+      case (ArrayType(StructType(fields), containsNull), NonNullLiteral(v, _: StringType)) =>
val fieldName = v.toString
val ordinal = findField(fields, fieldName, resolver)
GetArrayStructFields(child, fields(ordinal).copy(name = fieldName),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
index a1c6f5f94317..0c54ccb7cfb1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
@@ -1854,6 +1854,25 @@ class CollationSQLExpressionsSuite
})
}
+ test("ExtractValue expression with collation") {
+ // Supported collations
+ testSuppCollations.foreach(collationName => {
+ withSQLConf(SqlApiConf.DEFAULT_COLLATION -> collationName) {
+ val query =
+ s"""
+ |select col['Field1']
+          |from values (named_struct('Field1', 'Spark', 'Field2', 5)) as tab(col);
+ |""".stripMargin
+ // Result & data type check
+ val testQuery = sql(query)
+ val dataType = StringType(collationName)
+ val expectedResult = "Spark"
+ assert(testQuery.schema.fields.head.dataType.sameType(dataType))
+ checkAnswer(testQuery, Row(expectedResult))
+ }
+ })
+ }
+
test("Lag expression with collation") {
// Supported collations
testSuppCollations.foreach(collationName => {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]