This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 2d498d51ab83 Revert "[SPARK-50804][SQL] to_protobuf() should not throw MatchError"
2d498d51ab83 is described below
commit 2d498d51ab8333238e77e5e7de952ff3b0276b3b
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Jan 14 06:49:53 2025 -0800
Revert "[SPARK-50804][SQL] to_protobuf() should not throw MatchError"
This reverts commit e4cbb91eb2723ebaa91ac308670bbbb39cca6b0b.
---
.../spark/sql/protobuf/ProtobufSerializer.scala | 3 ---
.../sql/protobuf/ProtobufFunctionsSuite.scala | 27 ----------------------
.../expressions/toFromProtobufSqlFunctions.scala | 20 ++++------------
3 files changed, 5 insertions(+), 45 deletions(-)
diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
index 65e8cce0d056..1c64e70755d5 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
@@ -47,9 +47,6 @@ private[sql] class ProtobufSerializer(
}
private val converter: Any => Any = {
- assert(
- rootCatalystType.isInstanceOf[StructType],
- "ProtobufSerializer's root catalyst type must be a struct type")
val baseConverter =
try {
rootCatalystType match {
diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala
index c1f5186b8333..44a8339ac1f0 100644
--- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala
+++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala
@@ -1721,33 +1721,6 @@ class ProtobufFunctionsSuite extends QueryTest with SharedSparkSession with Prot
}
}
- test("non-struct SQL type") {
- val dfWithInt = spark
- .range(1)
- .select(
- lit(9999).as("int_col")
- )
-
- val parseError = intercept[AnalysisException] {
- dfWithInt.select(
-        to_protobuf_wrapper($"int_col", "SimpleMessageEnum", Some(testFileDesc))).collect()
- }
- val descMsg = testFileDesc.map("%02X".format(_)).mkString("")
- checkError(
- exception = parseError,
- condition = "DATATYPE_MISMATCH.TYPE_CHECK_FAILURE_WITH_HINT",
- parameters = Map(
- "sqlExpr" ->
-          s"""\"to_protobuf(int_col, SimpleMessageEnum, X'$descMsg', NULL)\"""",
-        "msg" -> ("The first argument of the TO_PROTOBUF SQL function must be a struct type"),
- "hint" -> ""
- ),
- queryContext = Array(ExpectedContext(
- fragment = "fn",
- callSitePattern = ".*"))
- )
- }
-
test("test unsigned integer types") {
// Test that we correctly handle unsigned integer parsing.
// We're using Integer/Long's `MIN_VALUE` as it has a 1 in the sign bit.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/toFromProtobufSqlFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/toFromProtobufSqlFunctions.scala
index 380df38ab049..96bcf49dbd09 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/toFromProtobufSqlFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/toFromProtobufSqlFunctions.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
import org.apache.spark.sql.errors.QueryCompilationErrors
-import org.apache.spark.sql.types.{BinaryType, MapType, NullType, StringType, StructType}
+import org.apache.spark.sql.types.{BinaryType, MapType, NullType, StringType}
import org.apache.spark.sql.util.ProtobufUtils
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
@@ -238,15 +238,6 @@ case class ToProtobuf(
}
override def checkInputDataTypes(): TypeCheckResult = {
- val colTypeCheck = first.dataType match {
- case _: StructType => None
- case _ =>
- Some(
- TypeCheckResult.TypeCheckFailure(
-            "The first argument of the TO_PROTOBUF SQL function must be a struct type")
- )
- }
-
val messageNameCheck = messageName.dataType match {
case _: StringType if messageName.foldable => None
case _ =>
@@ -271,11 +262,10 @@ case class ToProtobuf(
             "strings to strings containing the options to use for converting the value to " +
             "Protobuf format"))
}
- colTypeCheck.getOrElse(
- messageNameCheck.getOrElse(
- descFilePathCheck.getOrElse(
- optionsCheck.getOrElse(TypeCheckResult.TypeCheckSuccess)
- )
+
+ messageNameCheck.getOrElse(
+ descFilePathCheck.getOrElse(
+ optionsCheck.getOrElse(TypeCheckResult.TypeCheckSuccess)
)
)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]