This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e0cb5120134d [SPARK-53795][CONNECT] Remove unused parameters in 
LiteralValueProtoConverter
e0cb5120134d is described below

commit e0cb5120134dae7d94775ba997607d92951769bf
Author: Yihong He <[email protected]>
AuthorDate: Wed Oct 8 10:04:16 2025 +0800

    [SPARK-53795][CONNECT] Remove unused parameters in 
LiteralValueProtoConverter
    
    ### What changes were proposed in this pull request?
    
    This PR removes unused parameters from the LiteralValueProtoConverter:
    1. **Removed `containsNull` parameter** from the `arrayBuilder` method 
signature and its usage in pattern matching for `ArrayType`
    2. **Removed `valueContainsNull` parameter** from the `mapBuilder` method 
signature and its usage in pattern matching for `MapType`
    
    ### Why are the changes needed?
    
    The `containsNull` and `valueContainsNull` parameters were not being used 
within the `arrayBuilder` and `mapBuilder` methods.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Generated-by: Cursor 1.7.28
    
    Closes #52512 from heyihong/SPARK-53795.
    
    Authored-by: Yihong He <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 .../sql/connect/common/LiteralValueProtoConverter.scala  | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
index 84637a9c8aab..b5a648d6111a 100644
--- a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
+++ b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
@@ -128,7 +128,7 @@ object LiteralValueProtoConverter {
       options: ToLiteralProtoOptions): proto.Expression.Literal.Builder = {
     val builder = proto.Expression.Literal.newBuilder()
 
-    def arrayBuilder(scalaValue: Any, elementType: DataType, containsNull: Boolean) = {
+    def arrayBuilder(scalaValue: Any, elementType: DataType) = {
       val ab = builder.getArrayBuilder
       scalaValue match {
         case a: Array[_] =>
@@ -148,11 +148,7 @@ object LiteralValueProtoConverter {
       ab
     }
 
-    def mapBuilder(
-        scalaValue: Any,
-        keyType: DataType,
-        valueType: DataType,
-        valueContainsNull: Boolean) = {
+    def mapBuilder(scalaValue: Any, keyType: DataType, valueType: DataType) = {
       val mb = builder.getMapBuilder
       scalaValue match {
         case map: scala.collection.Map[_, _] =>
@@ -214,10 +210,10 @@ object LiteralValueProtoConverter {
         toLiteralProtoBuilderInternal(v.unsafeArray, dataType, options)
       case (v: Array[Byte], ArrayType(_, _)) =>
         toLiteralProtoBuilderInternal(v, options)
-      case (v, ArrayType(elementType, containsNull)) =>
-        builder.setArray(arrayBuilder(v, elementType, containsNull))
-      case (v, MapType(keyType, valueType, valueContainsNull)) =>
-        builder.setMap(mapBuilder(v, keyType, valueType, valueContainsNull))
+      case (v, ArrayType(elementType, _)) =>
+        builder.setArray(arrayBuilder(v, elementType))
+      case (v, MapType(keyType, valueType, _)) =>
+        builder.setMap(mapBuilder(v, keyType, valueType))
       case (v, structType: StructType) =>
         builder.setStruct(structBuilder(v, structType))
       case (v: LocalTime, timeType: TimeType) =>


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to