This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new cfa6f309522c [SPARK-50868][BUILD] Upgrade `scalafmt` to 3.8.5
cfa6f309522c is described below

commit cfa6f309522c433832db8b6b8472c7ba8f48e3cf
Author: panbingkun <[email protected]>
AuthorDate: Sun Jan 19 01:06:24 2025 +0800

    [SPARK-50868][BUILD] Upgrade `scalafmt` to 3.8.5
    
    ### What changes were proposed in this pull request?
    This PR aims to upgrade `scalafmt` from `3.8.2` to `3.8.5`.
    
    ### Why are the changes needed?
    - The full release notes:
    https://github.com/scalameta/scalafmt/releases/tag/v3.8.5
    https://github.com/scalameta/scalafmt/releases/tag/v3.8.4
    https://github.com/scalameta/scalafmt/releases/tag/v3.8.3
    
    - The last upgrade occurred 7 months ago.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA.
    Manually tested:
    ```
    sh dev/scalastyle
    -e Scalastyle checks passed.
    ```
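
    To reproduce the formatting locally, a minimal sketch (assuming the repository's `dev/scalafmt` helper script, which is not touched by this diff) is:
    ```
    # Reformat Scala sources using the settings in dev/.scalafmt.conf
    # (dev/scalafmt is assumed to be the formatting helper; adjust if it differs)
    sh dev/scalafmt
    # Re-run the style check shown above
    sh dev/scalastyle
    ```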
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #49545 from panbingkun/SPARK-50868.
    
    Authored-by: panbingkun <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
    (cherry picked from commit 8b12bbb69c0dd3210820d404f4e6b047a63cf0cb)
    Signed-off-by: yangjie01 <[email protected]>
---
 .../scala/org/apache/spark/sql/SparkSession.scala  |  4 +--
 .../connect/client/arrow/ArrowEncoderSuite.scala   | 35 +++++++++++-----------
 dev/.scalafmt.conf                                 |  2 +-
 .../sql/connect/messages/AbbreviateSuite.scala     | 27 +++++++++--------
 4 files changed, 35 insertions(+), 33 deletions(-)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 89519034d07c..110ecde5f99f 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -377,8 +377,8 @@ class SparkSession private[sql] (
    */
   @Since("4.0.0")
   @DeveloperApi
-  def newDataset[T](encoder: AgnosticEncoder[T])(
-      f: proto.Relation.Builder => Unit): Dataset[T] = {
+  def newDataset[T](
+      encoder: AgnosticEncoder[T])(f: proto.Relation.Builder => Unit): Dataset[T] = {
     newDataset[T](encoder, Seq.empty)(f)
   }
 
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
index d0468c8d57b5..2cdbdc67d2ca 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
@@ -291,23 +291,24 @@ class ArrowEncoderSuite extends ConnectFunSuite with BeforeAndAfterAll {
       val maybeNull11 = MaybeNull(11)
       val maybeNull13 = MaybeNull(13)
       val maybeNull17 = MaybeNull(17)
-      Iterator.tabulate(100)(i =>
-        Row(
-          maybeNull5(
-            Row(
-              i,
-              maybeNull7(new VariantVal(Array[Byte](12, i.toByte), Array[Byte](1, 0, 0))),
-              i + 1,
-              maybeNull11(
-                new VariantVal(Array[Byte](12, (i + 1).toByte), Array[Byte](1, 0, 0))))),
-          maybeNull7((0 until 10).map(j =>
-            new VariantVal(Array[Byte](12, (i + j).toByte), Array[Byte](1, 0, 0)))),
-          maybeNull13(
-            Map(
-              (
-                i.toString,
-                maybeNull17(
-                  new VariantVal(Array[Byte](12, (i + 2).toByte), Array[Byte](1, 0, 0))))))))
+      Iterator
+        .tabulate(100)(i =>
+          Row(
+            maybeNull5(
+              Row(
+                i,
+                maybeNull7(new VariantVal(Array[Byte](12, i.toByte), Array[Byte](1, 0, 0))),
+                i + 1,
+                maybeNull11(
+                  new VariantVal(Array[Byte](12, (i + 1).toByte), Array[Byte](1, 0, 0))))),
+            maybeNull7((0 until 10).map(j =>
+              new VariantVal(Array[Byte](12, (i + j).toByte), Array[Byte](1, 0, 0)))),
+            maybeNull13(
+              Map(
+                (
+                  i.toString,
+                  maybeNull17(
+                    new VariantVal(Array[Byte](12, (i + 2).toByte), Array[Byte](1, 0, 0))))))))
     }
   }
 
diff --git a/dev/.scalafmt.conf b/dev/.scalafmt.conf
index bb16145f4df7..29615c46c4bd 100644
--- a/dev/.scalafmt.conf
+++ b/dev/.scalafmt.conf
@@ -27,4 +27,4 @@ danglingParentheses.preset = false
 docstrings.style = Asterisk
 maxColumn = 98
 runner.dialect = scala213
-version = 3.8.2
+version = 3.8.5
diff --git a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/messages/AbbreviateSuite.scala b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/messages/AbbreviateSuite.scala
index 71dd8f2c45b5..94d92a264d20 100644
--- a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/messages/AbbreviateSuite.scala
+++ b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/messages/AbbreviateSuite.scala
@@ -231,21 +231,22 @@ class AbbreviateSuite extends SparkFunSuite {
     val sql = proto.SQL
       .newBuilder()
       .setQuery("SELECT * FROM T")
-      .putAllNamedArguments(Map(
-        "k1" -> proto.Expression
-          .newBuilder()
-          .setUnresolvedAttribute(proto.Expression.UnresolvedAttribute
+      .putAllNamedArguments(
+        Map(
+          "k1" -> proto.Expression
             .newBuilder()
-            .setUnparsedIdentifier("v1" * 4096)
-            .build())
-          .build(),
-        "k2" -> proto.Expression
-          .newBuilder()
-          .setUnresolvedAttribute(proto.Expression.UnresolvedAttribute
+            .setUnresolvedAttribute(proto.Expression.UnresolvedAttribute
+              .newBuilder()
+              .setUnparsedIdentifier("v1" * 4096)
+              .build())
+            .build(),
+          "k2" -> proto.Expression
             .newBuilder()
-            .setUnparsedIdentifier("v2" * 4096)
-            .build())
-          .build()).asJava)
+            .setUnresolvedAttribute(proto.Expression.UnresolvedAttribute
+              .newBuilder()
+              .setUnparsedIdentifier("v2" * 4096)
+              .build())
+            .build()).asJava)
       .build()
 
     val threshold = 1024


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
