This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new ec16e6edf3c6 [SPARK-52944][CORE][SQL][YARN] Fix invalid assertions in tests
ec16e6edf3c6 is described below

commit ec16e6edf3c69ebb257a4fcbb965000491d74197
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Fri Jul 25 22:15:44 2025 +0800

    [SPARK-52944][CORE][SQL][YARN] Fix invalid assertions in tests
    
    ### What changes were proposed in this pull request?
    This PR fixes some invalid assertions in the test code, mainly addressing two types of issues:
    
    1. Forgetting to use `assert`. For example:
    
    
https://github.com/apache/spark/blob/5a9929c32ef8aafd275a3cf4797bb0ba9a6e61e2/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala#L342
    
    Here, the intent is clearly to assert that `dt.simpleString equals "struct<c1:string>"`, but the `assert` call was forgotten, leaving a bare Boolean expression whose result is silently discarded.
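
    A minimal, self-contained sketch of this class of fix (here `dt` is built directly rather than parsed from JSON as in the actual test):

    ```
    import org.apache.spark.sql.types._

    val dt = StructType(StructField("c1", StringType) :: Nil)

    // Before: a bare Boolean expression; its result is silently
    // discarded, so this line can never fail the test
    dt.simpleString equals "struct<c1:string>"

    // After: wrapped in assert, so a mismatch now fails the test
    assert(dt.simpleString equals "struct<c1:string>")
    ```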
    
    2. Incorrect line breaks in `should be` statements, causing what was meant to be a single assertion to be parsed as two unrelated expressions. For example:
    
    
https://github.com/apache/spark/blob/5a9929c32ef8aafd275a3cf4797bb0ba9a6e61e2/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala#L72-L73
    
    Here, it is clearly intended to make an assertion similar to the following:
    
    ```
    filesCreatedByShuffle.map(_.getName) should be Set("shuffle_0_0_0.data", "shuffle_0_0_0.index")
    ```
    
    However, due to the incorrect line break, the code compiles but never actually performs the assertion.
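
    To sketch why this compiles: in ScalaTest's Matchers DSL, `x should be` is already a complete expression on its own (it yields an intermediate matcher object), so the `Set(...)` on the following line parses as a separate statement whose value is thrown away. Keeping the opening parenthesis on the same line attaches the expected value to the matcher (identifiers reused from the test above):

    ```
    // Broken: the first line is a complete, assertion-free ScalaTest
    // expression; the Set literal below is evaluated and discarded.
    filesCreatedByShuffle.map(_.getName) should be
    Set("shuffle_0_0_0.data", "shuffle_0_0_0.index")

    // Fixed: the expected value stays attached to the matcher,
    // so the comparison actually runs and can fail the test.
    filesCreatedByShuffle.map(_.getName) should be(
      Set("shuffle_0_0_0.data", "shuffle_0_0_0.index"))
    ```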
    
    In addition, after restoring these assertions, this PR also repairs the test cases that then began to fail because their expectations had gone stale.
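
    For example, once the `dt.json` comparison in `DataTypeSuite` actually executes, its expectation itself needs updating (see the hunk below): the parsed field is nullable, and the triple-quoted expected string carries leading and trailing newlines that `dt.json` does not emit, hence the added `.trim`:

    ```
    assert(dt.json equals
      """
        |{"type":"struct","fields":[{"name":"c1","type":"string","nullable":true,"metadata":{}}]}
        |""".stripMargin.trim)
    ```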
    
    ### Why are the changes needed?
    Fix invalid assertions in tests
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    - Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #51654 from LuciferYang/unused-expressions.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
    (cherry picked from commit 90fd991d992b2965a70fd9ebb03fd5f9a41c1114)
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .../scala/org/apache/spark/SortShuffleSuite.scala  |  4 +-
 .../org/apache/spark/deploy/SparkSubmitSuite.scala | 16 +++----
 .../network/yarn/YarnShuffleServiceSuite.scala     | 56 +++++++++++-----------
 .../expressions/UnsafeRowConverterSuite.scala      |  8 ++--
 .../org/apache/spark/sql/types/DataTypeSuite.scala |  8 ++--
 .../streaming/StreamingDeduplicationSuite.scala    |  2 +-
 6 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
index 8c6a876e8c1b..b57fda538ff4 100644
--- a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
@@ -69,8 +69,8 @@ class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
     shuffledRdd.count()
    // Ensure that the shuffle actually created files that will need to be cleaned up
     val filesCreatedByShuffle = getAllFiles -- filesBeforeShuffle
-    filesCreatedByShuffle.map(_.getName) should be
-    Set("shuffle_0_0_0.data", "shuffle_0_0_0.index")
+    filesCreatedByShuffle.map(_.getName) should be(
+      Set("shuffle_0_0_0.data", "shuffle_0_0_0.checksum.ADLER32", "shuffle_0_0_0.index"))
     // Check that the cleanup actually removes the files
     sc.env.blockManager.master.removeShuffle(0, blocking = true)
     for (file <- filesCreatedByShuffle) {
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index bd34e6f2bba3..fbdb58791535 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -1189,14 +1189,14 @@ class SparkSubmitSuite
 
             val appArgs = new SparkSubmitArguments(args)
             val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
-            conf.get("spark.yarn.dist.jars").split(",").toSet should be
-            (Set(jar1.toURI.toString, jar2.toURI.toString))
-            conf.get("spark.yarn.dist.files").split(",").toSet should be
-            (Set(file1.toURI.toString, file2.toURI.toString))
-            conf.get("spark.yarn.dist.pyFiles").split(",").toSet should be
-            (Set(pyFile1.getAbsolutePath, pyFile2.getAbsolutePath))
-            conf.get("spark.yarn.dist.archives").split(",").toSet should be
-            (Set(archive1.toURI.toString, archive2.toURI.toString))
+            conf.get("spark.yarn.dist.jars").split(",").toSet should be(
+              Set(jar1.toURI.toString, jar2.toURI.toString))
+            conf.get("spark.yarn.dist.files").split(",").toSet should be(
+              Set(file1.toURI.toString, file2.toURI.toString))
+            conf.get("spark.yarn.dist.pyFiles").split(",").toSet should be(
+              Set(pyFile1.toURI.toString, pyFile2.toURI.toString))
+            conf.get("spark.yarn.dist.archives").split(",").toSet should be(
+              Set(archive1.toURI.toString, archive2.toURI.toString))
           }
         }
       }
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
index 56d7b7ff6a09..076098939362 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
@@ -616,35 +616,35 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     val mergeManager1DB = ShuffleTestAccessor.mergeManagerDB(mergeManager1)
     ShuffleTestAccessor.recoveryFile(mergeManager1) should be (mergeMgrFile)
 
-    ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1).size() equals 0
-    ShuffleTestAccessor.reloadAppShuffleInfo(
-      mergeManager1, mergeManager1DB).size() equals 0
+    assert(ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1).size() equals 0)
+    assert(ShuffleTestAccessor.reloadAppShuffleInfo(
+      mergeManager1, mergeManager1DB).size() equals 0)
 
     mergeManager1.registerExecutor(app1Id.toString, mergedShuffleInfo1)
     var appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     var appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 1
+    assert(appShuffleInfoAfterReload.size() equals 1)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
 
    mergeManager1.registerExecutor(app2Attempt1Id.toString, mergedShuffleInfo2Attempt1)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 2
+    assert(appShuffleInfo.size() equals 2)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfo.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 2
+    assert(appShuffleInfoAfterReload.size() equals 2)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfoAfterReload.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
 
    mergeManager1.registerExecutor(app3IdNoAttemptId.toString, mergedShuffleInfo3NoAttemptId)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 3
+    assert(appShuffleInfo.size() equals 3)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfo.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
@@ -652,7 +652,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
      app3IdNoAttemptId.toString).getAppPathsInfo should be (appPathsInfo3NoAttempt)
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 3
+    assert(appShuffleInfoAfterReload.size() equals 3)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfoAfterReload.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
@@ -661,7 +661,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
 
    mergeManager1.registerExecutor(app2Attempt2Id.toString, mergedShuffleInfo2Attempt2)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 3
+    assert(appShuffleInfo.size() equals 3)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfo.get(
      app2Attempt2Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt2)
@@ -669,7 +669,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
      app3IdNoAttemptId.toString).getAppPathsInfo should be (appPathsInfo3NoAttempt)
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 3
+    assert(appShuffleInfoAfterReload.size() equals 3)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfoAfterReload.get(
      app2Attempt2Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt2)
@@ -678,14 +678,14 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
 
     mergeManager1.applicationRemoved(app2Attempt2Id.toString, true)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 2
+    assert(appShuffleInfo.size() equals 2)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     assert(!appShuffleInfo.containsKey(app2Attempt2Id.toString))
     appShuffleInfo.get(
      app3IdNoAttemptId.toString).getAppPathsInfo should be (appPathsInfo3NoAttempt)
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 2
+    assert(appShuffleInfoAfterReload.size() equals 2)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     assert(!appShuffleInfoAfterReload.containsKey(app2Attempt2Id.toString))
     appShuffleInfoAfterReload.get(
@@ -725,9 +725,9 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     val mergeManager1DB = ShuffleTestAccessor.mergeManagerDB(mergeManager1)
     ShuffleTestAccessor.recoveryFile(mergeManager1) should be (mergeMgrFile)
 
-    ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1).size() equals 0
-    ShuffleTestAccessor.reloadAppShuffleInfo(
-      mergeManager1, mergeManager1DB).size() equals 0
+    assert(ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1).size() equals 0)
+    assert(ShuffleTestAccessor.reloadAppShuffleInfo(
+      mergeManager1, mergeManager1DB).size() equals 0)
 
     mergeManager1.registerExecutor(app1Id.toString, mergedShuffleInfo1)
    mergeManager1.registerExecutor(app2Attempt1Id.toString, mergedShuffleInfo2Attempt1)
@@ -737,7 +737,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     prepareAppShufflePartition(mergeManager1, partitionId2, 2, "4")
 
     var appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 2
+    assert(appShuffleInfo.size() equals 2)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfo.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
@@ -745,7 +745,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
    assert(!appShuffleInfo.get(app2Attempt1Id.toString).getShuffles.get(2).isFinalized)
     var appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 2
+    assert(appShuffleInfoAfterReload.size() equals 2)
    appShuffleInfoAfterReload.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfoAfterReload.get(
      app2Attempt1Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
@@ -765,12 +765,12 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
 
     mergeManager1.applicationRemoved(app1Id.toString, true)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
    assert(!appShuffleInfo.containsKey(app1Id.toString))
    assert(appShuffleInfo.get(app2Attempt1Id.toString).getShuffles.get(2).isFinalized)
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 1
+    assert(appShuffleInfoAfterReload.size() equals 1)
    assert(!appShuffleInfoAfterReload.containsKey(app1Id.toString))
    assert(appShuffleInfoAfterReload.get(app2Attempt1Id.toString).getShuffles.get(2).isFinalized)
 
@@ -844,7 +844,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     prepareAppShufflePartition(mergeManager1, partitionId2, 2, "4")
 
     var appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 2
+    assert(appShuffleInfo.size() equals 2)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     appShuffleInfo.get(
       app2Id.toString).getAppPathsInfo should be (appPathsInfo2Attempt1)
@@ -867,20 +867,20 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     mergeManager1.applicationRemoved(app1Id.toString, true)
 
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
     assert(!appShuffleInfo.containsKey(app1Id.toString))
     assert(appShuffleInfo.get(app2Id.toString).getShuffles.get(2).isFinalized)
     // Clear the AppsShuffleInfo hashmap and reload the hashmap from DB
     appShuffleInfoAfterReload =
       ShuffleTestAccessor.reloadAppShuffleInfo(mergeManager1, mergeManager1DB)
-    appShuffleInfoAfterReload.size() equals 1
+    assert(appShuffleInfoAfterReload.size() equals 1)
    assert(!appShuffleInfoAfterReload.containsKey(app1Id.toString))
    assert(appShuffleInfoAfterReload.get(app2Id.toString).getShuffles.get(2).isFinalized)
 
     // Register application app1Id again and reload the DB again
     mergeManager1.registerExecutor(app1Id.toString, mergedShuffleInfo1)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 2
+    assert(appShuffleInfo.size() equals 2)
    appShuffleInfo.get(app1Id.toString).getAppPathsInfo should be (appPathsInfo1)
     assert(appShuffleInfo.get(app1Id.toString).getShuffles.isEmpty)
     assert(appShuffleInfo.get(app2Id.toString).getShuffles.get(2).isFinalized)
@@ -924,7 +924,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     prepareAppShufflePartition(mergeManager1, partitionId1, 2, "4")
 
     var appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
     appShuffleInfo.get(
       app1Id.toString).getAppPathsInfo should be (appPathsInfo1Attempt1)
     assert(!appShuffleInfo.get(app1Id.toString).getShuffles.get(2).isFinalized)
@@ -938,7 +938,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     prepareAppShufflePartition(mergeManager1, partitionId2, 2, "4")
 
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
     appShuffleInfo.get(
       app1Id.toString).getAppPathsInfo should be (appPathsInfo1Attempt2)
     assert(!appShuffleInfo.get(app1Id.toString).getShuffles.get(2).isFinalized)
@@ -973,7 +973,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
    val mergeManager3 = s3.shuffleMergeManager.asInstanceOf[RemoteBlockPushResolver]
     val mergeManager3DB = ShuffleTestAccessor.mergeManagerDB(mergeManager3)
     appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager3)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
     appShuffleInfo.get(
       app1Id.toString).getAppPathsInfo should be (appPathsInfo1Attempt2)
     assert(appShuffleInfo.get(app1Id.toString).getShuffles.get(2).isFinalized)
@@ -1014,7 +1014,7 @@ abstract class YarnShuffleServiceSuite extends SparkFunSuite with Matchers {
     mergeManager1.registerExecutor(app1Id.toString, mergedShuffleInfo1Attempt2)
 
     val appShuffleInfo = ShuffleTestAccessor.getAppsShuffleInfo(mergeManager1)
-    appShuffleInfo.size() equals 1
+    assert(appShuffleInfo.size() equals 1)
     appShuffleInfo.get(
       app1Id.toString).getAppPathsInfo should be (appPathsInfo1Attempt2)
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
index 1fbdcd97a346..37ef843665fa 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
@@ -114,14 +114,14 @@ class UnsafeRowConverterSuite extends SparkFunSuite with Matchers with Expressio
     // Date is represented as Int in unsafeRow
    assert(DateTimeUtils.toJavaDate(unsafeRow.getInt(2)) === Date.valueOf("1970-01-01"))
     // Timestamp is represented as Long in unsafeRow
-    DateTimeUtils.toJavaTimestamp(unsafeRow.getLong(3)) should be
-    (Timestamp.valueOf("2015-05-08 08:10:25"))
+    DateTimeUtils.toJavaTimestamp(unsafeRow.getLong(3)) should be(
+      Timestamp.valueOf("2015-05-08 08:10:25"))
 
     unsafeRow.setInt(2, DateTimeUtils.fromJavaDate(Date.valueOf("2015-06-22")))
    assert(DateTimeUtils.toJavaDate(unsafeRow.getInt(2)) === Date.valueOf("2015-06-22"))
    unsafeRow.setLong(3, DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf("2015-06-22 08:10:25")))
-    DateTimeUtils.toJavaTimestamp(unsafeRow.getLong(3)) should be
-    (Timestamp.valueOf("2015-06-22 08:10:25"))
+    DateTimeUtils.toJavaTimestamp(unsafeRow.getLong(3)) should be(
+      Timestamp.valueOf("2015-06-22 08:10:25"))
   }
 
   testBothCodegenAndInterpreted(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index e7e7acc100b0..c4287df94d61 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -339,11 +339,11 @@ class DataTypeSuite extends SparkFunSuite {
         |""".stripMargin
     val dt = DataType.fromJson(schema)
 
-    dt.simpleString equals "struct<c1:string>"
-    dt.json equals
+    assert(dt.simpleString equals "struct<c1:string>")
+    assert(dt.json equals
       """
-        |{"type":"struct","fields":[{"name":"c1","type":"string","nullable":false,"metadata":{}}]}
-        |""".stripMargin
+        |{"type":"struct","fields":[{"name":"c1","type":"string","nullable":true,"metadata":{}}]}
+        |""".stripMargin.trim)
   }
 
   def checkDefaultSize(dataType: DataType, expectedDefaultSize: Int): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
index 040b99e55cb0..d47869f85fc2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
@@ -321,7 +321,7 @@ class StreamingDeduplicationSuite extends StateStoreMetricsTest {
         },
         AssertOnQuery { q =>
           eventually(timeout(streamingTimeout)) {
-            q.lastProgress.sink.numOutputRows == 0L
+            assert(q.lastProgress.sink.numOutputRows == 0L)
             true
           }
         }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
