This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e1842c7f3023 [SPARK-51263][CORE][SQL][TESTS] Clean up unnecessary `invokePrivate` method calls in test code
e1842c7f3023 is described below

commit e1842c7f30231bbb697979acc6320e881154659a
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Fri Feb 21 08:13:28 2025 -0800

    [SPARK-51263][CORE][SQL][TESTS] Clean up unnecessary `invokePrivate` method calls in test code
    
    ### What changes were proposed in this pull request?
    This PR cleans up unnecessary calls to the `org.scalatest.PrivateMethodTester.Invoker#invokePrivate` method in the test code, replacing those cases with direct function calls.
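
    For illustration, here is a minimal, self-contained sketch of the pattern being replaced (the `Counter` class, `increment` method, and package name are hypothetical stand-ins, not code from this commit):

    ```scala
    package com.example.spark

    import org.scalatest.PrivateMethodTester
    import org.scalatest.funsuite.AnyFunSuite

    class Counter {
      // Package-private, so suites under the same package can call it directly.
      private[spark] def increment(delta: Int): Int = delta + 1
    }

    class CounterSuite extends AnyFunSuite with PrivateMethodTester {
      private val counter = new Counter

      // Old pattern: look the method up by name and invoke it reflectively.
      private val _increment = PrivateMethod[Int](Symbol("increment"))

      test("reflective call via invokePrivate") {
        assert((counter invokePrivate _increment(1)) === 2)
      }

      // New pattern: a plain method call on the now-visible member.
      test("direct call") {
        assert(counter.increment(1) === 2)
      }
    }
    ```

    `invokePrivate` resolves the target through Java reflection, so the direct form is both simpler and checked at compile time when signatures change.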
    
    ### Why are the changes needed?
    Because the access scope of the functions involved has been widened, the `invokePrivate` calls that the original tests used to reach private methods are no longer necessary.
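
    Concretely, the enabling change is a widened access modifier on the member itself, along these lines (hypothetical host class and qualifier; the actual scope varies by class):

    ```scala
    package com.example.spark

    class ExecutorAllocationManagerLike {
      // Before: reachable from tests only via reflection.
      // private def maxNumExecutorsNeededPerResourceProfile(rpId: Int): Int = ???

      // After: visible to test code under the shared `spark` package prefix.
      private[spark] def maxNumExecutorsNeededPerResourceProfile(rpId: Int): Int = ???
    }
    ```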
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    - Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #50012 from LuciferYang/PrivateMethod-cleanup.
    
    Lead-authored-by: yangjie01 <yangji...@baidu.com>
    Co-authored-by: YangJie <yangji...@baidu.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../apache/spark/ExecutorAllocationManagerSuite.scala    | 16 ++++------------
 .../org/apache/spark/deploy/master/MasterSuiteBase.scala |  4 +---
 .../org/apache/spark/deploy/master/RecoverySuite.scala   |  6 +++---
 .../org/apache/spark/storage/BlockManagerSuite.scala     |  6 ++----
 .../spark/storage/ShuffleBlockFetcherIteratorSuite.scala |  6 ++----
 .../spark/sql/catalyst/expressions/TimeWindowSuite.scala | 16 ++++++----------
 6 files changed, 18 insertions(+), 36 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index e1da2b6dd9d6..f034e5da0a69 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -1911,14 +1911,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
    | Helper methods for accessing private methods and fields |
    * ------------------------------------------------------- */
 
-  private val _numExecutorsToAddPerResourceProfileId =
-    PrivateMethod[mutable.HashMap[Int, Int]](
-      Symbol("numExecutorsToAddPerResourceProfileId"))
-  private val _numExecutorsTargetPerResourceProfileId =
-    PrivateMethod[mutable.HashMap[Int, Int]](
-      Symbol("numExecutorsTargetPerResourceProfileId"))
-  private val _maxNumExecutorsNeededPerResourceProfile =
-    PrivateMethod[Int](Symbol("maxNumExecutorsNeededPerResourceProfile"))
   private val _addTime = PrivateMethod[Long](Symbol("addTime"))
   private val _schedule = PrivateMethod[Unit](Symbol("schedule"))
   private val _doUpdateRequest = PrivateMethod[Unit](Symbol("doUpdateRequest"))
@@ -1946,7 +1938,7 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
   private def numExecutorsToAdd(
       manager: ExecutorAllocationManager,
       rp: ResourceProfile): Int = {
-    val nmap = manager invokePrivate _numExecutorsToAddPerResourceProfileId()
+    val nmap = manager.numExecutorsToAddPerResourceProfileId
     nmap(rp.id)
   }
 
@@ -1963,7 +1955,7 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
   private def numExecutorsTarget(
       manager: ExecutorAllocationManager,
       rpId: Int): Int = {
-    val numMap = manager invokePrivate _numExecutorsTargetPerResourceProfileId()
+    val numMap = manager.numExecutorsTargetPerResourceProfileId
     numMap(rpId)
   }
 
@@ -1982,7 +1974,7 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
       rp: ResourceProfile
   ): Int = {
     val maxNumExecutorsNeeded =
-      manager invokePrivate _maxNumExecutorsNeededPerResourceProfile(rp.id)
+      manager.maxNumExecutorsNeededPerResourceProfile(rp.id)
     manager invokePrivate
       _addExecutorsToTarget(maxNumExecutorsNeeded, rp.id, updatesNeeded)
   }
@@ -2005,7 +1997,7 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
   private def maxNumExecutorsNeededPerResourceProfile(
       manager: ExecutorAllocationManager,
       rp: ResourceProfile): Int = {
-    manager invokePrivate _maxNumExecutorsNeededPerResourceProfile(rp.id)
+    manager.maxNumExecutorsNeededPerResourceProfile(rp.id)
   }
 
  private def adjustRequestedExecutors(manager: ExecutorAllocationManager): Int = {
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuiteBase.scala b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuiteBase.scala
index 2e159b828884..70291625cdb4 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuiteBase.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuiteBase.scala
@@ -440,13 +440,11 @@ trait MasterSuiteBase extends SparkFunSuite
   private val _drivers = PrivateMethod[HashSet[DriverInfo]](Symbol("drivers"))
   protected val _waitingDrivers =
     PrivateMethod[mutable.ArrayBuffer[DriverInfo]](Symbol("waitingDrivers"))
-  private val _state = PrivateMethod[RecoveryState.Value](Symbol("state"))
   protected val _newDriverId = PrivateMethod[String](Symbol("newDriverId"))
   protected val _newApplicationId = PrivateMethod[String](Symbol("newApplicationId"))
   protected val _maybeUpdateAppName =
     PrivateMethod[DriverDescription](Symbol("maybeUpdateAppName"))
   protected val _createApplication = PrivateMethod[ApplicationInfo](Symbol("createApplication"))
-  protected val _persistenceEngine = PrivateMethod[PersistenceEngine](Symbol("persistenceEngine"))
 
   protected val workerInfo = makeWorkerInfo(512, 10)
   private val workerInfos = Array(workerInfo, workerInfo, workerInfo)
@@ -567,7 +565,7 @@ trait MasterSuiteBase extends SparkFunSuite
   }
 
   protected def getState(master: Master): RecoveryState.Value = {
-    master.invokePrivate(_state())
+    master.state
   }
 }
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/RecoverySuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/RecoverySuite.scala
index 18b22e7352c9..f8beee7b2a34 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/RecoverySuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/RecoverySuite.scala
@@ -482,7 +482,7 @@ class RecoverySuite extends MasterSuiteBase {
     var master: Master = null
     try {
       master = makeAliveMaster(conf)
-      val e = master.invokePrivate(_persistenceEngine()).asInstanceOf[FileSystemPersistenceEngine]
+      val e = master.persistenceEngine.asInstanceOf[FileSystemPersistenceEngine]
       assert(e.codec.isEmpty)
     } finally {
       if (master != null) {
@@ -502,7 +502,7 @@ class RecoverySuite extends MasterSuiteBase {
     var master: Master = null
     try {
       master = makeAliveMaster(conf)
-      val e = master.invokePrivate(_persistenceEngine()).asInstanceOf[FileSystemPersistenceEngine]
+      val e = master.persistenceEngine.asInstanceOf[FileSystemPersistenceEngine]
       assert(e.codec.get.isInstanceOf[LZ4CompressionCodec])
     } finally {
       if (master != null) {
@@ -521,7 +521,7 @@ class RecoverySuite extends MasterSuiteBase {
     var master: Master = null
     try {
       master = makeAliveMaster(conf)
-      val e = master.invokePrivate(_persistenceEngine()).asInstanceOf[RocksDBPersistenceEngine]
+      val e = master.persistenceEngine.asInstanceOf[RocksDBPersistenceEngine]
       assert(e.serializer.isInstanceOf[JavaSerializer])
     } finally {
       if (master != null) {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 9fbe15402c8b..ed2a1e7fadfa 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -851,8 +851,7 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with PrivateMethodTe
     when(bmMaster.getLocations(mc.any[BlockId])).thenReturn(Seq(bmId1, bmId2, bmId3))
 
     val blockManager = makeBlockManager(128, "exec", bmMaster)
-    val sortLocations = PrivateMethod[Seq[BlockManagerId]](Symbol("sortLocations"))
-    val locations = blockManager invokePrivate sortLocations(bmMaster.getLocations("test"))
+    val locations = blockManager.sortLocations(bmMaster.getLocations("test"))
     assert(locations.map(_.host) === Seq(localHost, localHost, otherHost))
   }
 
@@ -874,8 +873,7 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with PrivateMethodTe
     val blockManager = makeBlockManager(128, "exec", bmMaster)
     blockManager.blockManagerId =
       BlockManagerId(SparkContext.DRIVER_IDENTIFIER, localHost, 1, Some(localRack))
-    val sortLocations = PrivateMethod[Seq[BlockManagerId]](Symbol("sortLocations"))
-    val locations = blockManager invokePrivate sortLocations(bmMaster.getLocations("test"))
+    val locations = blockManager.sortLocations(bmMaster.getLocations("test"))
     assert(locations.map(_.host) === Seq(localHost, localHost, otherHost, otherHost, otherHost))
     assert(locations.flatMap(_.topologyInfo)
       === Seq(localRack, localRack, localRack, otherRack, otherRack))
diff --git a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
index 033bd9d244cf..b730055e0c8a 100644
--- a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
@@ -37,7 +37,6 @@ import org.mockito.Mockito.{doThrow, mock, times, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 import org.roaringbitmap.RoaringBitmap
-import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.{MapOutputTracker, SparkFunSuite, TaskContext}
 import org.apache.spark.MapOutputTracker.SHUFFLE_PUSH_MAP_ID
@@ -51,7 +50,7 @@ import org.apache.spark.storage.ShuffleBlockFetcherIterator._
 import org.apache.spark.util.Utils
 
 
-class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodTester {
+class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite {
 
   private var transfer: BlockTransferService = _
   private var mapOutputTracker: MapOutputTracker = _
@@ -159,8 +158,7 @@ class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodT
     // Note: ShuffleBlockFetcherIterator wraps input streams in a BufferReleasingInputStream
     val wrappedInputStream = inputStream.asInstanceOf[BufferReleasingInputStream]
     verify(buffer, times(0)).release()
-    val delegateAccess = PrivateMethod[InputStream](Symbol("delegate"))
-    var in = wrappedInputStream.invokePrivate(delegateAccess())
+    var in = wrappedInputStream.delegate
     in match {
       case stream: CheckedInputStream =>
         val underlyingInputFiled = classOf[CheckedInputStream].getSuperclass.getDeclaredField("in")
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala
index eb89141e0300..0530c348b6ef 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala
@@ -20,15 +20,13 @@ package org.apache.spark.sql.catalyst.expressions
 import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 
-import org.scalatest.PrivateMethodTester
-
 import org.apache.spark.{SparkException, SparkFunSuite, SparkIllegalArgumentException, SparkThrowable}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{LongType, StructField, StructType, TimestampNTZType, TimestampType}
 
-class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper with PrivateMethodTester {
+class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper {
 
   test("time window is unevaluable") {
     intercept[SparkException] {
@@ -96,35 +94,33 @@ class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper with Priva
     }
   }
 
-  private val parseExpression = PrivateMethod[Long](Symbol("parseExpression"))
-
   test("parse sql expression for duration in microseconds - string") {
-    val dur = TimeWindow.invokePrivate(parseExpression(Literal("5 seconds")))
+    val dur = TimeWindow.parseExpression(Literal("5 seconds"))
     assert(dur.isInstanceOf[Long])
     assert(dur === 5000000)
   }
 
   test("parse sql expression for duration in microseconds - integer") {
-    val dur = TimeWindow.invokePrivate(parseExpression(Literal(100)))
+    val dur = TimeWindow.parseExpression(Literal(100))
     assert(dur.isInstanceOf[Long])
     assert(dur === 100)
   }
 
   test("parse sql expression for duration in microseconds - long") {
-    val dur = TimeWindow.invokePrivate(parseExpression(Literal.create(2L << 52, LongType)))
+    val dur = TimeWindow.parseExpression(Literal.create(2L << 52, LongType))
     assert(dur.isInstanceOf[Long])
     assert(dur === (2L << 52))
   }
 
   test("parse sql expression for duration in microseconds - invalid interval") 
{
     intercept[AnalysisException] {
-      TimeWindow.invokePrivate(parseExpression(Literal("2 apples")))
+      TimeWindow.parseExpression(Literal("2 apples"))
     }
   }
 
   test("parse sql expression for duration in microseconds - invalid 
expression") {
     intercept[AnalysisException] {
-      TimeWindow.invokePrivate(parseExpression(Rand(123)))
+      TimeWindow.parseExpression(Rand(123))
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
