This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 840306d867e2 [SPARK-45643][CORE][SQL] Replace
`s.c.mutable.MapOps#transform` with `s.c.mutable.MapOps#mapValuesInPlace`
840306d867e2 is described below
commit 840306d867e2538d2e29c2be0251fcecc3ac9e23
Author: yangjie01 <[email protected]>
AuthorDate: Tue Oct 24 09:46:21 2023 -0700
[SPARK-45643][CORE][SQL] Replace `s.c.mutable.MapOps#transform` with
`s.c.mutable.MapOps#mapValuesInPlace`
### What changes were proposed in this pull request?
This PR replaces `s.c.mutable.MapOps#transform` with
`s.c.mutable.MapOps#mapValuesInPlace`, because `transform` has been marked as
deprecated since Scala 2.13.0.
```scala
@deprecated("Use mapValuesInPlace instead", "2.13.0")
@inline final def transform(f: (K, V) => V): this.type =
mapValuesInPlace(f)
```
### Why are the changes needed?
Clean up deprecated API usage.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GitHub Actions
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #43500 from LuciferYang/transform-2-mapValuesInPlace.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala | 2 +-
.../scala/org/apache/spark/sql/execution/stat/FrequentItems.scala | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git
a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index 441bf60e4891..9b808161bec9 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -630,7 +630,7 @@ private[spark] class ExecutorAllocationManager(
private def onSchedulerQueueEmpty(): Unit = synchronized {
logDebug("Clearing timer to add executors because there are no more
pending tasks")
addTime = NOT_SET
- numExecutorsToAddPerResourceProfileId.transform { case (_, _) => 1 }
+ numExecutorsToAddPerResourceProfileId.mapValuesInPlace { case (_, _) => 1 }
}
private case class StageAttempt(stageId: Int, stageAttemptId: Int) {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala
index 951a782b1426..c05562fc083c 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala
@@ -97,9 +97,9 @@ case class CollectFrequentItems(
if (remainder >= 0) {
map += key -> count // something will get kicked out, so we can add
this
map.filterInPlace((k, v) => v > minCount)
- map.transform((k, v) => v - minCount)
+ map.mapValuesInPlace((k, v) => v - minCount)
} else {
- map.transform((k, v) => v - count)
+ map.mapValuesInPlace((k, v) => v - count)
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]