This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new bc9c255e28c3 [SPARK-45694][SPARK-45695][SQL] Clean up deprecated API usage `View.force` and `ScalaNumberProxy.signum`
bc9c255e28c3 is described below
commit bc9c255e28c323035036a6f75a7bd984e0306b4c
Author: Tengfei Huang <[email protected]>
AuthorDate: Fri Nov 3 11:14:24 2023 +0800
[SPARK-45694][SPARK-45695][SQL] Clean up deprecated API usage `View.force` and `ScalaNumberProxy.signum`
### What changes were proposed in this pull request?
Clean up deprecated API usage:
1. `ScalaNumberProxy.signum` -> use `sign` instead;
2. `Map.view.mapValues.view.force.toMap` -> use `Map.view.mapValues.toMap` instead (see the sketch below).
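A minimal sketch of the two replacements with toy values (not taken from the Spark sources), runnable in a Scala 2.13 REPL:

```scala
// 1. ScalaNumberProxy.signum is deprecated in Scala 2.13; sign is the replacement.
val useCount: Int = -3
useCount.signum                           // deprecated, returns -1
useCount.sign                             // preferred, also returns -1

// 2. View.force is deprecated in Scala 2.13 (it returns an IndexedSeq there);
//    calling toMap on the lazy MapView materializes it directly.
val m = Map("a" -> 1, "b" -> 2)
m.view.mapValues(_ * 10).view.force.toMap // deprecated chain
m.view.mapValues(_ * 10).toMap            // equivalent strict Map
```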
### Why are the changes needed?
Eliminate compile warnings and stop using deprecated Scala APIs.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GA.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #43637 from ivoson/SPARK-45694.
Authored-by: Tengfei Huang <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.../sql/catalyst/expressions/EquivalentExpressions.scala | 2 +-
.../scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala | 11 ++++-------
2 files changed, 5 insertions(+), 8 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
index 8738015ce910..7f43b2b78478 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
@@ -191,7 +191,7 @@ class EquivalentExpressions(
val skip = useCount == 0 || expr.isInstanceOf[LeafExpression]
if (!skip && !updateExprInMap(expr, map, useCount)) {
- val uc = useCount.signum
+ val uc = useCount.sign
childrenToRecurse(expr).foreach(updateExprTree(_, map, uc))
commonChildrenToRecurse(expr).filter(_.nonEmpty).foreach(updateCommonExprs(_, map, uc))
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index c9425b24764c..017b20077cf6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -362,10 +362,8 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]]
case s: Seq[_] =>
s.map(mapChild)
case m: Map[_, _] =>
- // `map.mapValues().view.force` return `Map` in Scala 2.12 but return `IndexedSeq` in Scala
- // 2.13, call `toMap` method manually to compatible with Scala 2.12 and Scala 2.13
- // `mapValues` is lazy and we need to force it to materialize
- m.view.mapValues(mapChild).view.force.toMap
+ // `mapValues` is lazy and we need to force it to materialize by converting to Map
+ m.view.mapValues(mapChild).toMap
case arg: TreeNode[_] if containsChild(arg) => mapTreeNode(arg)
case Some(child) => Some(mapChild(child))
case nonChild: AnyRef => nonChild
@@ -784,13 +782,12 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]]
arg.asInstanceOf[BaseType].clone()
case Some(arg: TreeNode[_]) if containsChild(arg) =>
Some(arg.asInstanceOf[BaseType].clone())
- // `map.mapValues().view.force` return `Map` in Scala 2.12 but return `IndexedSeq` in Scala
- // 2.13, call `toMap` method manually to compatible with Scala 2.12 and Scala 2.13
+ // `mapValues` is lazy and we need to force it to materialize by converting to Map
case m: Map[_, _] => m.view.mapValues {
case arg: TreeNode[_] if containsChild(arg) =>
arg.asInstanceOf[BaseType].clone()
case other => other
- }.view.force.toMap // `mapValues` is lazy and we need to force it to materialize
+ }.toMap
case d: DataType => d // Avoid unpacking Structs
case args: LazyList[_] => args.map(mapChild).force // Force materialization on stream
case args: Iterable[_] => args.map(mapChild)
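For context on the comments rewritten above, a small sketch (not Spark code) of why an explicit materialization step is still needed even after dropping `View.force`: `view.mapValues` is lazy, so without `toMap` the mapping function would be re-evaluated on every traversal. Runnable in a Scala 2.13 REPL; the counter exists only to make the laziness visible.

```scala
var calls = 0
val base = Map("a" -> 1, "b" -> 2)

val lazyView = base.view.mapValues { v => calls += 1; v * 10 }
// calls == 0: nothing has been evaluated yet
lazyView.foreach(_ => ())   // evaluates both values (calls == 2)
lazyView.foreach(_ => ())   // evaluates them again (calls == 4)

val strictMap = base.view.mapValues(_ * 10).toMap
// toMap walks the view once and returns a strict Map, which is why the
// deprecated `.view.force` step can simply be dropped in favor of `.toMap`.
```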