This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new 3eb94de Revert "[SPARK-34326][CORE][SQL] Fix UTs added in SPARK-31793
depending on the length of temp path"
3eb94de is described below
commit 3eb94de8ad11e535351fd04a780f1f832f8c39f6
Author: HyukjinKwon <[email protected]>
AuthorDate: Wed Feb 3 12:33:16 2021 +0900
Revert "[SPARK-34326][CORE][SQL] Fix UTs added in SPARK-31793 depending on
the length of temp path"
This reverts commit d9e54381e32bbc86247cf18b7d2ca1e3126bd917.
---
.../scala/org/apache/spark/util/UtilsSuite.scala | 6 ------
.../DataSourceScanExecRedactionSuite.scala | 21 +++------------------
2 files changed, 3 insertions(+), 24 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 18ff960..8fb4080 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -1308,12 +1308,6 @@ class UtilsSuite extends SparkFunSuite with
ResetSystemProperties with Logging {
assert(Utils.buildLocationMetadata(paths, 10) == "[path0, path1]")
assert(Utils.buildLocationMetadata(paths, 15) == "[path0, path1, path2]")
assert(Utils.buildLocationMetadata(paths, 25) == "[path0, path1, path2,
path3]")
-
- // edge-case: we should consider the fact non-path chars including '[' and
", " are accounted
- // 1. second path is not added due to the addition of '['
- assert(Utils.buildLocationMetadata(paths, 6) == "[path0]")
- // 2. third path is not added due to the addition of ", "
- assert(Utils.buildLocationMetadata(paths, 13) == "[path0, path1]")
}
test("checkHost supports both IPV4 and IPV6") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala
index 07bacad..c99be98 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala
@@ -137,24 +137,9 @@ class DataSourceScanExecRedactionSuite extends
DataSourceScanRedactionTest {
assert(location.isDefined)
// The location metadata should at least contain one path
assert(location.get.contains(paths.head))
-
- // The location metadata should have bracket wrapping paths
- assert(location.get.indexOf('[') > -1)
- assert(location.get.indexOf(']') > -1)
-
- // extract paths in location metadata (removing classname, brackets,
separators)
- val pathsInLocation = location.get.substring(
- location.get.indexOf('[') + 1, location.get.indexOf(']')).split(",
").toSeq
-
- // If the temp path length is less than (stop appending threshold - 1),
say, 100 - 1 = 99,
- // location should include more than one paths. Otherwise location
should include only one
- // path.
- // (Note we apply subtraction with 1 to count start bracket '['.)
- if (paths.head.length < 99) {
- assert(pathsInLocation.size >= 2)
- } else {
- assert(pathsInLocation.size == 1)
- }
+ // If the temp path length is larger than 100, the metadata length
should not exceed
+ // twice the length; otherwise, the metadata length should be
controlled within 200.
+ assert(location.get.length < Math.max(paths.head.length, 100) * 2)
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]