This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 057d9fa4f986 [SPARK-52469][CORE] Use JEP 223 API to process Java version
057d9fa4f986 is described below
commit 057d9fa4f9869fa0b7e0f2d726f57deeee3085c1
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Jun 16 16:24:53 2025 -0700
[SPARK-52469][CORE] Use JEP 223 API to process Java version
### What changes were proposed in this pull request?
Use the [JEP 223](https://openjdk.org/jeps/223) API instead of `commons-lang3`
to process the Java version.
The Checkstyle and Scalastyle configuration files are also updated to prevent
future regressions.
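For illustration only, a minimal sketch of the replacement pattern (the `JavaVersionFlags` object name is hypothetical and not part of this change; the real definitions live in `Utils.scala` in the diff below):

```scala
// Illustrative sketch, not part of the patch: mapping a commons-lang3 check onto JEP 223.
object JavaVersionFlags {
  // Before (commons-lang3):
  //   import org.apache.commons.lang3.{JavaVersion, SystemUtils}
  //   val isJavaVersionAtLeast21 = SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_21)

  // After (JEP 223, java.lang.Runtime.Version):
  val isJavaVersionAtLeast21: Boolean = Runtime.version().feature() >= 21
  val isJavaVersionAtMost17: Boolean = Runtime.version().feature() <= 17
}
```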
### Why are the changes needed?
The [JEP 223](https://openjdk.org/jeps/223) API is available in all modern JDKs
and provides a simpler and richer API than
`org.apache.commons.lang3.JavaVersion` for processing the Java version.
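As a rough illustration of the richer API (this snippet is a standalone sketch, not part of the patch; the `17.0.14+7-LTS` string is taken from the removed `UtilsSuite` comment):

```scala
// Standalone sketch: inspecting Java versions with java.lang.Runtime.Version (JEP 223).
object Jep223Example {
  def main(args: Array[String]): Unit = {
    // Version of the running JVM.
    val running = Runtime.version()
    println(s"feature=${running.feature()} interim=${running.interim()} update=${running.update()}")

    // Parse an arbitrary version string, e.g. "17.0.14+7-LTS".
    val parsed = Runtime.Version.parse("17.0.14+7-LTS")
    assert(parsed.feature() == 17 && parsed.update() == 14)

    // Versions are directly comparable (here ignoring the optional trailing component).
    println(running.compareToIgnoreOptional(parsed) >= 0)
  }
}
```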
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass GHA.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #51174 from pan3793/SPARK-52469.
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
core/src/main/scala/org/apache/spark/util/Utils.scala | 9 +++++++--
core/src/test/scala/org/apache/spark/util/UtilsSuite.scala | 4 +---
dev/checkstyle.xml | 1 +
scalastyle-config.xml | 6 ++++++
.../apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala | 5 ++---
.../org/apache/spark/streaming/util/WriteAheadLogSuite.scala | 5 ++---
6 files changed, 19 insertions(+), 11 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index ea9b742fb2e1..c3b02feaa4c8 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -51,7 +51,7 @@ import com.google.common.net.InetAddresses
import jakarta.ws.rs.core.UriBuilder
import org.apache.commons.codec.binary.Hex
import org.apache.commons.io.IOUtils
-import org.apache.commons.lang3.{JavaVersion, SystemUtils}
+import org.apache.commons.lang3.SystemUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
import org.apache.hadoop.fs.audit.CommonAuditContext.currentAuditContext
@@ -1868,10 +1868,15 @@ private[spark] object Utils
*/
val isMac = SystemUtils.IS_OS_MAC_OSX
+ /**
+ * Whether the underlying Java version is at most 17.
+ */
+ val isJavaVersionAtMost17 = Runtime.version().feature() <= 17
+
/**
* Whether the underlying Java version is at least 21.
*/
- val isJavaVersionAtLeast21 = SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_21)
+ val isJavaVersionAtLeast21 = Runtime.version().feature() >= 21
/**
* Whether the underlying operating system is Mac OS X and processor is Apple Silicon.
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 077dd489378f..f85fa9da09fc 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -527,13 +527,11 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties {
// The following 3 scenarios are only for the method: createDirectory(File)
// 6. Symbolic link
- // JAVA_RUNTIME_VERSION is like "17.0.14+7-LTS"
- lazy val javaVersion = Runtime.Version.parse(SystemUtils.JAVA_RUNTIME_VERSION)
val scenario6 = java.nio.file.Files.createSymbolicLink(new File(testDir, "scenario6")
.toPath, scenario1.toPath).toFile
if (Utils.isJavaVersionAtLeast21) {
assert(Utils.createDirectory(scenario6))
- } else if (javaVersion.feature() == 17 && javaVersion.update() >= 14) {
+ } else if (Runtime.version().feature() == 17 && Runtime.version().update() >= 14) {
// SPARK-50946: Java 17.0.14 includes JDK-8294193, so scenario6 can succeed.
assert(Utils.createDirectory(scenario6))
} else {
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index 6c50718e27fe..e4ae2afec42e 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -183,6 +183,7 @@
<module name="IllegalImport">
<property name="illegalPkgs" value="org.apache.log4j" />
<property name="illegalPkgs" value="org.apache.commons.lang" />
+ <property name="illegalClasses"
value="org.apache.commons.lang3.JavaVersion" />
</module>
<module name="RegexpSinglelineJava">
<property name="format" value="new URL\("/>
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index ca3d507ed3a4..91f960284876 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -288,6 +288,12 @@ This file is divided into 3 sections:
of Commons Lang 2 (package org.apache.commons.lang.*)</customMessage>
</check>
+ <check customId="commonslang3javaversion" level="error"
class="org.scalastyle.file.RegexChecker" enabled="true">
+ <parameters><parameter
name="regex">org\.apache\.commons\.lang3\..*JavaVersion</parameter></parameters>
+ <customMessage>Use JEP 223 API (java.lang.Runtime.Version) instead of
+ Commons Lang 3 JavaVersion
(org.apache.commons.lang3.JavaVersion)</customMessage>
+ </check>
+
<check customId="uribuilder" level="error"
class="org.scalastyle.file.RegexChecker" enabled="true">
<parameters><parameter
name="regex">UriBuilder\.fromUri</parameter></parameters>
<customMessage>Use Utils.getUriBuilder instead.</customMessage>
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 32f2c6d508d3..58ca4a4ad1cf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -24,7 +24,6 @@ import java.nio.file.{Files, Paths}
import scala.sys.process._
import scala.util.control.NonFatal
-import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.hadoop.conf.Configuration
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._
@@ -200,7 +199,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
if (PROCESS_TABLES.testingVersions.isEmpty) {
if (PROCESS_TABLES.isPythonVersionAvailable) {
- if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
+ if (Utils.isJavaVersionAtMost17) {
logError("Fail to get the latest Spark versions to test.")
} else {
logInfo("Skip tests because old Spark versions don't support Java
21.")
@@ -265,7 +264,7 @@ object PROCESS_TABLES extends QueryTest with SQLTestUtils {
"https://dist.apache.org/repos/dist/release")
// Tests the latest version of every release line if Java version is at most 17.
val testingVersions: Seq[String] = if (isPythonVersionAvailable &&
- SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
+ Utils.isJavaVersionAtMost17) {
import scala.io.Source
val sparkVersionPattern = """<a href="spark-(\d.\d.\d)/">""".r
try Utils.tryWithResource(
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
index cf9d5b7387f7..2d68e7572d9d 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
@@ -27,7 +27,6 @@ import scala.concurrent.duration._
import scala.jdk.CollectionConverters._
import scala.language.implicitConversions
-import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.mockito.ArgumentCaptor
@@ -477,7 +476,7 @@ class BatchedWriteAheadLogSuite extends CommonWriteAheadLogTests(
val batchedWal = new BatchedWriteAheadLog(wal, sparkConf)
val e = intercept[SparkException] {
- val buffer = if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
+ val buffer = if (Utils.isJavaVersionAtMost17) {
mock[ByteBuffer]
} else {
// SPARK-40731: Use a 0 size `ByteBuffer` instead of `mock[ByteBuffer]`
@@ -553,7 +552,7 @@ class BatchedWriteAheadLogSuite extends CommonWriteAheadLogTests(
batchedWal.close()
verify(wal, times(1)).close()
- val buffer = if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
+ val buffer = if (Utils.isJavaVersionAtMost17) {
mock[ByteBuffer]
} else {
// SPARK-40731: Use a 0 size `ByteBuffer` instead of `mock[ByteBuffer]`
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]