This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 63e58d24269f [SPARK-52859] Add `SparkSystemUtils` trait
63e58d24269f is described below

commit 63e58d24269fe509b237e9dafdb4884f5677c960
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Fri Jul 18 08:03:15 2025 -0700

    [SPARK-52859] Add `SparkSystemUtils` trait
    
    ### What changes were proposed in this pull request?
    
    This PR aims to introduce `SparkSystemUtils` trait.
    
    ### Why are the changes needed?
    
    To modularize `OS` name and arch related utilities as an independent trait, `SparkSystemUtils`.
    
    1. Move the existing `isMac`, `isMacOnAppleSilicon`, `isUnix`, and `isWindows` to the new trait and reimplement them.
    2. Add a new variable, `isLinux`, to the trait for completeness.
    3. Use it as much as possible (a usage sketch follows below).
      - For example, we cannot yet use `SparkSystemUtils` (and `Utils`) in the `connect-client-jvm` module under the current dependency structure.
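    
    A minimal usage sketch (illustrative only; `SparkSystemUtilsExample` is a hypothetical caller, not part of this commit). Scala code reads the flags from the `SparkSystemUtils` object or from `Utils`, which now mixes the trait in, while Java tests go through `SparkSystemUtils$.MODULE$` as shown in the diff below:
    
    ```scala
    package org.apache.spark.util
    
    // Hypothetical example: the trait is `private[spark]`, so callers must
    // live inside the org.apache.spark package tree.
    object SparkSystemUtilsExample {
      def main(args: Array[String]): Unit = {
        // Each flag is a plain val, computed once from the `os.name` and
        // `os.arch` system properties.
        if (SparkSystemUtils.isMacOnAppleSilicon) {
          println("Running on an Apple Silicon Mac")
        }
        // `Utils` extends the trait, so existing call sites keep working.
        println(s"isLinux=${Utils.isLinux}, isWindows=${Utils.isWindows}")
      }
    }
    ```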
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this is `private[spark]` scope refactoring.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #51545 from dongjoon-hyun/SPARK-52859.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../spark/util/kvstore/LevelDBIteratorSuite.java   |  5 +-
 .../apache/spark/util/kvstore/LevelDBSuite.java    |  5 +-
 .../apache/spark/network/util/DBProviderSuite.java |  5 +-
 .../org/apache/spark/network/util/JavaUtils.java   |  8 +--
 .../org/apache/spark/util/SparkSystemUtils.scala   | 57 ++++++++++++++++++++++
 .../main/scala/org/apache/spark/util/Utils.scala   | 24 +--------
 .../org/apache/spark/benchmark/Benchmark.scala     |  5 +-
 .../scala/org/apache/spark/util/UtilsSuite.scala   |  3 +-
 8 files changed, 75 insertions(+), 37 deletions(-)

diff --git a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBIteratorSuite.java b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBIteratorSuite.java
index 6ff628665445..9288e0f3fbf7 100644
--- a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBIteratorSuite.java
+++ b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBIteratorSuite.java
@@ -20,10 +20,11 @@ package org.apache.spark.util.kvstore;
 import java.io.File;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.SystemUtils;
 import org.junit.jupiter.api.AfterAll;
 import static org.junit.jupiter.api.Assumptions.assumeFalse;
 
+import org.apache.spark.util.SparkSystemUtils$;
+
 public class LevelDBIteratorSuite extends DBIteratorSuite {
 
   private static File dbpath;
@@ -41,7 +42,7 @@ public class LevelDBIteratorSuite extends DBIteratorSuite {
 
   @Override
   protected KVStore createStore() throws Exception {
-    assumeFalse(SystemUtils.IS_OS_MAC_OSX && SystemUtils.OS_ARCH.equals("aarch64"));
+    assumeFalse(SparkSystemUtils$.MODULE$.isMacOnAppleSilicon());
     dbpath = File.createTempFile("test.", ".ldb");
     dbpath.delete();
     db = new LevelDB(dbpath);
diff --git a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java
index 040ccce70b5a..8db239861cf3 100644
--- a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java
+++ b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java
@@ -31,12 +31,13 @@ import java.util.stream.StreamSupport;
 
 import com.google.common.collect.ImmutableSet;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.SystemUtils;
 import org.iq80.leveldb.DBIterator;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
+import org.apache.spark.util.SparkSystemUtils$;
+
 import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assumptions.assumeFalse;
 
@@ -57,7 +58,7 @@ public class LevelDBSuite {
 
   @BeforeEach
   public void setup() throws Exception {
-    assumeFalse(SystemUtils.IS_OS_MAC_OSX && SystemUtils.OS_ARCH.equals("aarch64"));
+    assumeFalse(SparkSystemUtils$.MODULE$.isMacOnAppleSilicon());
     dbpath = File.createTempFile("test.", ".ldb");
     dbpath.delete();
     db = new LevelDB(dbpath);
diff --git a/common/network-common/src/test/java/org/apache/spark/network/util/DBProviderSuite.java b/common/network-common/src/test/java/org/apache/spark/network/util/DBProviderSuite.java
index 81bfc55264c4..b809034f1f3b 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/util/DBProviderSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/util/DBProviderSuite.java
@@ -18,7 +18,6 @@
 package org.apache.spark.network.util;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.lang3.SystemUtils;
 import org.apache.spark.network.shuffledb.DBBackend;
 import org.apache.spark.network.shuffledb.StoreVersion;
 import org.junit.jupiter.api.Assertions;
@@ -27,6 +26,8 @@ import org.junit.jupiter.api.Test;
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.spark.util.SparkSystemUtils$;
+
 import static org.junit.jupiter.api.Assumptions.assumeFalse;
 
 public class DBProviderSuite {
@@ -38,7 +39,7 @@ public class DBProviderSuite {
 
   @Test
   public void testLevelDBCheckVersionFailed() throws IOException, InterruptedException {
-    assumeFalse(SystemUtils.IS_OS_MAC_OSX && SystemUtils.OS_ARCH.equals("aarch64"));
+    assumeFalse(SparkSystemUtils$.MODULE$.isMacOnAppleSilicon());
     testCheckVersionFailed(DBBackend.LEVELDB, "leveldb");
   }
 
diff --git a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
index 94f9f02ed2c9..93953cc0252d 100644
--- a/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
+++ b/common/utils/src/main/java/org/apache/spark/network/util/JavaUtils.java
@@ -29,12 +29,11 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.lang3.SystemUtils;
-
 import org.apache.spark.internal.SparkLogger;
 import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
+import org.apache.spark.util.SparkSystemUtils$;
 
 /**
  * General utilities available in the network package. Many of these are sourced from Spark's
@@ -110,8 +109,9 @@ public class JavaUtils {
     // On Unix systems, use operating system command to run faster
     // If that does not work out, fallback to the Java IO way
     // We exclude Apple Silicon test environment due to the limited resource issues.
-    if (SystemUtils.IS_OS_UNIX && filter == null && !(SystemUtils.IS_OS_MAC_OSX &&
-        (System.getenv("SPARK_TESTING") != null || System.getProperty("spark.testing") != null))) {
+    if (SparkSystemUtils$.MODULE$.isUnix() && filter == null &&
+        !(SparkSystemUtils$.MODULE$.isMac() && (System.getenv("SPARK_TESTING") != null ||
+        System.getProperty("spark.testing") != null))) {
       try {
         deleteRecursivelyUsingUnixNative(file);
         return;
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkSystemUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkSystemUtils.scala
new file mode 100644
index 000000000000..c855d6bace73
--- /dev/null
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkSystemUtils.scala
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+private[spark] trait SparkSystemUtils {
+  /**
+   * The `os.name` system property.
+   */
+  val osName = System.getProperty("os.name")
+
+  /**
+   * The `os.arch` system property.
+   */
+  val osArch = System.getProperty("os.arch")
+
+  /**
+   * Whether the underlying operating system is Windows.
+   */
+  val isWindows = osName.regionMatches(true, 0, "Windows", 0, 7)
+
+  /**
+   * Whether the underlying operating system is Mac OS X.
+   */
+  val isMac = osName.regionMatches(true, 0, "Mac OS X", 0, 8)
+
+  /**
+   * Whether the underlying operating system is Mac OS X and processor is Apple Silicon.
+   */
+  val isMacOnAppleSilicon = isMac && osArch.equals("aarch64")
+
+  /**
+   * Whether the underlying operating system is Linux.
+   */
+  val isLinux = osName.regionMatches(true, 0, "Linux", 0, 5)
+
+  /**
+   * Whether the underlying operating system is UNIX.
+   */
+  val isUnix = Seq("AIX", "HP-UX", "Irix", "Linux", "Mac OS X", "Solaris", "SunOS", "FreeBSD",
+      "OpenBSD", "NetBSD").exists(prefix => osName.regionMatches(true, 0, prefix, 0, prefix.length))
+}
+
+object SparkSystemUtils extends SparkSystemUtils
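
The OS flags above rely on `String.regionMatches` for a case-insensitive prefix test against `os.name`, replacing the commons-lang3 `SystemUtils` constants used previously. A standalone sketch of the idiom (the sample value is illustrative):

```scala
object RegionMatchesDemo {
  def main(args: Array[String]): Unit = {
    // regionMatches(ignoreCase, thisOffset, other, otherOffset, len)
    // compares a region of the string without allocating a lowercased copy.
    val osName = "Mac OS X" // stand-in for System.getProperty("os.name")
    println(osName.regionMatches(true, 0, "Mac OS X", 0, 8)) // true
    println(osName.regionMatches(true, 0, "Windows", 0, 7))  // false
  }
}
```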
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 1645281a7942..ad9d30da2b4a 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -51,7 +51,6 @@ import com.google.common.net.InetAddresses
 import jakarta.ws.rs.core.UriBuilder
 import org.apache.commons.codec.binary.Hex
 import org.apache.commons.io.IOUtils
-import org.apache.commons.lang3.SystemUtils
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
 import org.apache.hadoop.fs.audit.CommonAuditContext.currentAuditContext
@@ -105,7 +104,8 @@ private[spark] object Utils
   with SparkFileUtils
   with SparkSerDeUtils
   with SparkStreamUtils
-  with SparkStringUtils {
+  with SparkStringUtils
+  with SparkSystemUtils {
 
   private val sparkUncaughtExceptionHandler = new SparkUncaughtExceptionHandler
   @volatile private var cachedLocalDir: String = ""
@@ -1857,21 +1857,6 @@ private[spark] object Utils
     getHadoopFileSystem(new URI(path), conf)
   }
 
-  /**
-   * Whether the underlying operating system is UNIX.
-   */
-  val isUnix = SystemUtils.IS_OS_UNIX
-
-  /**
-   * Whether the underlying operating system is Windows.
-   */
-  val isWindows = SystemUtils.IS_OS_WINDOWS
-
-  /**
-   * Whether the underlying operating system is Mac OS X.
-   */
-  val isMac = SystemUtils.IS_OS_MAC_OSX
-
   /**
    * Whether the underlying Java version is at most 17.
    */
@@ -1882,11 +1867,6 @@ private[spark] object Utils
    */
   val isJavaVersionAtLeast21 = Runtime.version().feature() >= 21
 
-  /**
-   * Whether the underlying operating system is Mac OS X and processor is Apple Silicon.
-   */
-  val isMacOnAppleSilicon = SystemUtils.IS_OS_MAC_OSX && SystemUtils.OS_ARCH.equals("aarch64")
-
   /**
    * Whether the underlying JVM prefer IPv6 addresses.
    */
diff --git a/core/src/test/scala/org/apache/spark/benchmark/Benchmark.scala b/core/src/test/scala/org/apache/spark/benchmark/Benchmark.scala
index 7e88c7ee684b..a2cdf033308a 100644
--- a/core/src/test/scala/org/apache/spark/benchmark/Benchmark.scala
+++ b/core/src/test/scala/org/apache/spark/benchmark/Benchmark.scala
@@ -25,7 +25,6 @@ import scala.concurrent.duration._
 import scala.util.Try
 
 import org.apache.commons.io.output.TeeOutputStream
-import org.apache.commons.lang3.SystemUtils
 
 import org.apache.spark.util.Utils
 
@@ -211,10 +210,10 @@ private[spark] object Benchmark {
    * This should return something like "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz"
    */
   def getProcessorName(): String = {
-    val cpu = if (SystemUtils.IS_OS_MAC_OSX) {
+    val cpu = if (Utils.isMac) {
       Utils.executeAndGetOutput(Seq("/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"))
         .stripLineEnd
-    } else if (SystemUtils.IS_OS_LINUX) {
+    } else if (Utils.isLinux) {
       Try {
        val grepPath = Utils.executeAndGetOutput(Seq("which", "grep")).stripLineEnd
        Utils.executeAndGetOutput(Seq(grepPath, "-m", "1", "model name", "/proc/cpuinfo"))
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index f85fa9da09fc..430de1cad588 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -33,7 +33,6 @@ import scala.util.{Random, Try}
 
 import com.google.common.io.Files
 import org.apache.commons.io.IOUtils
-import org.apache.commons.lang3.SystemUtils
 import org.apache.commons.math3.stat.inference.ChiSquareTest
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
@@ -1032,7 +1031,7 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties {
   test("Kill process") {
     // Verify that we can terminate a process even if it is in a bad state. This is only run
     // on UNIX since it does some OS specific things to verify the correct behavior.
-    if (SystemUtils.IS_OS_UNIX) {
+    if (Utils.isUnix) {
       def pidExists(pid: Long): Boolean = {
         val p = Runtime.getRuntime.exec(Array("kill", "-0", s"$pid"))
         p.waitFor()

