This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 2cc1ee4d3a0 [SPARK-45344][CORE][SQL] Remove all Scala version string check
2cc1ee4d3a0 is described below
commit 2cc1ee4d3a05a641d7a245f015ef824d8f7bae8b
Author: yangjie01 <[email protected]>
AuthorDate: Thu Sep 28 11:06:04 2023 +0800
[SPARK-45344][CORE][SQL] Remove all Scala version string check
### What changes were proposed in this pull request?
This PR removes all of the Scala version string checks that are no longer needed.
### Why are the changes needed?
Spark now builds only with Scala 2.13, so these version string checks are dead code and no longer needed.
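Every removed guard followed the same idiom of branching on scala.util.Properties.versionNumberString; a minimal sketch of that pattern (illustrative only, not code taken from this diff):

    import scala.util.Properties

    // Removed idiom: gate setup on the runtime Scala version string.
    // With Scala 2.13 as the only supported version, the guard is always
    // true (or always false for the 2.12 branches), so it can be dropped.
    if (Properties.versionNumberString.startsWith("2.13")) {
      // 2.13-specific setup
    }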
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass GitHub Actions
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #43133 from LuciferYang/SPARK-45344.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
.../apache/spark/serializer/KryoSerializer.scala | 5 +---
.../deploy/rest/SubmitRestProtocolSuite.scala | 32 +---------------------
.../spark/serializer/KryoSerializerSuite.scala | 1 -
.../spark/sql/hive/HiveSparkSubmitSuite.scala | 3 +-
4 files changed, 3 insertions(+), 38 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index 126f5a0ca3b..60af1abe943 100644
--- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -26,7 +26,6 @@ import javax.annotation.Nullable
import scala.collection.mutable.ArrayBuffer
import scala.jdk.CollectionConverters._
import scala.reflect.ClassTag
-import scala.util.Properties
import scala.util.control.NonFatal
import com.esotericsoftware.kryo.{Kryo, KryoException, Serializer => KryoClassSerializer}
@@ -229,9 +228,7 @@ class KryoSerializer(conf: SparkConf)
kryo.register(None.getClass)
kryo.register(Nil.getClass)
- if (Properties.versionNumberString.startsWith("2.13")) {
-   kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
- }
+ kryo.register(Utils.classForName("scala.collection.immutable.ArraySeq$ofRef"))
kryo.register(Utils.classForName("scala.collection.immutable.$colon$colon"))
kryo.register(Utils.classForName("scala.collection.immutable.Map$EmptyMap$"))
kryo.register(Utils.classForName("scala.math.Ordering$Reverse"))
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
index 9fdbf485e17..9eb51725831 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
@@ -19,8 +19,6 @@ package org.apache.spark.deploy.rest
import java.lang.Boolean
-import scala.util.Properties.versionNumberString
-
import org.json4s.jackson.JsonMethods._
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -235,34 +233,7 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
|}
""".stripMargin
- private lazy val submitDriverRequestJson = if (versionNumberString.startsWith("2.12")) {
- s"""
- |{
- | "action" : "CreateSubmissionRequest",
- | "appArgs" : [ "two slices", "a hint of cinnamon" ],
- | "appResource" : "honey-walnut-cherry.jar",
- | "clientSparkVersion" : "1.2.3",
- | "environmentVariables" : {
- | "PATH" : "/dev/null"
- | },
- | "mainClass" : "org.apache.spark.examples.SparkPie",
- | "sparkProperties" : {
- | "spark.archives" : "fireballs.zip",
- | "spark.driver.extraLibraryPath" : "pickle.jar",
- | "spark.jars" : "mayonnaise.jar,ketchup.jar",
- | "spark.driver.supervise" : "false",
- | "spark.app.name" : "SparkPie",
- | "spark.cores.max" : "10000",
- | "spark.driver.memory" : "${Utils.DEFAULT_DRIVER_MEM_MB}m",
- | "spark.files" : "fireball.png",
- | "spark.driver.cores" : "180",
- | "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
- | "spark.executor.memory" : "256m",
- | "spark.driver.extraClassPath" : "food-coloring.jar"
- | }
- |}
- """.stripMargin
- } else {
+ private lazy val submitDriverRequestJson =
s"""
|{
| "action" : "CreateSubmissionRequest",
@@ -289,7 +260,6 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
| }
|}
""".stripMargin
- }
private val submitDriverResponseJson =
"""
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 57d59672b7b..40319e1a46c 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -553,7 +553,6 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
}
test("SPARK-43898: Register scala.collection.immutable.ArraySeq$ofRef for Scala 2.13") {
- assume(scala.util.Properties.versionNumberString.startsWith("2.13"))
val conf = new SparkConf(false)
conf.set(KRYO_REGISTRATION_REQUIRED, true)
val ser = new KryoSerializer(conf).newInstance()
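With the assume removed, this test now runs unconditionally. A sketch of the kind of round trip such a test exercises (illustrative names and values, not the suite's exact body):

    import scala.collection.immutable.ArraySeq
    import org.apache.spark.SparkConf
    import org.apache.spark.serializer.KryoSerializer

    // Round-trip an immutable.ArraySeq$ofRef with Kryo registration
    // required, relying on the registration made in KryoSerializer above.
    val conf = new SparkConf(false).set("spark.kryo.registrationRequired", "true")
    val ser = new KryoSerializer(conf).newInstance()
    val seq = ArraySeq.unsafeWrapArray(Array("a", "b", "c"))
    assert(ser.deserialize[ArraySeq[String]](ser.serialize(seq)) == seq)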
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 5e405127a15..56055962014 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -162,9 +162,8 @@ class HiveSparkSubmitSuite
// Before the fix in SPARK-8470, this results in a MissingRequirementError because
// the HiveContext code mistakenly overrides the class loader that contains user classes.
// For more detail, see sql/hive/src/test/resources/regression-test-SPARK-8489/*scala.
- // TODO: revisit for Scala 2.13 support
val version = Properties.versionNumberString match {
- case v if v.startsWith("2.12") || v.startsWith("2.13") => v.substring(0, 4)
+ case v if v.startsWith("2.13") => v.substring(0, 4)
case x => throw new Exception(s"Unsupported Scala Version: $x")
}
val jarDir = getTestResourcePath("regression-test-SPARK-8489")
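The remaining match simply reduces the full Scala patch version to its binary-version prefix and rejects anything else; the same pattern in isolation (illustrative, with an example version):

    import scala.util.Properties

    // e.g. "2.13.12" -> "2.13"; any non-2.13 runtime is rejected outright.
    val binaryVersion = Properties.versionNumberString match {
      case v if v.startsWith("2.13") => v.substring(0, 4)
      case x => throw new Exception(s"Unsupported Scala Version: $x")
    }
    println(binaryVersion)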
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]