Repository: spark
Updated Branches:
  refs/heads/master 6776cb33e -> 258d154c9


[SPARK-6048] SparkConf should not translate deprecated configs on set

There are multiple issues with translating on set, as outlined in the JIRA.

This PR reverts the translation logic added to `SparkConf`. After the 1.3.0 
release, we will figure out a way to reorganize the internal structure more 
elegantly. For now, let's preserve the existing semantics of `SparkConf`, 
since it is a public interface. Unfortunately this means duplicating some 
code, but it is all internal and we can always clean it up later.
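
The replacement pattern is visible in the diff below: instead of rewriting 
keys inside `SparkConf`, each read site consults the new key first and falls 
back to the deprecated one. A minimal sketch of the idiom, using the key pair 
touched by this commit (`conf` is any `SparkConf`):

    // Prefer the current key; fall back to the deprecated one.
    // Neither key is rewritten inside SparkConf itself.
    val userClassPathFirst: Boolean =
      conf.getBoolean("spark.executor.userClassPathFirst",
        conf.getBoolean("spark.files.userClassPathFirst", false))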

Author: Andrew Or <[email protected]>

Closes #4799 from andrewor14/conf-set-translate and squashes the following commits:

11c525b [Andrew Or] Move warning to driver
10e77b5 [Andrew Or] Add documentation for deprecation precedence
a369cb1 [Andrew Or] Merge branch 'master' of github.com:apache/spark into conf-set-translate
c26a9e3 [Andrew Or] Revert all translate logic in SparkConf
fef6c9c [Andrew Or] Restore deprecation logic for spark.executor.userClassPathFirst
94b4dfa [Andrew Or] Translate on get, not set


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/258d154c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/258d154c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/258d154c

Branch: refs/heads/master
Commit: 258d154c9f1afdd52dce19f03d81683ee34effac
Parents: 6776cb3
Author: Andrew Or <[email protected]>
Authored: Mon Mar 2 16:36:42 2015 -0800
Committer: Patrick Wendell <[email protected]>
Committed: Mon Mar 2 16:36:42 2015 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkConf.scala | 15 +++++++++++----
 .../scala/org/apache/spark/executor/Executor.scala   | 13 +++++++++----
 .../test/scala/org/apache/spark/SparkConfSuite.scala | 12 ------------
 docs/configuration.md                                |  4 +++-
 .../scala/org/apache/spark/deploy/yarn/Client.scala  |  3 ++-
 5 files changed, 25 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/258d154c/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 61b34d5..2ca19f5 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -68,7 +68,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    settings.put(translateConfKey(key, warn = true), value)
+    settings.put(key, value)
     this
   }
 
@@ -140,7 +140,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    settings.putIfAbsent(translateConfKey(key, warn = true), value)
+    settings.putIfAbsent(key, value)
     this
   }
 
@@ -176,7 +176,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    Option(settings.get(translateConfKey(key)))
+    Option(settings.get(key))
   }
 
   /** Get all parameters as a list of pairs */
@@ -229,7 +229,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")
 
   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.containsKey(translateConfKey(key))
+  def contains(key: String): Boolean = settings.containsKey(key)
 
   /** Copy this object */
   override def clone: SparkConf = {
@@ -343,6 +343,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
         }
       }
     }
+
+    // Warn against the use of deprecated configs
+    deprecatedConfigs.values.foreach { dc =>
+      if (contains(dc.oldName)) {
+        dc.warn()
+      }
+    }
   }
 
   /**
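
Note: the warning pass added in this hunk runs in `validateSettings()`, which 
is invoked on the driver (see the "Move warning to driver" commit above), so 
deprecated keys are flagged once rather than on every `set()`. A rough sketch 
of the entry shape the loop assumes; this `DeprecatedConfig` is illustrative, 
and the real private class in SparkConf.scala may differ in its fields:

    // Illustrative only: an entry with an old name and a warn() hook,
    // matching how the loop above uses it. Assumed to live inside
    // SparkConf, which extends Logging.
    private case class DeprecatedConfig(oldName: String, newName: String, version: String) {
      def warn(): Unit =
        logWarning(s"$oldName is deprecated as of Spark $version; use $newName instead.")
    }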

http://git-wip-us.apache.org/repos/asf/spark/blob/258d154c/core/src/main/scala/org/apache/spark/executor/Executor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index b684fb7..bed0a08 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -92,6 +92,12 @@ private[spark] class Executor(
   private val executorActor = env.actorSystem.actorOf(
     Props(new ExecutorActor(executorId)), "ExecutorActor")
 
+  // Whether to load classes in user jars before those in Spark jars
+  private val userClassPathFirst: Boolean = {
+    conf.getBoolean("spark.executor.userClassPathFirst",
+      conf.getBoolean("spark.files.userClassPathFirst", false))
+  }
+
   // Create our ClassLoader
   // do this after SparkEnv creation so can access the SecurityManager
   private val urlClassLoader = createClassLoader()
@@ -309,7 +315,7 @@ private[spark] class Executor(
     val urls = userClassPath.toArray ++ currentJars.keySet.map { uri =>
       new File(uri.split("/").last).toURI.toURL
     }
-    if (conf.getBoolean("spark.executor.userClassPathFirst", false)) {
+    if (userClassPathFirst) {
       new ChildFirstURLClassLoader(urls, currentLoader)
     } else {
       new MutableURLClassLoader(urls, currentLoader)
@@ -324,14 +330,13 @@ private[spark] class Executor(
     val classUri = conf.get("spark.repl.class.uri", null)
     if (classUri != null) {
       logInfo("Using REPL class URI: " + classUri)
-      val userClassPathFirst: java.lang.Boolean =
-        conf.getBoolean("spark.executor.userClassPathFirst", false)
       try {
+        val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
         val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
           .asInstanceOf[Class[_ <: ClassLoader]]
        val constructor = klass.getConstructor(classOf[SparkConf], classOf[String],
           classOf[ClassLoader], classOf[Boolean])
-        constructor.newInstance(conf, classUri, parent, userClassPathFirst)
+        constructor.newInstance(conf, classUri, parent, _userClassPathFirst)
       } catch {
         case _: ClassNotFoundException =>
           logError("Could not find org.apache.spark.repl.ExecutorClassLoader 
on classpath!")
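
One detail in the hunk above: `Constructor.newInstance` accepts only object 
references, so the Scala `Boolean` is pinned to `java.lang.Boolean` before 
the reflective call. A standalone sketch of the same boxing pattern:

    object BoxingSketch extends App {
      // getConstructor matches the primitive boolean parameter, but
      // newInstance takes Object varargs, so the argument must be boxed.
      val ctor = classOf[java.lang.Boolean].getConstructor(classOf[Boolean])
      val boxed: java.lang.Boolean = true  // Scala boxes the primitive here
      println(ctor.newInstance(boxed))     // prints "true"
    }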

http://git-wip-us.apache.org/repos/asf/spark/blob/258d154c/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index ea6b73b..e08210a 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -197,18 +197,6 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemPro
     serializer.newInstance().serialize(new StringBuffer())
   }
 
-  test("deprecated config keys") {
-    val conf = new SparkConf()
-      .set("spark.files.userClassPathFirst", "true")
-      .set("spark.yarn.user.classpath.first", "true")
-    assert(conf.contains("spark.files.userClassPathFirst"))
-    assert(conf.contains("spark.executor.userClassPathFirst"))
-    assert(conf.contains("spark.yarn.user.classpath.first"))
-    assert(conf.getBoolean("spark.files.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.executor.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.yarn.user.classpath.first", false))
-  }
-
 }
 
 class Class1 {}

http://git-wip-us.apache.org/repos/asf/spark/blob/258d154c/docs/configuration.md
----------------------------------------------------------------------
diff --git a/docs/configuration.md b/docs/configuration.md
index c11787b..ae90fe1 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -70,7 +70,9 @@ each line consists of a key and a value separated by whitespace. For example:
 Any values specified as flags or in the properties file will be passed on to the application
 and merged with those specified through SparkConf. Properties set directly on the SparkConf
 take highest precedence, then flags passed to `spark-submit` or `spark-shell`, then options
-in the `spark-defaults.conf` file.
+in the `spark-defaults.conf` file. A few configuration keys have been renamed since earlier
+versions of Spark; in such cases, the older key names are still accepted, but take lower
+precedence than any instance of the newer key.
 
 ## Viewing Spark Properties
 

http://git-wip-us.apache.org/repos/asf/spark/blob/258d154c/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 46d9df9..61f8fc3 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -955,7 +955,8 @@ object Client extends Logging {
     if (isDriver) {
       conf.getBoolean("spark.driver.userClassPathFirst", false)
     } else {
-      conf.getBoolean("spark.executor.userClassPathFirst", false)
+      conf.getBoolean("spark.executor.userClassPathFirst",
+        conf.getBoolean("spark.files.userClassPathFirst", false))
     }
   }
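
For context, this hunk sits in a helper that picks the key by role; only the 
executor path gains the deprecated fallback, while the driver key keeps a 
plain read. A sketch reconstructed from the hunk (the enclosing method name 
and signature are assumed, not shown in the diff):

    // Driver and executor consult different keys; only the executor key
    // falls back to the deprecated spark.files.userClassPathFirst.
    private def isUserClassPathFirst(conf: SparkConf, isDriver: Boolean): Boolean = {
      if (isDriver) {
        conf.getBoolean("spark.driver.userClassPathFirst", false)
      } else {
        conf.getBoolean("spark.executor.userClassPathFirst",
          conf.getBoolean("spark.files.userClassPathFirst", false))
      }
    }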
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
