Repository: spark
Updated Branches:
  refs/heads/master 95a07591b -> 694aef0d7


[hotfix] [build] Make sure JAVA_HOME is set for tests.

This is needed at least for YARN integration tests, since `$JAVA_HOME` is used to launch the executors.

Author: Marcelo Vanzin <[email protected]>

Closes #5441 from vanzin/yarn-test-test and squashes the following commits:

3eeec30 [Marcelo Vanzin] Use JAVA_HOME when available, java.home otherwise.
d71f1bb [Marcelo Vanzin] And sbt too.
6bda399 [Marcelo Vanzin] WIP: Testing to see whether this fixes the yarn test issue.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/694aef0d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/694aef0d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/694aef0d

Branch: refs/heads/master
Commit: 694aef0d71d2683eaf63cbd1d8e95c2da423b72e
Parents: 95a0759
Author: Marcelo Vanzin <[email protected]>
Authored: Sat Apr 11 13:10:01 2015 +0100
Committer: Sean Owen <[email protected]>
Committed: Sat Apr 11 13:10:01 2015 +0100

----------------------------------------------------------------------
 pom.xml                  | 14 ++++++++++++++
 project/SparkBuild.scala | 10 +++++++---
 2 files changed, 21 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/694aef0d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 70e297c..d8881c2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -159,6 +159,8 @@
     <snappy.version>1.1.1.6</snappy.version>
     <netlib.java.version>1.1.2</netlib.java.version>
 
+    <test.java.home>${java.home}</test.java.home>
+
     <!--
      Dependency scopes that can be overridden by enabling certain profiles. These profiles are
       declared in the projects that build assemblies.
@@ -1194,6 +1196,7 @@
                launched by the tests have access to the correct test-time classpath.
               -->
               <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH>
+              <JAVA_HOME>${test.java.home}</JAVA_HOME>
             </environmentVariables>
             <systemProperties>
               <java.awt.headless>true</java.awt.headless>
@@ -1224,6 +1227,7 @@
                launched by the tests have access to the correct test-time classpath.
               -->
               <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH>
+              <JAVA_HOME>${test.java.home}</JAVA_HOME>
             </environmentVariables>
             <systemProperties>
               <java.awt.headless>true</java.awt.headless>
@@ -1717,6 +1721,16 @@
     </profile>
 
     <profile>
+      <id>test-java-home</id>
+      <activation>
+        <property><name>env.JAVA_HOME</name></property>
+      </activation>
+      <properties>
+        <test.java.home>${env.JAVA_HOME}</test.java.home>
+      </properties>
+    </profile>
+
+    <profile>
       <id>scala-2.11</id>
       <activation>
         <property><name>scala-2.11</name></property>

http://git-wip-us.apache.org/repos/asf/spark/blob/694aef0d/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d3faa55..5f51f4b 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -119,7 +119,9 @@ object SparkBuild extends PomBuild {
   lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
 
   lazy val sharedSettings = graphSettings ++ genjavadocSettings ++ Seq (
-    javaHome   := Properties.envOrNone("JAVA_HOME").map(file),
+    javaHome := sys.env.get("JAVA_HOME")
+      .orElse(sys.props.get("java.home").map { p => new File(p).getParentFile().getAbsolutePath() })
+      .map(file),
     incOptions := incOptions.value.withNameHashing(true),
     retrieveManaged := true,
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
@@ -426,8 +428,10 @@ object TestSettings {
     fork := true,
     // Setting SPARK_DIST_CLASSPATH is a simple way to make sure any child processes
     // launched by the tests have access to the correct test-time classpath.
-    envVars in Test += ("SPARK_DIST_CLASSPATH" ->
-      (fullClasspath in Test).value.files.map(_.getAbsolutePath).mkString(":").stripSuffix(":")),
+    envVars in Test ++= Map(
+      "SPARK_DIST_CLASSPATH" ->
+        (fullClasspath in Test).value.files.map(_.getAbsolutePath).mkString(":").stripSuffix(":"),
+      "JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))),
     javaOptions in Test += "-Dspark.test.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
     javaOptions in Test += "-Dspark.port.maxRetries=100",


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to