Repository: spark
Updated Branches:
  refs/heads/branch-1.2 8cf122799 -> 32198347f


[SPARK-4193][BUILD] Disable doclint in Java 8 to prevent build errors.

Author: Takuya UESHIN <[email protected]>

Closes #3058 from ueshin/issues/SPARK-4193 and squashes the following commits:

e096bb1 [Takuya UESHIN] Add a plugin declaration to pluginManagement.
6762ec2 [Takuya UESHIN] Fix usage of -Xdoclint javadoc option.
fdb280a [Takuya UESHIN] Fix Javadoc errors.
4745f3c [Takuya UESHIN] Merge branch 'master' into issues/SPARK-4193
923e2f0 [Takuya UESHIN] Use doclint option `-missing` instead of `none`.
30d6718 [Takuya UESHIN] Fix Javadoc errors.
b548017 [Takuya UESHIN] Disable doclint in Java 8 to prevent build errors.

(cherry picked from commit e464f0ac2d7210a4bf715478885fe7a8d397fe89)
Signed-off-by: Patrick Wendell <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/32198347
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/32198347
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/32198347

Branch: refs/heads/branch-1.2
Commit: 32198347ffb71f72f37e4bded262da80452a5aea
Parents: 8cf1227
Author: Takuya UESHIN <[email protected]>
Authored: Fri Nov 28 13:00:15 2014 -0500
Committer: Patrick Wendell <[email protected]>
Committed: Fri Nov 28 13:00:29 2014 -0500

----------------------------------------------------------------------
 .../spark/network/client/TransportClient.java   |  2 +-
 .../network/server/OneForOneStreamManager.java  |  2 +-
 .../spark/network/util/LimitedInputStream.java  |  2 +-
 .../apache/spark/network/util/NettyUtils.java   |  2 +-
 .../spark/network/util/TransportConf.java       |  2 +-
 pom.xml                                         | 24 ++++++++++++++++++++
 project/SparkBuild.scala                        |  7 +++++-
 7 files changed, 35 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/network/common/src/main/java/org/apache/spark/network/client/TransportClient.java
----------------------------------------------------------------------
diff --git 
a/network/common/src/main/java/org/apache/spark/network/client/TransportClient.java
 
b/network/common/src/main/java/org/apache/spark/network/client/TransportClient.java
index 4e94411..37f2e34 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/client/TransportClient.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/client/TransportClient.java
@@ -49,7 +49,7 @@ import org.apache.spark.network.util.NettyUtils;
  * to perform this setup.
  *
  * For example, a typical workflow might be:
- * client.sendRPC(new OpenFile("/foo")) --> returns StreamId = 100
+ * client.sendRPC(new OpenFile("/foo")) --&gt; returns StreamId = 100
  * client.fetchChunk(streamId = 100, chunkIndex = 0, callback)
  * client.fetchChunk(streamId = 100, chunkIndex = 1, callback)
  * ...

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/network/common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
----------------------------------------------------------------------
diff --git 
a/network/common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
 
b/network/common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
index 731d48d..a6d390e 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
@@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.spark.network.buffer.ManagedBuffer;
 
 /**
- * StreamManager which allows registration of an Iterator<ManagedBuffer>, 
which are individually
+ * StreamManager which allows registration of an 
Iterator&lt;ManagedBuffer&gt;, which are individually
  * fetched as chunks by the client. Each registered buffer is one chunk.
  */
 public class OneForOneStreamManager extends StreamManager {

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/network/common/src/main/java/org/apache/spark/network/util/LimitedInputStream.java
----------------------------------------------------------------------
diff --git 
a/network/common/src/main/java/org/apache/spark/network/util/LimitedInputStream.java
 
b/network/common/src/main/java/org/apache/spark/network/util/LimitedInputStream.java
index 63ca43c..57113ed 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/util/LimitedInputStream.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/util/LimitedInputStream.java
@@ -27,7 +27,7 @@ import com.google.common.base.Preconditions;
  * Wraps a {@link InputStream}, limiting the number of bytes which can be read.
  *
  * This code is from Guava's 14.0 source code, because there is no compatible 
way to
- * use this functionality in both a Guava 11 environment and a Guava >14 
environment.
+ * use this functionality in both a Guava 11 environment and a Guava &gt;14 
environment.
  */
 public final class LimitedInputStream extends FilterInputStream {
   private long left;

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/network/common/src/main/java/org/apache/spark/network/util/NettyUtils.java
----------------------------------------------------------------------
diff --git 
a/network/common/src/main/java/org/apache/spark/network/util/NettyUtils.java 
b/network/common/src/main/java/org/apache/spark/network/util/NettyUtils.java
index b3991a6..2a4b88b 100644
--- a/network/common/src/main/java/org/apache/spark/network/util/NettyUtils.java
+++ b/network/common/src/main/java/org/apache/spark/network/util/NettyUtils.java
@@ -99,7 +99,7 @@ public class NettyUtils {
     return new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 8, -8, 8);
   }
 
-  /** Returns the remote address on the channel or "<remote address>" if none 
exists. */
+  /** Returns the remote address on the channel or "&lt;remote address&gt;" if 
none exists. */
   public static String getRemoteAddress(Channel channel) {
     if (channel != null && channel.remoteAddress() != null) {
       return channel.remoteAddress().toString();

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/network/common/src/main/java/org/apache/spark/network/util/TransportConf.java
----------------------------------------------------------------------
diff --git 
a/network/common/src/main/java/org/apache/spark/network/util/TransportConf.java 
b/network/common/src/main/java/org/apache/spark/network/util/TransportConf.java
index 621427d..1af40ac 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/util/TransportConf.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/util/TransportConf.java
@@ -72,7 +72,7 @@ public class TransportConf {
 
   /**
    * Time (in milliseconds) that we will wait in order to perform a retry 
after an IOException.
-   * Only relevant if maxIORetries > 0.
+   * Only relevant if maxIORetries &gt; 0.
    */
   public int ioRetryWaitTime() { return 
conf.getInt("spark.shuffle.io.retryWaitMs", 5000); }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 94c4422..57323ca 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1037,6 +1037,11 @@
             </filesets>
           </configuration>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-javadoc-plugin</artifactId>
+          <version>2.10.1</version>
+        </plugin>
       </plugins>
     </pluginManagement>
 
@@ -1230,6 +1235,25 @@
 
     </profile>
 
+    <profile>
+      <id>doclint-java8-disable</id>
+      <activation>
+        <jdk>[1.8,)</jdk>
+      </activation>
+
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>
+            <configuration>
+              <additionalparam>-Xdoclint:all 
-Xdoclint:-missing</additionalparam>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
     <!-- A series of build profiles where customizations for particular Hadoop 
releases can be made -->
 
     <!-- Hadoop-a.b.c dependencies can be found at

http://git-wip-us.apache.org/repos/asf/spark/blob/32198347/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d539a3d..f73e0f6 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -136,7 +136,12 @@ object SparkBuild extends PomBuild {
     },
     publishMavenStyle in MavenCompile := true,
     publishLocal in MavenCompile <<= publishTask(publishLocalConfiguration in 
MavenCompile, deliverLocal),
-    publishLocalBoth <<= Seq(publishLocal in MavenCompile, 
publishLocal).dependOn
+    publishLocalBoth <<= Seq(publishLocal in MavenCompile, 
publishLocal).dependOn,
+
+    javacOptions in (Compile, doc) ++= {
+      val Array(major, minor, _) = 
System.getProperty("java.version").split("\\.", 3)
+      if (major.toInt >= 1 && minor.toInt >= 8) Seq("-Xdoclint:all", 
"-Xdoclint:-missing") else Seq.empty
+    }
   )
 
   def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef) = {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to