Repository: zeppelin
Updated Branches:
  refs/heads/master cf8b8310d -> 839db5ec5
ZEPPELIN-3707. In yarn cluster mode, zeppelin does not allow specifying additional artifacts to be uploaded via distributed cache

### What is this PR for?
Put all the additional files into `spark.files` instead of passing them via `--files`, because `--files` would overwrite `spark.files`.

### What type of PR is it?
[Bug Fix]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3707

### How should this be tested?
* CI pass

### Screenshots (if appropriate)

### Questions:
* Do the license files need updating? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zjf...@apache.org>

Closes #3135 from zjffdu/ZEPPELIN-3707 and squashes the following commits:

c31a1008a [Jeff Zhang] ZEPPELIN-3707. In yarn cluster mode, zeppelin does not allow specifying additional artifacts to be uploaded via distributed cache

Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/839db5ec
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/839db5ec
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/839db5ec

Branch: refs/heads/master
Commit: 839db5ec58fd5d852706e10f7a05bb756c8e75b9
Parents: cf8b831
Author: Jeff Zhang <zjf...@apache.org>
Authored: Sun Aug 12 11:04:53 2018 +0800
Committer: Jeff Zhang <zjf...@apache.org>
Committed: Tue Aug 14 21:23:34 2018 +0800

----------------------------------------------------------------------
 .../interpreter/launcher/SparkInterpreterLauncher.java     | 7 ++++++-
 .../interpreter/launcher/SparkInterpreterLauncherTest.java | 6 +++---
 2 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/839db5ec/zeppelin-plugins/launcher/spark/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
----------------------------------------------------------------------
diff --git a/zeppelin-plugins/launcher/spark/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java b/zeppelin-plugins/launcher/spark/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
index f1a8012..bcf319a 100644
--- a/zeppelin-plugins/launcher/spark/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
+++ b/zeppelin-plugins/launcher/spark/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
@@ -71,7 +71,12 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher {
       sparkConfBuilder.append(" --master " + sparkMaster);
     }
     if (isYarnMode() && getDeployMode().equals("cluster")) {
-      sparkConfBuilder.append(" --files " + zConf.getConfDir() + "/log4j_yarn_cluster.properties");
+      if (sparkProperties.containsKey("spark.files")) {
+        sparkProperties.put("spark.files", sparkProperties.getProperty("spark.files") + "," +
+            zConf.getConfDir() + "/log4j_yarn_cluster.properties");
+      } else {
+        sparkProperties.put("spark.files", zConf.getConfDir() + "/log4j_yarn_cluster.properties");
+      }
     }
     for (String name : sparkProperties.stringPropertyNames()) {
       sparkConfBuilder.append(" --conf " + name + "=" + sparkProperties.getProperty(name));


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/839db5ec/zeppelin-plugins/launcher/spark/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-plugins/launcher/spark/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java b/zeppelin-plugins/launcher/spark/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
index 119a67e..d7dcd0a 100644
--- a/zeppelin-plugins/launcher/spark/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
+++ b/zeppelin-plugins/launcher/spark/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
@@ -163,7 +163,7 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 3);
     assertEquals("/user/spark", interpreterProcess.getEnv().get("SPARK_HOME"));
     assertEquals("true", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
-    assertEquals(" --master yarn-cluster --files .//conf/log4j_yarn_cluster.properties --conf spark.files='file_1' --conf spark.jars='jar_1' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
+    assertEquals(" --master yarn-cluster --conf spark.files='file_1',.//conf/log4j_yarn_cluster.properties --conf spark.jars='jar_1' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
   }

   @Test
@@ -196,7 +196,7 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 3);
     assertEquals("/user/spark", interpreterProcess.getEnv().get("SPARK_HOME"));
     assertEquals("true", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
-    assertEquals(" --master yarn --files .//conf/log4j_yarn_cluster.properties --conf spark.files='file_1' --conf spark.jars='jar_1' --conf spark.submit.deployMode='cluster' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false --proxy-user user1 --jars " + Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar").toString(), interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
+    assertEquals(" --master yarn --conf spark.files='file_1',.//conf/log4j_yarn_cluster.properties --conf spark.jars='jar_1' --conf spark.submit.deployMode='cluster' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false --proxy-user user1 --jars " + Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar").toString(), interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
     Files.deleteIfExists(Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar"));
     FileUtils.deleteDirectory(localRepoPath.toFile());
   }

@@ -230,7 +230,7 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 3);
     assertEquals("/user/spark", interpreterProcess.getEnv().get("SPARK_HOME"));
     assertEquals("true", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
-    assertEquals(" --master yarn --files .//conf/log4j_yarn_cluster.properties --conf spark.files='file_1' --conf spark.jars='jar_1' --conf spark.submit.deployMode='cluster' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false --proxy-user user1", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
+    assertEquals(" --master yarn --conf spark.files='file_1',.//conf/log4j_yarn_cluster.properties --conf spark.jars='jar_1' --conf spark.submit.deployMode='cluster' --conf spark.yarn.isPython=true --conf spark.yarn.submit.waitAppCompletion=false --proxy-user user1", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));

     FileUtils.deleteDirectory(localRepoPath.toFile());
   }
}
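
----------------------------------------------------------------------
For context, a minimal self-contained sketch of the spark.files merge this patch performs. The class name, artifact path, and conf directory below are hypothetical illustrations, not values from this commit; the real launcher takes the conf directory from zConf.getConfDir() and builds the final ZEPPELIN_SPARK_CONF string from all of sparkProperties.

import java.util.Properties;

// Hypothetical standalone sketch of the merge done in SparkInterpreterLauncher.
public class SparkFilesMergeSketch {
  public static void main(String[] args) {
    Properties sparkProperties = new Properties();
    // Hypothetical user-supplied artifact destined for the YARN distributed cache.
    sparkProperties.put("spark.files", "hdfs:///tmp/extra_artifact.zip");

    // Hypothetical Zeppelin conf dir; the real launcher uses zConf.getConfDir().
    String log4jConf = "/opt/zeppelin/conf/log4j_yarn_cluster.properties";

    // Append the yarn-cluster log4j config to any existing spark.files rather than
    // emitting a separate --files flag, which (per the PR description) would
    // overwrite spark.files and drop the user's additional artifacts.
    if (sparkProperties.containsKey("spark.files")) {
      sparkProperties.put("spark.files",
          sparkProperties.getProperty("spark.files") + "," + log4jConf);
    } else {
      sparkProperties.put("spark.files", log4jConf);
    }

    // Prints:
    //  --conf spark.files=hdfs:///tmp/extra_artifact.zip,/opt/zeppelin/conf/log4j_yarn_cluster.properties
    System.out.println(" --conf spark.files=" + sparkProperties.getProperty("spark.files"));
  }
}

With the old code path the user's artifacts and the log4j config competed for a single setting; merging everything into one spark.files value lets both reach the YARN distributed cache, which is what the updated test assertions above verify.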