Repository: zeppelin
Updated Branches:
  refs/heads/branch-0.7 55a2bbbc3 -> 47eb61774
[HOTFIX][branch-0.7] Correct py4j.version in spark pom

### What is this PR for?
`master` and `branch-0.7` use different property names for the py4j version in pom.xml:
* master: `spark.py4j.version`
* branch-0.7: `py4j.version`

This PR corrects the property name for the py4j version in branch-0.7. Using the wrong property name causes the following build error (see the illustrative pom sketch after the Jira section below):

```
main:
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Zeppelin ........................................... SUCCESS [  3.321 s]
[INFO] Zeppelin: Interpreter .............................. SUCCESS [ 10.231 s]
[INFO] Zeppelin: Zengine .................................. SUCCESS [  7.804 s]
[INFO] Zeppelin: Display system apis ...................... SUCCESS [ 13.944 s]
[INFO] Zeppelin: Spark dependencies ....................... FAILURE [ 11.051 s]
[INFO] Zeppelin: Spark .................................... SKIPPED
[INFO] Zeppelin: Markdown interpreter ..................... SKIPPED
[INFO] Zeppelin: Angular interpreter ...................... SKIPPED
[INFO] Zeppelin: Shell interpreter ........................ SKIPPED
[INFO] Zeppelin: Livy interpreter ......................... SKIPPED
[INFO] Zeppelin: HBase interpreter ........................ SKIPPED
[INFO] Zeppelin: Apache Pig Interpreter ................... SKIPPED
[INFO] Zeppelin: PostgreSQL interpreter ................... SKIPPED
[INFO] Zeppelin: JDBC interpreter ......................... SKIPPED
[INFO] Zeppelin: File System Interpreters ................. SKIPPED
[INFO] Zeppelin: Flink .................................... SKIPPED
[INFO] Zeppelin: Apache Ignite interpreter ................ SKIPPED
[INFO] Zeppelin: Kylin interpreter ........................ SKIPPED
[INFO] Zeppelin: Python interpreter ....................... SKIPPED
[INFO] Zeppelin: Lens interpreter ......................... SKIPPED
[INFO] Zeppelin: Apache Cassandra interpreter ............. SKIPPED
[INFO] Zeppelin: Elasticsearch interpreter ................ SKIPPED
[INFO] Zeppelin: BigQuery interpreter ..................... SKIPPED
[INFO] Zeppelin: Alluxio interpreter ...................... SKIPPED
[INFO] Zeppelin: Scio ..................................... SKIPPED
[INFO] Zeppelin: web Application .......................... SKIPPED
[INFO] Zeppelin: Server ................................... SKIPPED
[INFO] Zeppelin: Packaging distribution ................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 46.981 s
[INFO] Finished at: 2017-09-01T18:20:01+09:00
[INFO] Final Memory: 87M/1434M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-antrun-plugin:1.7:run (zip-pyspark-files) on project zeppelin-spark-dependencies_2.11: An Ant BuildException has occured: Warning: Could not find file /private/tmp/zeppelin-release/zeppelin-0.7.3-bin-all/spark-dependencies/target/spark-2.1.0/python/lib/py4j-0.8.2.1-src.zip to copy.
[ERROR] around Ant part ...<copy todir="../interpreter/spark/pyspark" file="/private/tmp/zeppelin-release/zeppelin-0.7.3-bin-all/spark-dependencies/target/spark-2.1.0/python/lib/py4j-0.8.2.1-src.zip"/>... 5:178 in /private/tmp/zeppelin-release/zeppelin-0.7.3-bin-all/spark-dependencies/target/antrun/build-main.xml
[ERROR] -> [Help 1]
```

### What type of PR is it?
Bug Fix | Hot Fix

### What is the Jira issue?
None, it is a hot fix. Needs to be merged asap.
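For context, the `zip-pyspark-files` step that fails above is a maven-antrun-plugin execution in spark-dependencies/pom.xml that copies the py4j source zip out of the downloaded Spark distribution, and the path it copies is built from the `py4j.version` property. The following is a minimal sketch only: the default value (0.8.2.1) and the exact plugin configuration are inferred from the Ant error message above, not copied from the actual pom.

```xml
<!-- Illustrative sketch, not the actual Zeppelin pom: the default py4j.version and
     the antrun configuration below are inferred from the Ant error shown above. -->
<properties>
  <!-- Default that applies when a Spark profile does not override py4j.version. -->
  <py4j.version>0.8.2.1</py4j.version>
</properties>

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-antrun-plugin</artifactId>
  <executions>
    <execution>
      <id>zip-pyspark-files</id>
      <goals>
        <goal>run</goal>
      </goals>
      <configuration>
        <target>
          <!-- The copy resolves ${py4j.version}. A profile that sets spark.py4j.version
               instead leaves the 0.8.2.1 default in place, so this file does not exist
               in the Spark 2.1.0 distribution and the copy fails. -->
          <copy todir="../interpreter/spark/pyspark"
                file="${project.build.directory}/spark-${spark.version}/python/lib/py4j-${py4j.version}-src.zip"/>
        </target>
      </configuration>
    </execution>
  </executions>
</plugin>
```

With the profile change in this commit (see the spark-dependencies/pom.xml and spark/pom.xml hunks below), the spark-2.1 profile overrides `py4j.version` itself, so the copy resolves to the py4j-0.10.4-src.zip that actually ships with Spark 2.1.0.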
### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Mina Lee <mina...@apache.org>

Closes #2557 from minahlee/hotfix/py4j.version and squashes the following commits:

9d759c03 [Mina Lee] Fix build profile
35cf61b1 [Mina Lee] Set travis ubutu env to precise to fix r-base-dev installation issue
a3766685 [Mina Lee] Correct spark.py4j.version to py4j.version for branch-0.7


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/47eb6177
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/47eb6177
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/47eb6177

Branch: refs/heads/branch-0.7
Commit: 47eb617748f93814d7e67bed5adeaded1a46be86
Parents: 55a2bbb
Author: Mina Lee <mina...@apache.org>
Authored: Sun Sep 3 20:21:38 2017 +0900
Committer: Mina Lee <mina...@apache.org>
Committed: Mon Sep 4 10:30:16 2017 +0900

----------------------------------------------------------------------
 .travis.yml                                                      | 3 ++-
 spark-dependencies/pom.xml                                       | 2 +-
 spark/pom.xml                                                    | 2 +-
 .../main/java/org/apache/zeppelin/spark/PySparkInterpreter.java  | 5 +++--
 4 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/47eb6177/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 10eba61..c085303 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,6 +16,7 @@
 language: java
 
 sudo: false
+dist: precise
 
 cache:
   apt: true
@@ -59,7 +60,7 @@ matrix:
 
     # Test spark module for 2.2.0 with scala 2.11, livy
     - jdk: "oraclejdk8"
-      env: SCALA_VER="2.11" SPARK_VER="2.2.0" HADOOP_VER="2.6" PROFILE="-Pweb-ci -Pspark-2.2 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"
+      env: SCALA_VER="2.11" SPARK_VER="2.2.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.2 -Phadoop-2.6 -Ppyspark -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"
 
     # Test spark module for 2.1.0 with scala 2.11, livy
     - jdk: "oraclejdk7"


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/47eb6177/spark-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/spark-dependencies/pom.xml b/spark-dependencies/pom.xml
index ffdc754..e4e5858 100644
--- a/spark-dependencies/pom.xml
+++ b/spark-dependencies/pom.xml
@@ -535,7 +535,7 @@
       <properties>
         <spark.version>2.1.0</spark.version>
         <protobuf.version>2.5.0</protobuf.version>
-        <spark.py4j.version>0.10.4</spark.py4j.version>
+        <py4j.version>0.10.4</py4j.version>
         <scala.version>2.11.8</scala.version>
       </properties>
     </profile>


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/47eb6177/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index 3d005eb..be35e8d 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -528,7 +528,7 @@
       <properties>
         <spark.version>2.1.0</spark.version>
         <protobuf.version>2.5.0</protobuf.version>
-        <spark.py4j.version>0.10.4</spark.py4j.version>
+        <py4j.version>0.10.4</py4j.version>
         <scala.version>2.11.8</scala.version>
       </properties>
     </profile>


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/47eb6177/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
index 3ab5676..6e6396a 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
@@ -171,8 +171,8 @@ public class PySparkInterpreter extends Interpreter implements ExecuteResultHand
 
   private Map setupPySparkEnv() throws IOException{
     Map env = EnvironmentUtils.getProcEnvironment();
+    SparkConf conf = getSparkConf();
     if (!env.containsKey("PYTHONPATH")) {
-      SparkConf conf = getSparkConf();
       env.put("PYTHONPATH", conf.get("spark.submit.pyFiles").replaceAll(",", ":") +
               ":../interpreter/lib/python");
     }
@@ -182,13 +182,14 @@ public class PySparkInterpreter extends Interpreter implements ExecuteResultHand
 
     if (SparkInterpreter.useSparkSubmit() &&
         !getSparkInterpreter().isYarnMode()) {
 
-      String sparkSubmitJars = getSparkConf().get("spark.jars").replace(",", ":");
+      String sparkSubmitJars = conf.get("spark.jars").replace(",", ":");
 
       if (!"".equals(sparkSubmitJars)) {
         env.put("PYTHONPATH", env.get("PYTHONPATH") + sparkSubmitJars);
       }
     }
 
+    logger.debug("PYTHONPATH: " + env.get("PYTHONPATH"));
     return env;
   }