This is an automated email from the ASF dual-hosted git repository.

felixcheung pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push:
     new f7df680  [ZEPPELIN-3957] Divide SparkIntegrationTest into parts for build under 50 minutes
f7df680 is described below

commit f7df680ef91a2862e22fe914b8c6ddeae2491fc5
Author: Andrey Koshkin <37596468+d...@users.noreply.github.com>
AuthorDate: Thu Jan 24 06:51:16 2019 +0300

    [ZEPPELIN-3957] Divide SparkIntegrationTest into parts for build under 50 minutes

    ### What is this PR for?
    Fix the Travis profiles and tests to achieve a build time under 50 minutes.

    ### What type of PR is it?
    [Improvement]

    ### What is the Jira issue?
    * [ZEPPELIN-3957]

    ### How should this be tested?
    * Travis CI pass

    ### Screenshots (if appropriate)

    ### Questions:
    * Do the license files need updating? no
    * Are there breaking changes for older versions? no
    * Does this need documentation? no
---
 .travis.yml                                        | 10 +++++++--
 .../zeppelin/interpreter/SparkIntegrationTest.java | 16 ++------------
 .../interpreter/SparkIntegrationTestPt1.java       | 25 ++++++++++++++++++++++
 .../interpreter/SparkIntegrationTestPt2.java       | 25 ++++++++++++++++++++++
 4 files changed, 60 insertions(+), 16 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 2d810bf..ada776e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -92,11 +92,17 @@ matrix:
       dist: trusty
       env: BUILD_PLUGINS="true" PYTHON="2" PROFILE="-Pspark-2.2" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,zeppelin-server,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"

-    # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTest. Also run spark unit test of spark 1.6 in this build.
+    # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTestPt1. Also run spark unit test of spark 1.6 in this build.
     - sudo: required
       jdk: "oraclejdk8"
       dist: trusty
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTestPt1,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+
+    # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTestPt2. Also run spark unit test of spark 1.6 in this build.
+    - sudo: required
+      jdk: "oraclejdk8"
+      dist: trusty
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTestPt2,org.apache.zeppelin.spark.* -DfailIfNoTests=false"

     # Test spark module for 2.4.0 with scala 2.11
     - jdk: "oraclejdk8"
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTest.java
index 8d076e3..0665953 100644
--- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTest.java
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTest.java
@@ -21,8 +21,8 @@ import java.util.List;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;

-@RunWith(value = Parameterized.class)
-public class SparkIntegrationTest {
+
+public abstract class SparkIntegrationTest {
   private static Logger LOGGER = LoggerFactory.getLogger(SparkIntegrationTest.class);

   private static MiniHadoopCluster hadoopCluster;
@@ -39,18 +39,6 @@ public class SparkIntegrationTest {
     this.sparkHome = SparkDownloadUtils.downloadSpark(sparkVersion);
   }

-  @Parameterized.Parameters
-  public static List<Object[]> data() {
-    return Arrays.asList(new Object[][]{
-        {"2.4.0"},
-        {"2.3.2"},
-        {"2.2.1"},
-        {"2.1.2"},
-        {"2.0.2"},
-        {"1.6.3"}
-    });
-  }
-
   @BeforeClass
   public static void setUp() throws IOException {
     hadoopCluster = new MiniHadoopCluster();
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt1.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt1.java
new file mode 100644
index 0000000..acbdde4
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt1.java
@@ -0,0 +1,25 @@
+package org.apache.zeppelin.interpreter;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.util.Arrays;
+import java.util.List;
+
+@RunWith(value = Parameterized.class)
+public class SparkIntegrationTestPt1 extends SparkIntegrationTest{
+
+  public SparkIntegrationTestPt1(String sparkVersion) {
+    super(sparkVersion);
+  }
+
+  @Parameterized.Parameters
+  public static List<Object[]> data() {
+    return Arrays.asList(new Object[][]{
+        {"2.4.0"},
+        {"2.3.2"},
+        {"2.2.1"},
+    });
+  }
+
+}
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt2.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt2.java
new file mode 100644
index 0000000..80cca77
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SparkIntegrationTestPt2.java
@@ -0,0 +1,25 @@
+package org.apache.zeppelin.interpreter;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.util.Arrays;
+import java.util.List;
+
+@RunWith(value = Parameterized.class)
+public class SparkIntegrationTestPt2 extends SparkIntegrationTest{
+
+  public SparkIntegrationTestPt2(String sparkVersion) {
+    super(sparkVersion);
+  }
+
+  @Parameterized.Parameters
+  public static List<Object[]> data() {
+    return Arrays.asList(new Object[][]{
+        {"2.1.2"},
+        {"2.0.2"},
+        {"1.6.3"}
+    });
+  }
+
+}
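
To reproduce one half of the split locally, a minimal sketch reusing the same Maven flags the Travis profiles above pass via TEST_FLAG, MODULES, and TEST_PROJECTS (assuming the modules were first built with "mvn install -DskipTests -DskipRat -am" and an appropriate -Pspark-* profile):

    # Run only the first half of the Spark version matrix (2.4.0, 2.3.2, 2.2.1)
    mvn test -DskipRat -am -pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies \
        -Dtest=SparkIntegrationTestPt1 -DfailIfNoTests=false

    # Run the second half (2.1.2, 2.0.2, 1.6.3)
    mvn test -DskipRat -am -pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies \
        -Dtest=SparkIntegrationTestPt2 -DfailIfNoTests=false

Moving the @Parameterized.Parameters data into two concrete subclasses of the now-abstract SparkIntegrationTest splits the Spark version matrix across two Travis jobs, so each job stays under the 50-minute limit while the shared test logic lives in one place.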