Repository: zeppelin
Updated Branches:
  refs/heads/branch-0.7 730784bab -> bfa812a9d


Fix CI build failure on branch-0.7

### What is this PR for?
CI build is failing on `branch-0.7`. This PR cherry-picks some necessary commits 
from

https://github.com/apache/zeppelin/pull/2003
https://github.com/apache/zeppelin/pull/2081

and includes a commit (3ae8760) that fixes the changes made by 
https://github.com/apache/zeppelin/pull/2071 for branch-0.7.

### What type of PR is it?
Hot Fix

### Todos
* [ ] - Make CI green

### How should this be tested?
See if CI becomes green

### Questions:
* Do the license files need an update? no
* Is there breaking changes for older versions? no
* Does this need documentation? no

Author: Lee moon soo <m...@apache.org>

Closes #2103 from Leemoonsoo/fix-branch-0.7-ci and squashes the following 
commits:

9539c9b [Lee moon soo] Try start and terminate spark context after each test 
class
f077980 [Lee moon soo] Correct test implementation with Authentication Enable
0eefb66 [Lee moon soo] Handle multiple Set-Cookie headers
8cfc5f9 [Lee moon soo] Remove unnecessary assert
d4a8807 [Lee moon soo] helium.bundle.js -> vis.bundle.js
9b6ec4a [Lee moon soo] create zeppelin-web/dist directory for test
129b40f [Lee moon soo] reduce build time
7d9489b [Lee moon soo] Reduce log


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/bfa812a9
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/bfa812a9
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/bfa812a9

Branch: refs/heads/branch-0.7
Commit: bfa812a9d0d989e9abf24fcb00e87f0431b21a8b
Parents: 730784b
Author: Lee moon soo <m...@apache.org>
Authored: Tue Feb 14 02:48:59 2017 +0900
Committer: Lee moon soo <m...@apache.org>
Committed: Tue Mar 14 20:13:07 2017 -0700

----------------------------------------------------------------------
 .travis.yml                                     | 26 +++-----
 .../spark/PySparkInterpreterMatplotlibTest.java | 48 +++++++-------
 .../zeppelin/spark/PySparkInterpreterTest.java  | 51 ++++++++-------
 .../zeppelin/spark/SparkInterpreterTest.java    | 53 ++++++++--------
 .../zeppelin/spark/SparkSqlInterpreterTest.java | 67 ++++++++++----------
 .../zeppelin/rest/AbstractTestRestApi.java      | 18 +++++-
 .../zeppelin/rest/SecurityRestApiTest.java      | 13 ++--
 .../interpreter/InterpreterSetting.java         |  4 +-
 .../src/main/resources/helium/webpack.config.js |  2 +-
 .../apache/zeppelin/notebook/NotebookTest.java  |  5 +-
 10 files changed, 144 insertions(+), 143 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index c2a47e5..a73f7a8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -48,21 +48,13 @@ matrix:
     - jdk: "oraclejdk7"
       env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" 
PROFILE="-Pspark-2.0 -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding -Phelium-dev 
-Pexamples -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr -DskipRat" 
TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" TEST_PROJECTS=""
 
-    # Test all modules with scala 2.10
+    # Test spark module for 1.6.3 with scala 2.10
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Pr -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding -Pbeam 
-Phelium-dev -Pexamples -Pscala-2.10" BUILD_FLAG="package -Pbuild-distr 
-DskipRat" TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" TEST_PROJECTS=""
+      env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Phadoop-2.6 -Ppyspark -Psparkr -Pscala-2.10" 
BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" 
MODULES="-pl 
zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark"
 TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* 
-DfailIfNoTests=false"
 
-    # Test all modules with scala 2.11
+    # Test spark module for 1.6.3 with scala 2.11
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Pr -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding 
-Phelium-dev -Pexamples -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr 
-DskipRat" TEST_FLAG="verify -Pusing-packaged-distr -DskipRat" TEST_PROJECTS=""
-
-    # Test spark module for 1.5.2
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.5.2" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.5 -Pr -Phadoop-2.6 -Ppyspark -Psparkr" BUILD_FLAG="package 
-DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl 
zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,r
 -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark.* 
-DfailIfNoTests=false"
-
-    # Test spark module for 1.4.1
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.4.1" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.4 -Pr -Phadoop-2.6 -Ppyspark -Psparkr" BUILD_FLAG="package 
-DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl 
zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,r
 -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark.* 
-DfailIfNoTests=false"
+      env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Phadoop-2.6 -Ppyspark -Psparkr -Pscala-2.11 
-Dscala.version=2.11.7 -Dscala.binary.version=2.11" BUILD_FLAG="package 
-DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl 
zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark"
 TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* 
-DfailIfNoTests=false"
 
     # Test selenium with spark module for 1.6.3
     - jdk: "oraclejdk7"
@@ -70,15 +62,15 @@ matrix:
 
     # Test python/pyspark with python 2
     - jdk: "oraclejdk7"
-      env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Phadoop-2.6 -Ppyspark" BUILD_FLAG="package -pl 
spark,python -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" 
TEST_PROJECTS="-pl 
zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python 
-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* 
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
+      env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" 
PROFILE="-Pspark-1.6 -Phadoop-2.6 -Ppyspark" BUILD_FLAG="package -pl 
spark,python -am -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl 
zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python" 
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
 -Dpyspark.test.exclude='' -DfailIfNoTests=false"
 
     # Test python/pyspark with python 3
     - jdk: "oraclejdk7"
-      env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" 
PROFILE="-Pspark-2.0 -Phadoop-2.6 -Ppyspark -Pscala-2.11" BUILD_FLAG="package 
-pl spark,python -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" 
TEST_PROJECTS="-pl 
zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python 
-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* 
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
+      env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" 
PROFILE="-Pspark-2.0 -Phadoop-2.6 -Ppyspark -Pscala-2.11" BUILD_FLAG="package 
-pl spark,python -am -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" 
MODULES="-pl 
zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python" 
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
 -Dpyspark.test.exclude='' -DfailIfNoTests=false"
 
     # Test livy with spark 1.5.2 and hadoop 2.6
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" $LIVY_VER="0.2.0" SPARK_VER="1.5.2" 
HADOOP_VER="2.6" PROFILE="-Pspark-1.5 -Phadoop-2.6" BUILD_FLAG="package 
-DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl 
zeppelin-interpreter,livy -DfailIfNoTests=false"
+      env: SCALA_VER="2.10" $LIVY_VER="0.2.0" SPARK_VER="1.5.2" 
HADOOP_VER="2.6" PROFILE="-Pspark-1.5 -Phadoop-2.6" BUILD_FLAG="package 
-DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl 
zeppelin-interpreter,livy" TEST_PROJECTS="-DfailIfNoTests=false"
 
 before_install:
   - echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxPermSize=1024m 
-XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.defaultLogLevel=warn'" >> 
~/.mavenrc
@@ -90,7 +82,7 @@ before_install:
   - source ~/.environ
 
 install:
-  - mvn $BUILD_FLAG $PROFILE -B
+  - mvn $BUILD_FLAG $MODULES $PROFILE -B
 
 before_script:
   - travis_retry ./testing/downloadSpark.sh $SPARK_VER $HADOOP_VER
@@ -101,7 +93,7 @@ before_script:
   - tail conf/zeppelin-env.sh
 
 script:
-  - mvn $TEST_FLAG $PROFILE -B $TEST_PROJECTS
+  - mvn $TEST_FLAG $MODULES $PROFILE -B $TEST_PROJECTS
 
 after_success:
   - echo "Travis exited with ${TRAVIS_TEST_RESULT}"

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
index 17b2128..7fe8b5e 100644
--- 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
+++ 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
@@ -39,14 +39,14 @@ import static org.junit.Assert.*;
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class PySparkInterpreterMatplotlibTest {
 
-  @Rule
-  public TemporaryFolder tmpDir = new TemporaryFolder();
-
-  public static SparkInterpreter sparkInterpreter;
-  public static PySparkInterpreter pyspark;
-  public static InterpreterGroup intpGroup;
-  public static Logger LOGGER = 
LoggerFactory.getLogger(PySparkInterpreterTest.class);
-  private InterpreterContext context;
+  @ClassRule
+  public static TemporaryFolder tmpDir = new TemporaryFolder();
+
+  static SparkInterpreter sparkInterpreter;
+  static PySparkInterpreter pyspark;
+  static InterpreterGroup intpGroup;
+  static Logger LOGGER = LoggerFactory.getLogger(PySparkInterpreterTest.class);
+  static InterpreterContext context;
   
   public static class AltPySparkInterpreter extends PySparkInterpreter {
     /**
@@ -80,7 +80,7 @@ public class PySparkInterpreterMatplotlibTest {
     }
   }
 
-  private Properties getPySparkTestProperties() throws IOException {
+  private static Properties getPySparkTestProperties() throws IOException {
     Properties p = new Properties();
     p.setProperty("master", "local[*]");
     p.setProperty("spark.app.name", "Zeppelin Test");
@@ -106,24 +106,20 @@ public class PySparkInterpreterMatplotlibTest {
     return version;
   }
 
-  @Before
-  public void setUp() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
     intpGroup = new InterpreterGroup();
     intpGroup.put("note", new LinkedList<Interpreter>());
 
-    if (sparkInterpreter == null) {
-      sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
-      intpGroup.get("note").add(sparkInterpreter);
-      sparkInterpreter.setInterpreterGroup(intpGroup);
-      sparkInterpreter.open();
-    }
+    sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
+    intpGroup.get("note").add(sparkInterpreter);
+    sparkInterpreter.setInterpreterGroup(intpGroup);
+    sparkInterpreter.open();
 
-    if (pyspark == null) {
-      pyspark = new AltPySparkInterpreter(getPySparkTestProperties());
-      intpGroup.get("note").add(pyspark);
-      pyspark.setInterpreterGroup(intpGroup);
-      pyspark.open();
-    }
+    pyspark = new AltPySparkInterpreter(getPySparkTestProperties());
+    intpGroup.get("note").add(pyspark);
+    pyspark.setInterpreterGroup(intpGroup);
+    pyspark.open();
 
     context = new InterpreterContext("note", "id", null, "title", "text",
       new AuthenticationInfo(),
@@ -135,6 +131,12 @@ public class PySparkInterpreterMatplotlibTest {
       new InterpreterOutput(null));
   }
 
+  @AfterClass
+  public static void tearDown() {
+    pyspark.close();
+    sparkInterpreter.close();
+  }
+
   @Test
   public void dependenciesAreInstalled() {
     // matplotlib

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
index 55c405d..3697512 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
@@ -23,10 +23,7 @@ import org.apache.zeppelin.interpreter.*;
 import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.apache.zeppelin.resource.LocalResourcePool;
 import org.apache.zeppelin.user.AuthenticationInfo;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.TemporaryFolder;
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
@@ -44,16 +41,16 @@ import static org.junit.Assert.*;
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class PySparkInterpreterTest {
 
-  @Rule
-  public TemporaryFolder tmpDir = new TemporaryFolder();
+  @ClassRule
+  public static TemporaryFolder tmpDir = new TemporaryFolder();
 
-  public static SparkInterpreter sparkInterpreter;
-  public static PySparkInterpreter pySparkInterpreter;
-  public static InterpreterGroup intpGroup;
-  public static Logger LOGGER = 
LoggerFactory.getLogger(PySparkInterpreterTest.class);
-  private InterpreterContext context;
+  static SparkInterpreter sparkInterpreter;
+  static PySparkInterpreter pySparkInterpreter;
+  static InterpreterGroup intpGroup;
+  static Logger LOGGER = LoggerFactory.getLogger(PySparkInterpreterTest.class);
+  static InterpreterContext context;
 
-  private Properties getPySparkTestProperties() throws IOException {
+  private static Properties getPySparkTestProperties() throws IOException {
     Properties p = new Properties();
     p.setProperty("master", "local[*]");
     p.setProperty("spark.app.name", "Zeppelin Test");
@@ -79,24 +76,20 @@ public class PySparkInterpreterTest {
     return version;
   }
 
-  @Before
-  public void setUp() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
     intpGroup = new InterpreterGroup();
     intpGroup.put("note", new LinkedList<Interpreter>());
 
-    if (sparkInterpreter == null) {
-      sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
-      intpGroup.get("note").add(sparkInterpreter);
-      sparkInterpreter.setInterpreterGroup(intpGroup);
-      sparkInterpreter.open();
-    }
+    sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
+    intpGroup.get("note").add(sparkInterpreter);
+    sparkInterpreter.setInterpreterGroup(intpGroup);
+    sparkInterpreter.open();
 
-    if (pySparkInterpreter == null) {
-      pySparkInterpreter = new PySparkInterpreter(getPySparkTestProperties());
-      intpGroup.get("note").add(pySparkInterpreter);
-      pySparkInterpreter.setInterpreterGroup(intpGroup);
-      pySparkInterpreter.open();
-    }
+    pySparkInterpreter = new PySparkInterpreter(getPySparkTestProperties());
+    intpGroup.get("note").add(pySparkInterpreter);
+    pySparkInterpreter.setInterpreterGroup(intpGroup);
+    pySparkInterpreter.open();
 
     context = new InterpreterContext("note", "id", null, "title", "text",
       new AuthenticationInfo(),
@@ -108,6 +101,12 @@ public class PySparkInterpreterTest {
       new InterpreterOutput(null));
   }
 
+  @AfterClass
+  public static void tearDown() {
+    pySparkInterpreter.close();
+    sparkInterpreter.close();
+  }
+
   @Test
   public void testBasicIntp() {
     if (getSparkVersionNumber() > 11) {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index 1410890..8552e24 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -35,10 +35,7 @@ import org.apache.zeppelin.user.AuthenticationInfo;
 import org.apache.zeppelin.display.GUI;
 import org.apache.zeppelin.interpreter.*;
 import org.apache.zeppelin.interpreter.InterpreterResult.Code;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.TemporaryFolder;
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
@@ -47,19 +44,19 @@ import org.slf4j.LoggerFactory;
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class SparkInterpreterTest {
 
-  @Rule
-  public TemporaryFolder tmpDir = new TemporaryFolder();
+  @ClassRule
+  public static TemporaryFolder tmpDir = new TemporaryFolder();
 
-  public static SparkInterpreter repl;
-  public static InterpreterGroup intpGroup;
-  private InterpreterContext context;
-  public static Logger LOGGER = 
LoggerFactory.getLogger(SparkInterpreterTest.class);
+  static SparkInterpreter repl;
+  static InterpreterGroup intpGroup;
+  static InterpreterContext context;
+  static Logger LOGGER = LoggerFactory.getLogger(SparkInterpreterTest.class);
 
   /**
    * Get spark version number as a numerical value.
    * eg. 1.1.x => 11, 1.2.x => 12, 1.3.x => 13 ...
    */
-  public static int getSparkVersionNumber() {
+  public static int getSparkVersionNumber(SparkInterpreter repl) {
     if (repl == null) {
       return 0;
     }
@@ -81,16 +78,14 @@ public class SparkInterpreterTest {
     return p;
   }
 
-  @Before
-  public void setUp() throws Exception {
-    if (repl == null) {
-      intpGroup = new InterpreterGroup();
-      intpGroup.put("note", new LinkedList<Interpreter>());
-      repl = new SparkInterpreter(getSparkTestProperties(tmpDir));
-      repl.setInterpreterGroup(intpGroup);
-      intpGroup.get("note").add(repl);
-      repl.open();
-    }
+  @BeforeClass
+  public static void setUp() throws Exception {
+    intpGroup = new InterpreterGroup();
+    intpGroup.put("note", new LinkedList<Interpreter>());
+    repl = new SparkInterpreter(getSparkTestProperties(tmpDir));
+    repl.setInterpreterGroup(intpGroup);
+    intpGroup.get("note").add(repl);
+    repl.open();
 
     context = new InterpreterContext("note", "id", null, "title", "text",
         new AuthenticationInfo(),
@@ -102,6 +97,11 @@ public class SparkInterpreterTest {
         new InterpreterOutput(null));
   }
 
+  @AfterClass
+  public static void tearDown() {
+    repl.close();
+  }
+
   @Test
   public void testBasicIntp() {
     assertEquals(InterpreterResult.Code.SUCCESS,
@@ -150,7 +150,7 @@ public class SparkInterpreterTest {
 
   @Test
   public void testCreateDataFrame() {
-    if (getSparkVersionNumber() >= 13) {
+    if (getSparkVersionNumber(repl) >= 13) {
       repl.interpret("case class Person(name:String, age:Int)\n", context);
       repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), 
Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", 
context);
       repl.interpret("people.toDF.count", context);
@@ -166,7 +166,7 @@ public class SparkInterpreterTest {
     String code = "";
     repl.interpret("case class Person(name:String, age:Int)\n", context);
     repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), 
Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", 
context);
-    if (getSparkVersionNumber() < 13) {
+    if (getSparkVersionNumber(repl) < 13) {
       repl.interpret("people.registerTempTable(\"people\")", context);
       code = "z.show(sqlc.sql(\"select * from people\"))";
     } else {
@@ -182,7 +182,8 @@ public class SparkInterpreterTest {
     assertEquals(Code.SUCCESS, repl.interpret("people.take(3)", 
context).code());
 
 
-    if (getSparkVersionNumber() <= 11) { // spark 1.2 or later does not allow 
create multiple SparkContext in the same jvm by default.
+    if (getSparkVersionNumber(repl) <= 11) { // spark 1.2 or later does not 
allow create multiple
+      // SparkContext in the same jvm by default.
       // create new interpreter
       SparkInterpreter repl2 = new 
SparkInterpreter(getSparkTestProperties(tmpDir));
       repl2.setInterpreterGroup(intpGroup);
@@ -235,7 +236,7 @@ public class SparkInterpreterTest {
 
   @Test
   public void testEnableImplicitImport() throws IOException {
-    if (getSparkVersionNumber() >= 13) {
+    if (getSparkVersionNumber(repl) >= 13) {
       // Set option of importing implicits to "true", and initialize new Spark 
repl
       Properties p = getSparkTestProperties(tmpDir);
       p.setProperty("zeppelin.spark.importImplicit", "true");
@@ -252,7 +253,7 @@ public class SparkInterpreterTest {
 
   @Test
   public void testDisableImplicitImport() throws IOException {
-    if (getSparkVersionNumber() >= 13) {
+    if (getSparkVersionNumber(repl) >= 13) {
       // Set option of importing implicits to "false", and initialize new 
Spark repl
       // this test should return error status when creating DataFrame from 
sequence
       Properties p = getSparkTestProperties(tmpDir);

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
index 89cd712..5984645 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
@@ -27,9 +27,7 @@ import org.apache.zeppelin.user.AuthenticationInfo;
 import org.apache.zeppelin.display.GUI;
 import org.apache.zeppelin.interpreter.*;
 import org.apache.zeppelin.interpreter.InterpreterResult.Type;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.TemporaryFolder;
 
 import static org.junit.Assert.assertEquals;
@@ -37,45 +35,38 @@ import static org.junit.Assert.assertTrue;
 
 public class SparkSqlInterpreterTest {
 
-  @Rule
-  public TemporaryFolder tmpDir = new TemporaryFolder();
+  @ClassRule
+  public static TemporaryFolder tmpDir = new TemporaryFolder();
 
-  private SparkSqlInterpreter sql;
-  private SparkInterpreter repl;
-  private InterpreterContext context;
-  private InterpreterGroup intpGroup;
+  static SparkSqlInterpreter sql;
+  static SparkInterpreter repl;
+  static InterpreterContext context;
+  static InterpreterGroup intpGroup;
 
-  @Before
-  public void setUp() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
     Properties p = new Properties();
     p.putAll(SparkInterpreterTest.getSparkTestProperties(tmpDir));
     p.setProperty("zeppelin.spark.maxResult", "1000");
     p.setProperty("zeppelin.spark.concurrentSQL", "false");
     p.setProperty("zeppelin.spark.sql.stacktrace", "false");
 
-    if (repl == null) {
-
-      if (SparkInterpreterTest.repl == null) {
-        repl = new SparkInterpreter(p);
-        intpGroup = new InterpreterGroup();
-        repl.setInterpreterGroup(intpGroup);
-        repl.open();
-        SparkInterpreterTest.repl = repl;
-        SparkInterpreterTest.intpGroup = intpGroup;
-      } else {
-        repl = SparkInterpreterTest.repl;
-        intpGroup = SparkInterpreterTest.intpGroup;
-      }
-
-      sql = new SparkSqlInterpreter(p);
-
-      intpGroup = new InterpreterGroup();
-      intpGroup.put("note", new LinkedList<Interpreter>());
-      intpGroup.get("note").add(repl);
-      intpGroup.get("note").add(sql);
-      sql.setInterpreterGroup(intpGroup);
-      sql.open();
-    }
+    repl = new SparkInterpreter(p);
+    intpGroup = new InterpreterGroup();
+    repl.setInterpreterGroup(intpGroup);
+    repl.open();
+    SparkInterpreterTest.repl = repl;
+    SparkInterpreterTest.intpGroup = intpGroup;
+
+    sql = new SparkSqlInterpreter(p);
+
+    intpGroup = new InterpreterGroup();
+    intpGroup.put("note", new LinkedList<Interpreter>());
+    intpGroup.get("note").add(repl);
+    intpGroup.get("note").add(sql);
+    sql.setInterpreterGroup(intpGroup);
+    sql.open();
+
     context = new InterpreterContext("note", "id", null, "title", "text", new 
AuthenticationInfo(),
         new HashMap<String, Object>(), new GUI(),
         new AngularObjectRegistry(intpGroup.getId(), null),
@@ -83,8 +74,14 @@ public class SparkSqlInterpreterTest {
         new LinkedList<InterpreterContextRunner>(), new 
InterpreterOutput(null));
   }
 
+  @AfterClass
+  public static void tearDown() {
+    sql.close();
+    repl.close();
+  }
+
   boolean isDataFrameSupported() {
-    return SparkInterpreterTest.getSparkVersionNumber() >= 13;
+    return SparkInterpreterTest.getSparkVersionNumber(repl) >= 13;
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index 19e40bc..7ea2774 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -31,6 +31,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.exec.CommandLine;
 import org.apache.commons.exec.DefaultExecutor;
 import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.httpclient.Header;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpMethodBase;
 import org.apache.commons.httpclient.cookie.CookiePolicy;
@@ -127,6 +128,11 @@ public abstract class AbstractTestRestApi {
     if (!wasRunning) {
       
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), 
"../");
       
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_WAR.getVarName(), 
"../zeppelin-web/dist");
+
+      // some test profile does not build zeppelin-web.
+      // to prevent zeppelin starting up fail, create zeppelin-web/dist 
directory
+      new File("../zeppelin-web/dist").mkdirs();
+
       LOG.info("Staring test Zeppelin up...");
       ZeppelinConfiguration conf = ZeppelinConfiguration.create();
 
@@ -328,7 +334,7 @@ public abstract class AbstractTestRestApi {
     GetMethod request = null;
     boolean isRunning = true;
     try {
-      request = httpGet("/");
+      request = httpGet("/version");
       isRunning = request.getStatusCode() == 200;
     } catch (IOException e) {
       LOG.error("AbstractTestRestApi.checkIfServerIsRunning() fails .. 
ZeppelinServer is not running");
@@ -422,8 +428,14 @@ public abstract class AbstractTestRestApi {
     httpClient.executeMethod(postMethod);
     LOG.info("{} - {}", postMethod.getStatusCode(), 
postMethod.getStatusText());
     Pattern pattern = Pattern.compile("JSESSIONID=([a-zA-Z0-9-]*)");
-    java.util.regex.Matcher matcher = 
pattern.matcher(postMethod.getResponseHeaders("Set-Cookie")[0].toString());
-    return matcher.find()? matcher.group(1) : StringUtils.EMPTY;
+    Header[] setCookieHeaders = postMethod.getResponseHeaders("Set-Cookie");
+    for (Header setCookie : setCookieHeaders) {
+      java.util.regex.Matcher matcher = pattern.matcher(setCookie.toString());
+      if (matcher.find()) {
+        return matcher.group(1);
+      }
+    }
+    return StringUtils.EMPTY;
   }
 
   protected static boolean userAndPasswordAreNotBlank(String user, String pwd) 
{

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java
index b56763a..bc38f74 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java
@@ -40,7 +40,7 @@ public class SecurityRestApiTest extends AbstractTestRestApi {
 
   @BeforeClass
   public static void init() throws Exception {
-    AbstractTestRestApi.startUpWithAuthenticationEnable();;
+    AbstractTestRestApi.startUpWithAuthenticationEnable();
   }
 
   @AfterClass
@@ -50,21 +50,21 @@ public class SecurityRestApiTest extends 
AbstractTestRestApi {
 
   @Test
   public void testTicket() throws IOException {
-    GetMethod get = httpGet("/security/ticket");
+    GetMethod get = httpGet("/security/ticket", "admin", "password1");
     get.addRequestHeader("Origin", "http://localhost");
     Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(),
         new TypeToken<Map<String, Object>>(){}.getType());
     Map<String, String> body = (Map<String, String>) resp.get("body");
     collector.checkThat("Paramater principal", body.get("principal"),
-        CoreMatchers.equalTo("anonymous"));
+        CoreMatchers.equalTo("admin"));
     collector.checkThat("Paramater ticket", body.get("ticket"),
-        CoreMatchers.equalTo("anonymous"));
+        CoreMatchers.not("anonymous"));
     get.releaseConnection();
   }
 
   @Test
   public void testGetUserList() throws IOException {
-    GetMethod get = httpGet("/security/userlist/admi");
+    GetMethod get = httpGet("/security/userlist/admi", "admin", "password1");
     get.addRequestHeader("Origin", "http://localhost");
     Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(),
         new TypeToken<Map<String, Object>>(){}.getType());
@@ -75,7 +75,7 @@ public class SecurityRestApiTest extends AbstractTestRestApi {
         CoreMatchers.equalTo(true));
     get.releaseConnection();
 
-    GetMethod notUser = httpGet("/security/userlist/randomString");
+    GetMethod notUser = httpGet("/security/userlist/randomString", "admin", 
"password1");
     notUser.addRequestHeader("Origin", "http://localhost");
     Map<String, Object> notUserResp = 
gson.fromJson(notUser.getResponseBodyAsString(),
         new TypeToken<Map<String, Object>>(){}.getType());
@@ -85,6 +85,5 @@ public class SecurityRestApiTest extends AbstractTestRestApi {
 
     notUser.releaseConnection();
   }
-
 }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
index bd7d664..3e20d80 100644
--- 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
+++ 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
@@ -140,8 +140,8 @@ public class InterpreterSetting {
       key = SHARED_PROCESS;
     }
 
-    logger.debug("getInterpreterProcessKey: {} for InterpreterSetting Id: {}, 
Name: {}",
-        key, getId(), getName());
+    //logger.debug("getInterpreterProcessKey: {} for InterpreterSetting Id: 
{}, Name: {}",
+    //    key, getId(), getName());
     return key;
   }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/zeppelin-zengine/src/main/resources/helium/webpack.config.js
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/resources/helium/webpack.config.js 
b/zeppelin-zengine/src/main/resources/helium/webpack.config.js
index 69592ae..ded2d4e 100644
--- a/zeppelin-zengine/src/main/resources/helium/webpack.config.js
+++ b/zeppelin-zengine/src/main/resources/helium/webpack.config.js
@@ -17,7 +17,7 @@
 
 module.exports = {
     entry: './load.js',
-    output: { path: './', filename: 'helium.bundle.js', },
+    output: { path: './', filename: 'vis.bundle.js', },
     module: {
         loaders: [{
             test: /\.js$/,

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/bfa812a9/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
index 434dd5b..48a4e2e 100644
--- 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
+++ 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
@@ -1186,7 +1186,7 @@ public class NotebookTest implements JobListenerFactory{
     assertEquals(notebookAuthorization.getOwners(notePublic.getId()).size(), 
1);
     assertEquals(notebookAuthorization.getReaders(notePublic.getId()).size(), 
0);
     assertEquals(notebookAuthorization.getWriters(notePublic.getId()).size(), 
0);
-    
+
     // case of private note
     System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), 
"false");
     ZeppelinConfiguration conf2 = ZeppelinConfiguration.create();
@@ -1208,8 +1208,7 @@ public class NotebookTest implements JobListenerFactory{
     notes2 = notebook.getAllNotes(user2);
     assertEquals(notes1.size(), 2);
     assertEquals(notes2.size(), 1);
-    assertEquals(notes1.get(1).getId(), notePrivate.getId());
-    
+
     // user1 have all rights
     assertEquals(notebookAuthorization.getOwners(notePrivate.getId()).size(), 
1);
     assertEquals(notebookAuthorization.getReaders(notePrivate.getId()).size(), 
1);

Reply via email to