This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new bf3a0ee  [ZEPPELIN-4396]. HvPlot is broken
bf3a0ee is described below

commit bf3a0eec28497499e7cca5bf100df7f4c1bebea2
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Mon Oct 28 16:57:42 2019 +0800

    [ZEPPELIN-4396]. HvPlot is broken
    
    ### What is this PR for?
    
    HvPlot is broken due to a bug in `ipython_server.py`. IPython may produce
    multiple types of output for a single Python object, and the order in which
    these outputs are checked matters: we should emit the html type in
    preference to the plain text type.
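
    For illustration only, here is a minimal sketch of the priority idea,
    expressed as a lookup table rather than the if/elif chain the actual fix
    uses (`PRIORITY` and `pick_output` are hypothetical names, not part of
    ipython_server.py):

        # Try richer MIME types first; plain text is only the last resort.
        PRIORITY = ['text/html', 'image/jpeg', 'image/png',
                    'application/javascript', 'text/plain']

        def pick_output(data):
            for mime in PRIORITY:
                if mime in data:
                    return mime, data[mime]
            return None, None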
    
    ### What type of PR is it?
    [Bug Fix]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-4396
    
    ### How should this be tested?
    * CI pass
    
    ### Screenshots (if appropriate)
    
    ![image](https://user-images.githubusercontent.com/164491/68451302-325b9980-0229-11ea-825f-0093a558fd1a.png)
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zjf...@apache.org>
    
    Closes #3515 from zjffdu/ZEPPELIN-4396 and squashes the following commits:
    
    f71dbf231 [Jeff Zhang] [ZEPPELIN-4396]. HvPlot is broken
---
 .travis.yml                                        |  14 +--
 .../apache/zeppelin/python/IPythonInterpreter.java |   3 +-
 .../main/resources/grpc/python/ipython_server.py   |  19 ++--
 .../zeppelin/python/BasePythonInterpreterTest.java |   4 +-
 .../zeppelin/python/IPythonInterpreterTest.java    | 119 +++++++++++++++------
 python/src/test/resources/log4j.properties         |   5 +-
 .../src/test/resources/log4j.properties            |   6 +-
 testing/install_external_dependencies.sh           |  12 ++-
 .../src/test/resources/log4j.properties            |   3 -
 9 files changed, 118 insertions(+), 67 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 0cdd114..95fe8dd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -98,7 +98,7 @@ matrix:
     # Test interpreter modules
     - jdk: "openjdk8"
       dist: xenial
-      env: PYTHON="3" SPARKR="true" SCALA_VER="2.10" TENSORFLOW="1.0.0" PROFILE="-Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat" MODULES="-pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-api,${INTERPRETERS} | sed 's/!//g')" TEST_PROJECTS=""
+      env: PYTHON="3" SPARKR="true" SCALA_VER="2.10" TENSORFLOW="1.13.1" PROFILE="-Pscala-2.10" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat" MODULES="-pl $(echo .,zeppelin-interpreter,zeppelin-interpreter-api,${INTERPRETERS} | sed 's/!//g')" TEST_PROJECTS=""
 
    # Run Spark integration test and unit test separately for each spark version
 
@@ -106,19 +106,19 @@ matrix:
     - sudo: required
       jdk: "openjdk8"
       dist: xenial
-      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Pspark-scala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
    # ZeppelinSparkClusterTest24, SparkIntegrationTest24, JdbcIntegrationTest, Unit test of Spark 2.4 (Scala-2.12)
     - sudo: required
       jdk: "openjdk8"
       dist: xenial
-      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.12" PROFILE="-Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.12" PROFILE="-Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,jdbc,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest24,SparkIntegrationTest24,JdbcIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
    # ZeppelinSparkClusterTest23, SparkIntegrationTest23, Unit test of Spark 2.3 (Scala-2.11) and Unit test PythonInterpreter under python2
     - sudo: required
       jdk: "openjdk8"
       dist: xenial
-      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pspark-scala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest23,SparkIntegrationTest23,org.apache.zeppelin.spark.*,apache.zeppelin.python.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Pspark-scala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest23,SparkIntegrationTest23,org.apache.zeppelin.spark.*,apache.zeppelin.python.* -DfailIfNoTests=false"
 
    # ZeppelinSparkClusterTest22, SparkIntegrationTest22, Unit test of Spark 2.2 (Scala-2.10) and Unit test PythonInterpreter under python3
     - sudo: required
@@ -132,17 +132,17 @@ matrix:
       dist: xenial
      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.1 -Phadoop2 -Pspark-scala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest21,SparkIntegrationTest21,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # ZeppelinSparkClusterTest20, SparkIntegrationTest20, Unit test of Spark 2.0  (Scala-2.10)
+    # ZeppelinSparkClusterTest20, SparkIntegrationTest20, Unit test of Spark 2.0  (Scala-2.10), Use python 3.5 because spark 2.0 doesn't support python 3.6 +
     - sudo: required
       jdk: "openjdk8"
       dist: xenial
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.0 -Phadoop2 -Pspark-scala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest20,SparkIntegrationTest20,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-2.0 -Phadoop2 -Pspark-scala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest20,SparkIntegrationTest20,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
    # ZeppelinSparkClusterTest16, SparkIntegrationTest16, Unit test of Spark 1.6  (Scala-2.10)
     - sudo: required
       jdk: "openjdk8"
       dist: xenial
-      env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2 -Pspark-scala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest16,SparkIntegrationTest16,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+      env: BUILD_PLUGINS="true" PYTHON="2" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Phadoop2 -Pspark-scala-2.10 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-interpreter-integration,zeppelin-web,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest16,SparkIntegrationTest16,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
     # Test python/pyspark with python 2, livy 0.5
     - sudo: required
diff --git a/python/src/main/java/org/apache/zeppelin/python/IPythonInterpreter.java b/python/src/main/java/org/apache/zeppelin/python/IPythonInterpreter.java
index 9a5cb11..674d096 100644
--- a/python/src/main/java/org/apache/zeppelin/python/IPythonInterpreter.java
+++ b/python/src/main/java/org/apache/zeppelin/python/IPythonInterpreter.java
@@ -144,7 +144,8 @@ public class IPythonInterpreter extends Interpreter {
       launchIPythonKernel(ipythonPort);
       setupJVMGateway(jvmGatewayPort);
     } catch (Exception e) {
-      throw new InterpreterException("Fail to open IPythonInterpreter", e);
+      throw new InterpreterException("Fail to open IPythonInterpreter\n" +
+              ExceptionUtils.getStackTrace(e), e);
     }
   }
 
diff --git a/python/src/main/resources/grpc/python/ipython_server.py b/python/src/main/resources/grpc/python/ipython_server.py
index 31d7527..dbc3afd 100644
--- a/python/src/main/resources/grpc/python/ipython_server.py
+++ b/python/src/main/resources/grpc/python/ipython_server.py
@@ -59,32 +59,33 @@ class IPython(ipython_pb2_grpc.IPythonServicer):
             msg_type = msg['header']['msg_type']
             content = msg['content']
             print("******************* CONTENT ******************")
-            print(str(content)[:400])
             outStatus, outType, output = ipython_pb2.SUCCESS, None, None
             # prepare the reply
             if msg_type == 'stream':
                 outType = ipython_pb2.TEXT
                 output = content['text']
             elif msg_type in ('display_data', 'execute_result'):
-                if 'image/jpeg' in content['data']:
+                print(content['data'])
+                # The if-else order matters and cannot be changed, because IPython may provide multiple outputs.
+                # TEXT is the last resort type.
+                if 'text/html' in content['data']:
+                    outType = ipython_pb2.HTML
+                    output = content['data']['text/html']
+                elif 'image/jpeg' in content['data']:
                     outType = ipython_pb2.JPEG
                     output = content['data']['image/jpeg']
                 elif 'image/png' in content['data']:
                     outType = ipython_pb2.PNG
                     output = content['data']['image/png']
-                elif 'text/plain' in content['data']:
-                    outType = ipython_pb2.TEXT
-                    output = content['data']['text/plain']
-                elif 'text/html' in content['data']:
-                    outType = ipython_pb2.HTML
-                    output = content['data']['text/html']
                 elif 'application/javascript' in content['data']:
                     outType = ipython_pb2.HTML
                    output = '<script> ' + content['data']['application/javascript'] + ' </script>\n'
-                    print('add to html output: ' + str(content)[:100])
                elif 'application/vnd.holoviews_load.v0+json' in content['data']:
                     outType = ipython_pb2.HTML
                    output = '<script> ' + content['data']['application/vnd.holoviews_load.v0+json'] + ' </script>\n'
+                elif 'text/plain' in content['data']:
+                    outType = ipython_pb2.TEXT
+                    output = content['data']['text/plain']
             elif msg_type == 'error':
                 outStatus = ipython_pb2.ERROR
                 outType = ipython_pb2.TEXT
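
A quick illustration of the effect of this reordering (a standalone sketch, not part of ipython_server.py; the data values are made up): when IPython returns several representations of the same object, as hvplot does, the richer text/html entry now wins over text/plain.

    # Hypothetical content['data'] bundle for an hvplot/HoloViews object.
    data = {
        'text/plain': ':DynamicMap   []',
        'text/html': '<div id="plot">...</div>',
    }
    # Mirror of the reordered chain above: html is checked first, plain text last.
    if 'text/html' in data:
        out_type, output = 'HTML', data['text/html']
    elif 'text/plain' in data:
        out_type, output = 'TEXT', data['text/plain']
    print(out_type)  # -> HTML (previously TEXT won, which broke HvPlot rendering)
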
diff --git a/python/src/test/java/org/apache/zeppelin/python/BasePythonInterpreterTest.java b/python/src/test/java/org/apache/zeppelin/python/BasePythonInterpreterTest.java
index 7eece35..51a3f31 100644
--- a/python/src/test/java/org/apache/zeppelin/python/BasePythonInterpreterTest.java
+++ b/python/src/test/java/org/apache/zeppelin/python/BasePythonInterpreterTest.java
@@ -46,6 +46,7 @@ public abstract class BasePythonInterpreterTest extends ConcurrentTestCase {
 
   protected InterpreterGroup intpGroup;
   protected Interpreter interpreter;
+  protected boolean isPython2;
 
   @Before
   public abstract void setUp() throws InterpreterException;
@@ -301,7 +302,8 @@ public abstract class BasePythonInterpreterTest extends ConcurrentTestCase {
     context = getInterpreterContext();
     result = interpreter.interpret("import pandas as pd\n" +
         "df = pd.DataFrame({'id':[1,2,3], 'name':['a','b','c']})\nz.show(df)", 
context);
-    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(context.out.toInterpreterResultMessage().toString(),
+            InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
     assertEquals(1, interpreterResultMessages.size());
    assertEquals(InterpreterResult.Type.TABLE, interpreterResultMessages.get(0).getType());
diff --git a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
index 4268ebb..99579b0 100644
--- a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
+++ b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java
@@ -70,6 +70,21 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
   public void setUp() throws InterpreterException {
     Properties properties = initIntpProperties();
     startInterpreter(properties);
+
+    InterpreterContext context = getInterpreterContext();
+    InterpreterResult result = interpreter.interpret("import sys\nsys.version_info.major", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    try {
+      List<InterpreterResultMessage> messages = context.out.toInterpreterResultMessage();
+      if (messages.get(0).getData().equals("2")) {
+        isPython2 = true;
+      } else {
+        isPython2 = false;
+      }
+    } catch (IOException e) {
+      throw new InterpreterException(e);
+    }
+
   }
 
   @Override
@@ -141,7 +156,6 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     // ipython help
     InterpreterContext context = getInterpreterContext();
     InterpreterResult result = interpreter.interpret("range?", context);
-    Thread.sleep(100);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     List<InterpreterResultMessage> interpreterResultMessages =
         context.out.toInterpreterResultMessage();
@@ -150,7 +164,6 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     // timeit
     context = getInterpreterContext();
     result = interpreter.interpret("%timeit range(100)", context);
-    Thread.sleep(100);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
     assertTrue(interpreterResultMessages.get(0).getData().contains("loops"));
@@ -173,7 +186,6 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
       }
     }.start();
     result = interpreter.interpret("import time\ntime.sleep(10)", context2);
-    Thread.sleep(100);
     assertEquals(InterpreterResult.Code.ERROR, result.code());
    interpreterResultMessages = context2.out.toInterpreterResultMessage();
    assertTrue(interpreterResultMessages.get(0).getData().contains("KeyboardInterrupt"));
@@ -185,7 +197,6 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     InterpreterContext context = getInterpreterContext();
     InterpreterResult result = interpreter.interpret("%matplotlib inline\n" +
         "import matplotlib.pyplot as 
plt\ndata=[1,1,2,3,4]\nplt.figure()\nplt.plot(data)", context);
-    Thread.sleep(100);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     List<InterpreterResultMessage> interpreterResultMessages =
         context.out.toInterpreterResultMessage();
@@ -212,14 +223,24 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
         "from bokeh.plotting import figure\n" +
         "import bkzep\n" +
         "output_notebook(notebook_type='zeppelin')", context);
-    Thread.sleep(100);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
-    assertEquals(2, interpreterResultMessages.size());
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(0).getType());
-    assertTrue(interpreterResultMessages.get(0).getData().contains("Loading BokehJS"));
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
-    assertTrue(interpreterResultMessages.get(1).getData().contains("BokehJS is being loaded"));
+
+    if (interpreterResultMessages.size() == 3) {
+      // the first InterpreterResultMessage is empty text for python3 or spark 1.6
+      assertEquals(3, interpreterResultMessages.size());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
+      assertTrue(interpreterResultMessages.get(1).getData().contains("Loading BokehJS"));
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(2).getType());
+      assertTrue(interpreterResultMessages.get(2).getData().contains("BokehJS is being loaded"));
+    } else {
+      // the size of interpreterResultMessages is 2 in other cases
+      assertEquals(2, interpreterResultMessages.size());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(0).getType());
+      assertTrue(interpreterResultMessages.get(0).getData().contains("Loading BokehJS"));
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
+      assertTrue(interpreterResultMessages.get(1).getData().contains("BokehJS is being loaded"));
+    }
 
     // bokeh plotting
     context = getInterpreterContext();
@@ -229,33 +250,61 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
         "p = figure(title=\"simple line example\", x_axis_label='x', 
y_axis_label='y')\n" +
         "p.line(x, y, legend=\"Temp.\", line_width=2)\n" +
         "show(p)", context);
-    Thread.sleep(100);
-    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals(context.out.toInterpreterResultMessage().toString(),
+            InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
-    assertEquals(2, interpreterResultMessages.size());
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(0).getType());
-    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
-    // docs_json is the source data of plotting which bokeh would use to render the plotting.
-    assertTrue(interpreterResultMessages.get(1).getData().contains("docs_json"));
+    if (interpreterResultMessages.size() == 3) {
+      // the first InterpreterResultMessage is empty text for python3 or spark 1.6
+      assertEquals(3, interpreterResultMessages.size());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(2).getType());
+      // docs_json is the source data of plotting which bokeh would use to render the plotting.
+      assertTrue(interpreterResultMessages.get(2).getData().contains("docs_json"));
+    } else {
+      // the size of interpreterResultMessages is 2 in other cases
+      assertEquals(2, interpreterResultMessages.size());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(0).getType());
+      assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
+      // docs_json is the source data of plotting which bokeh would use to render the plotting.
+      assertTrue(interpreterResultMessages.get(1).getData().contains("docs_json"));
+    }
 
+    // TODO(zjffdu) ggplot is broken https://github.com/yhat/ggpy/issues/662
     // ggplot
+    //    context = getInterpreterContext();
+    //    result = interpreter.interpret("from ggplot import *\n" +
+    //        "ggplot(diamonds, aes(x='price', fill='cut')) +\\\n" +
+    //        "    geom_density(alpha=0.25) +\\\n" +
+    //        "    facet_wrap(\"clarity\")", context);
+    //    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    //    interpreterResultMessages = context.out.toInterpreterResultMessage();
+    //    // the order of IMAGE and TEXT is not determined
+    //    // check there must be one IMAGE output
+    //    hasImageOutput = false;
+    //    for (InterpreterResultMessage msg : interpreterResultMessages) {
+    //      if (msg.getType() == InterpreterResult.Type.IMG) {
+    //        hasImageOutput = true;
+    //      }
+    //    }
+    //    assertTrue("No Image Output", hasImageOutput);
+
+    // hvplot
     context = getInterpreterContext();
-    result = interpreter.interpret("from ggplot import *\n" +
-        "ggplot(diamonds, aes(x='price', fill='cut')) +\\\n" +
-        "    geom_density(alpha=0.25) +\\\n" +
-        "    facet_wrap(\"clarity\")", context);
-    Thread.sleep(100);
+    result = interpreter.interpret(
+        "import pandas as pd, numpy as np\n" +
+        "idx = pd.date_range('1/1/2000', periods=1000)\n" +
+        "df = pd.DataFrame(np.random.randn(1000, 4), index=idx, 
columns=list('ABCD')).cumsum()\n" +
+        "import hvplot.pandas\n" +
+        "df.hvplot()", context);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     interpreterResultMessages = context.out.toInterpreterResultMessage();
-    // the order of IMAGE and TEXT is not determined
-    // check there must be one IMAGE output
-    hasImageOutput = false;
-    for (InterpreterResultMessage msg : interpreterResultMessages) {
-      if (msg.getType() == InterpreterResult.Type.IMG) {
-        hasImageOutput = true;
-      }
-    }
-    assertTrue("No Image Output", hasImageOutput);
+    assertEquals(5, interpreterResultMessages.size());
+    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(1).getType());
+    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(2).getType());
+    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(3).getType());
+    assertEquals(InterpreterResult.Type.HTML, interpreterResultMessages.get(4).getType());
+    // docs_json is the source data of plotting which bokeh would use to render the plotting.
+    assertTrue(interpreterResultMessages.get(4).getData().contains("docs_json"));
   }
 
 
@@ -325,7 +374,7 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
 
     // We ensure that running and auto completion are not hanging.
     InterpreterResult res = interpretFuture.get(20000, TimeUnit.MILLISECONDS);
-    List<InterpreterCompletion> autoRes = completionFuture.get(1000, TimeUnit.MILLISECONDS);
+    List<InterpreterCompletion> autoRes = completionFuture.get(3000, TimeUnit.MILLISECONDS);
     assertTrue(res.code().name().equals("SUCCESS"));
     assertTrue(autoRes.size() > 0);
   }
@@ -335,7 +384,7 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     tearDown();
 
     Properties properties = initIntpProperties();
-    properties.setProperty("zeppelin.ipython.grpc.message_size", "3000");
+    properties.setProperty("zeppelin.ipython.grpc.message_size", "4000");
 
     startInterpreter(properties);
 
@@ -345,12 +394,12 @@ public class IPythonInterpreterTest extends BasePythonInterpreterTest {
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
 
     InterpreterContext context = getInterpreterContext();
-    result = interpreter.interpret("print('1'*3000)", context);
+    result = interpreter.interpret("print('1'*4000)", context);
     assertEquals(InterpreterResult.Code.ERROR, result.code());
     List<InterpreterResultMessage> interpreterResultMessages =
         context.out.toInterpreterResultMessage();
     assertEquals(1, interpreterResultMessages.size());
-    assertTrue(interpreterResultMessages.get(0).getData().contains("exceeds maximum size 3000"));
+    assertTrue(interpreterResultMessages.get(0).getData().contains("exceeds maximum size 4000"));
 
     // next call continue work
     result = interpreter.interpret("print(1)", context);
diff --git a/python/src/test/resources/log4j.properties b/python/src/test/resources/log4j.properties
index 2b9a7c6..6be1af3 100644
--- a/python/src/test/resources/log4j.properties
+++ b/python/src/test/resources/log4j.properties
@@ -23,7 +23,4 @@ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
 log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
 
-
-log4j.logger.org.apache.zeppelin.python=DEBUG
-log4j.logger.org.apache.zeppelin.interpreter.util=DEBUG
-
+# log4j.logger.org.apache.zeppelin.python=DEBUG
diff --git a/spark/interpreter/src/test/resources/log4j.properties b/spark/interpreter/src/test/resources/log4j.properties
index 987a08c..8c89e1e 100644
--- a/spark/interpreter/src/test/resources/log4j.properties
+++ b/spark/interpreter/src/test/resources/log4j.properties
@@ -44,9 +44,7 @@ log4j.logger.DataNucleus.Datastore=ERROR
 log4j.logger.org.hibernate.type=ALL
 
 log4j.logger.org.apache.zeppelin.interpreter=WARN
-log4j.logger.org.apache.zeppelin.spark=DEBUG
-
-log4j.logger.org.apache.zeppelin.python=DEBUG
-log4j.logger.org.apache.zeppelin.interpreter.util=DEBUG
+#log4j.logger.org.apache.zeppelin.spark=DEBUG
+#log4j.logger.org.apache.zeppelin.python=DEBUG
 log4j.logger.org.apache.spark.repl.Main=WARN
 
diff --git a/testing/install_external_dependencies.sh b/testing/install_external_dependencies.sh
index 47ab9e7..944efaa 100755
--- a/testing/install_external_dependencies.sh
+++ b/testing/install_external_dependencies.sh
@@ -36,7 +36,8 @@ fi
 
 # Install Python dependencies for Python specific tests
 if [[ -n "$PYTHON" ]] ; then
-  wget https://repo.continuum.io/miniconda/Miniconda${PYTHON}-4.2.12-Linux-x86_64.sh -O miniconda.sh
+  wget https://repo.continuum.io/miniconda/Miniconda${PYTHON}-4.6.14-Linux-x86_64.sh -O miniconda.sh
+
   bash miniconda.sh -b -p $HOME/miniconda
   echo "export PATH='$HOME/miniconda/bin:$PATH'" >> ~/.environ
   source ~/.environ
@@ -47,8 +48,13 @@ if [[ -n "$PYTHON" ]] ; then
   conda info -a
   conda config --add channels conda-forge
 
-  conda install -q numpy=1.13.3 pandas=0.21.1 matplotlib=2.1.1 pandasql=0.7.3 ipython=5.4.1 jupyter_client=5.1.0 ipykernel=4.7.0 bokeh=0.12.10
-  pip install -q scipy==0.18.0 ggplot==0.11.5 grpcio==1.8.2 bkzep==0.4.0
+  if [[ $PYTHON == "2" ]] ; then
+    pip install -q numpy==1.14.5 pandas==0.21.1 matplotlib==2.1.1 scipy==1.2.1 grpcio==1.19.0 bkzep==0.6.1 hvplot==0.5.2 \
+    protobuf==3.7.0 pandasql==0.7.3 ipython==5.8.0 ipykernel==4.10.0 bokeh==1.3.4
+  else
+    pip install -q numpy==1.17.3 pandas==0.25.0 scipy==1.3.1 grpcio==1.19.0 bkzep==0.6.1 hvplot==0.5.2 protobuf==3.6.1 \
+    pandasql==0.7.3 ipython==7.8.0 matplotlib==3.0.3 ipykernel==5.1.2 jupyter_client==5.3.4 bokeh==1.3.4
+  fi
 
   if [[ -n "$TENSORFLOW" ]] ; then
     check_results=`conda search -c conda-forge tensorflow`
diff --git a/zeppelin-interpreter-integration/src/test/resources/log4j.properties b/zeppelin-interpreter-integration/src/test/resources/log4j.properties
index 95c474c..773d0af 100644
--- a/zeppelin-interpreter-integration/src/test/resources/log4j.properties
+++ b/zeppelin-interpreter-integration/src/test/resources/log4j.properties
@@ -42,6 +42,3 @@ log4j.logger.DataNucleus.Datastore=ERROR
 # Log all JDBC parameters
 log4j.logger.org.hibernate.type=ALL
 log4j.logger.org.apache.hadoop=WARN
-
-log4j.logger.org.apache.zeppelin.interpreter=DEBUG
-log4j.logger.org.apache.zeppelin.util=DEBUG
