This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push:
     new 4d4d532  [ZEPPELIN-4848]. Switch for enabling Spark REPL output for each paragraph
4d4d532 is described below

commit 4d4d532d3b5d2fae552e8adfe3ad8e187ed67314
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Fri Jun 5 22:43:22 2020 +0800

    [ZEPPELIN-4848]. Switch for enabling Spark REPL output for each paragraph
    
    ### What is this PR for?
    This PR adds the paragraph local property `printREPLOutput` to the Spark interpreter, so that users can switch Spark REPL output on or off for each paragraph, e.g.
    ```
    %spark(printREPLOutput=false)
    System.out.println("hello ")
    println("hello world")
    ```
    
    ### What type of PR is it?
    [ Feature ]
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-4848
    
    ### How should this be tested?
    * CI pass
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need an update? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zjf...@apache.org>
    
    Closes #3787 from zjffdu/ZEPPELIN-4848 and squashes the following commits:
    
    b6997d99b [Jeff Zhang] [ZEPPELIN-4848]. Switch for enabling Spark REPL output for each paragraph
---
 .../zeppelin/spark/SparkInterpreterTest.java       | 44 ++++++++++++++++++++++
 .../zeppelin/spark/BaseSparkScalaInterpreter.scala |  7 +++-
 2 files changed, 50 insertions(+), 1 deletion(-)

diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index f3f9dec..711635f 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -419,6 +419,50 @@ public class SparkInterpreterTest {
   }
 
   @Test
+  public void testDisableReplOutputForParagraph() throws InterpreterException {
+    Properties properties = new Properties();
+    properties.setProperty("spark.master", "local");
+    properties.setProperty("spark.app.name", "test");
+    properties.setProperty("zeppelin.spark.maxResult", "100");
+    properties.setProperty("zeppelin.spark.printREPLOutput", "true");
+    // disable color output for easy testing
+    properties.setProperty("zeppelin.spark.scala.color", "false");
+    properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
+
+    InterpreterContext.set(getInterpreterContext());
+    interpreter = new SparkInterpreter(properties);
+    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
+    interpreter.open();
+
+    InterpreterResult result = interpreter.interpret("val a=\"hello world\"", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals("a: String = hello world\n", output);
+
+    result = interpreter.interpret("print(a)", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    // output from the print statement is still displayed
+    assertEquals("hello world", output);
+
+    // disable REPL output
+    InterpreterContext context = getInterpreterContext();
+    context.getLocalProperties().put("printREPLOutput", "false");
+    result = interpreter.interpret("print(a)", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    // output from the print statement disappears
+    assertEquals("", output);
+
+    // REPL output comes back if printREPLOutput is not set in the paragraph local properties
+    result = interpreter.interpret("val a=\"hello world\"", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    assertEquals("a: String = hello world\n", output);
+
+    result = interpreter.interpret("print(a)", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    // output from the print statement is still displayed
+    assertEquals("hello world", output);
+  }
+
+  @Test
   public void testSchedulePool() throws InterpreterException {
     Properties properties = new Properties();
     properties.setProperty("spark.master", "local");

diff --git a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
index 0361c94..f05acb0 100644
--- a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
+++ b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
@@ -103,11 +103,16 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
 
   def interpret(code: String, context: InterpreterContext): InterpreterResult = {
 
     val originalOut = System.out
+    val printREPLOutput = context.getStringLocalProperty("printREPLOutput", "true").toBoolean
 
     def _interpret(code: String): scala.tools.nsc.interpreter.Results.Result = {
       Console.withOut(interpreterOutput) {
         System.setOut(Console.out)
-        interpreterOutput.setInterpreterOutput(context.out)
+        if (printREPLOutput) {
+          interpreterOutput.setInterpreterOutput(context.out)
+        } else {
+          interpreterOutput.setInterpreterOutput(null)
+        }
         interpreterOutput.ignoreLeadingNewLinesFromScalaReporter()
         val status = scalaInterpret(code) match {
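For context, the switch above relies only on paragraph local properties, which Zeppelin fills from the paragraph header (e.g. `%spark(printREPLOutput=false)`) and exposes through `InterpreterContext`. Below is a minimal sketch of that lookup pattern, reusing the `getStringLocalProperty` call shown in the diff; the object and method names (`ReplOutputSwitch`, `shouldPrintReplOutput`) are illustrative and not part of this commit.

```scala
import org.apache.zeppelin.interpreter.InterpreterContext

// Illustrative sketch (not part of the commit): how an interpreter can read
// the paragraph-level switch. The property is only present when the user
// writes something like %spark(printREPLOutput=false) in the paragraph
// header, so the default "true" keeps REPL output enabled otherwise.
object ReplOutputSwitch {
  def shouldPrintReplOutput(context: InterpreterContext): Boolean =
    context.getStringLocalProperty("printREPLOutput", "true").toBoolean
}
```

Defaulting to "true" keeps behavior unchanged for paragraphs that never set the property, which is what the second half of testDisableReplOutputForParagraph verifies.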