This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push:
     new 150c7a4  [minor] Add more logging for spark interpreter
150c7a4 is described below

commit 150c7a4907a48240df87238654ce2829f3999438
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Tue Jan 7 17:57:44 2020 +0800

    [minor] Add more logging for spark interpreter

    (cherry picked from commit 46322ccc902aed2397774a57efa7f9efcff793d9)
---
 .../zeppelin/spark/PySparkInterpreterTest.java      |  2 --
 .../zeppelin/spark/SparkInterpreterTest.java        |  6 ----
 .../zeppelin/spark/SparkRInterpreterTest.java       |  1 -
 .../zeppelin/spark/BaseSparkScalaInterpreter.scala  | 42 +++++++---------------
 .../integration/ZeppelinSparkClusterTest.java       |  2 --
 5 files changed, 12 insertions(+), 41 deletions(-)

diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
index 2445cce..7e48666 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
@@ -54,7 +54,6 @@ public class PySparkInterpreterTest extends PythonInterpreterTest {
     properties.setProperty("zeppelin.pyspark.python", "python");
     properties.setProperty("zeppelin.dep.localrepo", Files.createTempDir().getAbsolutePath());
     properties.setProperty("zeppelin.pyspark.useIPython", "false");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
     properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
@@ -109,7 +108,6 @@ public class PySparkInterpreterTest extends PythonInterpreterTest {
     properties.setProperty("spark.pyspark.python", "invalid_python");
     properties.setProperty("zeppelin.python.useIPython", "false");
     properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.spark.maxResult", "3");
     interpreter = new LazyOpenInterpreter(new PySparkInterpreter(properties));
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index 1feb002..b453b13 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -72,7 +72,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.spark.uiWebUrl", "fake_spark_weburl");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
@@ -388,7 +387,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.spark.printREPLOutput", "false");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
@@ -416,7 +414,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("spark.scheduler.mode", "FAIR");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
@@ -446,7 +443,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("spark.ui.enabled", "false");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
@@ -472,7 +468,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.spark.ui.hidden", "true");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
@@ -497,7 +492,6 @@ public class SparkInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
     properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java
index 6584391..011812e 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java
@@ -54,7 +54,6 @@ public class SparkRInterpreterTest {
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.test", "true");
     properties.setProperty("zeppelin.R.knitr", "true");
     properties.setProperty("spark.r.backendConnectionTimeout", "10");
     properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
diff --git a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
index 8750011..e62fc96 100644
--- a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
+++ b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
@@ -52,8 +52,6 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,

   protected lazy val LOGGER: Logger = LoggerFactory.getLogger(getClass)

-  private val isTest = conf.getBoolean("zeppelin.spark.test", false)
-
   protected var sc: SparkContext = _

   protected var sqlContext: SQLContext = _
@@ -197,9 +195,7 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
   private def spark1CreateContext(): Unit = {
     this.sc = SparkContext.getOrCreate(conf)
-    if (!isTest) {
-      interpreterOutput.write("Created SparkContext.\n".getBytes())
-    }
+    LOGGER.info("Created SparkContext")

     getUserFiles().foreach(file => sc.addFile(file))

     sc.getClass.getMethod("ui").invoke(sc).asInstanceOf[Option[_]] match {
@@ -214,19 +210,13 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
     if (hiveEnabled && hiveSiteExisted) {
       sqlContext = Class.forName("org.apache.spark.sql.hive.HiveContext")
         .getConstructor(classOf[SparkContext]).newInstance(sc).asInstanceOf[SQLContext]
-      if (!isTest) {
-        interpreterOutput.write("Created sql context (with Hive support).\n".getBytes())
-      }
+      LOGGER.info("Created sql context (with Hive support)")
     } else {
-      if (hiveEnabled && !hiveSiteExisted && !isTest) {
-        interpreterOutput.write(("spark.useHiveContext is set as true but no hive-site.xml" +
-          " is found in classpath, so zeppelin will fallback to SQLContext.\n").getBytes())
-      }
+      LOGGER.warn("spark.useHiveContext is set as true but no hive-site.xml" +
+        " is found in classpath, so zeppelin will fallback to SQLContext");
       sqlContext = Class.forName("org.apache.spark.sql.SQLContext")
         .getConstructor(classOf[SparkContext]).newInstance(sc).asInstanceOf[SQLContext]
-      if (!isTest) {
-        interpreterOutput.write("Created sql context.\n".getBytes())
-      }
+      LOGGER.info("Created sql context (without Hive support)")
     }

     bind("sc", "org.apache.spark.SparkContext", sc, List("""@transient"""))
@@ -258,28 +248,20 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
       if (hiveSiteExisted && hiveClassesPresent) {
         builder.getClass.getMethod("enableHiveSupport").invoke(builder)
         sparkSession = builder.getClass.getMethod("getOrCreate").invoke(builder)
-        if (!isTest) {
-          interpreterOutput.write("Created Spark session (with Hive support).\n".getBytes())
-        }
+        LOGGER.info("Created Spark session (with Hive support)");
       } else {
-        if (!hiveClassesPresent && !isTest) {
-          interpreterOutput.write(
-            "Hive support can not be enabled because spark is not built with hive\n".getBytes)
+        if (!hiveClassesPresent) {
+          LOGGER.warn("Hive support can not be enabled because spark is not built with hive")
         }
-        if (!hiveSiteExisted && !isTest) {
-          interpreterOutput.write(
-            "Hive support can not be enabled because no hive-site.xml found\n".getBytes)
+        if (!hiveSiteExisted) {
+          LOGGER.warn("Hive support can not be enabled because no hive-site.xml found")
         }
         sparkSession = builder.getClass.getMethod("getOrCreate").invoke(builder)
-        if (!isTest) {
-          interpreterOutput.write("Created Spark session.\n".getBytes())
-        }
+        LOGGER.info("Created Spark session (without Hive support)");
       }
     } else {
       sparkSession = builder.getClass.getMethod("getOrCreate").invoke(builder)
-      if (!isTest) {
-        interpreterOutput.write("Created Spark session.\n".getBytes())
-      }
+      LOGGER.info("Created Spark session (without Hive support)");
     }

     sc = sparkSession.getClass.getMethod("sparkContext").invoke(sparkSession)
diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
index 0cb6db8..81e476f 100644
--- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java
@@ -109,8 +109,6 @@ public abstract class ZeppelinSparkClusterTest extends AbstractTestRestApi {
         new InterpreterProperty("zeppelin.pyspark.useIPython", "false"));
     sparkProperties.put("zeppelin.spark.useNew",
         new InterpreterProperty("zeppelin.spark.useNew", "true"));
-    sparkProperties.put("zeppelin.spark.test",
-        new InterpreterProperty("zeppelin.spark.test", "true"));
     sparkProperties.put("spark.serializer",
         new InterpreterProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer"));
     sparkProperties.put("zeppelin.spark.scala.color",
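
For context: the change replaces direct writes to interpreterOutput, which were previously suppressed by the now-removed zeppelin.spark.test flag, with standard SLF4J logging through the interpreter's LOGGER. A minimal, self-contained Scala sketch of that pattern follows; the class LoggingExample and its method are hypothetical illustrations only and assume slf4j-api is on the classpath.

import org.slf4j.{Logger, LoggerFactory}

class LoggingExample {
  // Logger keyed on the concrete class, as BaseSparkScalaInterpreter does
  private lazy val LOGGER: Logger = LoggerFactory.getLogger(getClass)

  // Report context creation via the log instead of the front-end output stream
  def reportContextCreation(hiveEnabled: Boolean, hiveSiteExisted: Boolean): Unit = {
    if (hiveEnabled && hiveSiteExisted) {
      LOGGER.info("Created sql context (with Hive support)")
    } else {
      if (hiveEnabled && !hiveSiteExisted) {
        LOGGER.warn("spark.useHiveContext is set as true but no hive-site.xml" +
          " is found in classpath, so zeppelin will fallback to SQLContext")
      }
      LOGGER.info("Created sql context (without Hive support)")
    }
  }
}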