This is an automated email from the ASF dual-hosted git repository. zjffdu pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push: new d5d2658 [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detect it at runtime d5d2658 is described below commit d5d26583f723d9638d69020aab885afefab82c5c Author: huage1994 <guanhua...@foxmail.com> AuthorDate: Mon Nov 22 17:46:41 2021 +0800 [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detect it at runtime ### What is this PR for? Currently we detect the scala version via scala.util.Properties.versionString(); but it depends on the resource file library.version on the classpath, and sometimes a user may package the scala-2.11 version of this resource into their jar, which causes us to detect the wrong scala version. ### What type of PR is it? [Bug Fix] ### Todos * [ ] - Task ### What is the Jira issue? https://issues.apache.org/jira/browse/ZEPPELIN-5580 ### How should this be tested? CI pass ### Screenshots (if appropriate) ### Questions: * Do the license files need updating? No * Are there breaking changes for older versions? No * Does this need documentation? 
No Author: huage1994 <guanhua...@foxmail.com> Closes #4267 from huage1994/ZEPPELIN-5580 and squashes the following commits: 7945faa7e6 [huage1994] [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detect it at runtime --- .../apache/zeppelin/spark/SparkInterpreter.java | 25 ++++++++++++++++------ .../launcher/SparkInterpreterLauncher.java | 9 +++++++- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java index 16faf99..fef0998 100644 --- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java +++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java @@ -74,6 +74,7 @@ public class SparkInterpreter extends AbstractInterpreter { private Object sparkSession; private SparkVersion sparkVersion; + private String scalaVersion; private boolean enableSupportedVersionCheck; public SparkInterpreter(Properties properties) { @@ -149,7 +150,7 @@ public class SparkInterpreter extends AbstractInterpreter { * @throws Exception */ private AbstractSparkScalaInterpreter loadSparkScalaInterpreter(SparkConf conf) throws Exception { - String scalaVersion = extractScalaVersion(); + scalaVersion = extractScalaVersion(conf); ClassLoader scalaInterpreterClassLoader = Thread.currentThread().getContextClassLoader(); String zeppelinHome = System.getenv("ZEPPELIN_HOME"); @@ -252,12 +253,24 @@ public class SparkInterpreter extends AbstractInterpreter { return sparkVersion; } - private String extractScalaVersion() throws InterpreterException { - String scalaVersionString = scala.util.Properties.versionString(); + private String extractScalaVersion(SparkConf conf) throws InterpreterException { + // Use the scala version if SparkLauncher pass it by name of "zeppelin.spark.scala.version". 
+ + // If not, detect scala version by resource file library.version on classpath. + // Library.version is sometimes inaccurate and it is mainly used for unit test. + String scalaVersionString; + if (conf.contains("zeppelin.spark.scala.version")) { + scalaVersionString = conf.get("zeppelin.spark.scala.version"); + } else { + scalaVersionString = scala.util.Properties.versionString(); + } LOGGER.info("Using Scala: " + scalaVersionString); - if (scalaVersionString.contains("version 2.11")) { + + if (StringUtils.isEmpty(scalaVersionString)) { + throw new InterpreterException("Scala Version is empty"); + } else if (scalaVersionString.contains("2.11")) { return "2.11"; - } else if (scalaVersionString.contains("version 2.12")) { + } else if (scalaVersionString.contains("2.12")) { return "2.12"; } else { throw new InterpreterException("Unsupported scala version: " + scalaVersionString); @@ -265,7 +278,7 @@ public class SparkInterpreter extends AbstractInterpreter { } public boolean isScala212() throws InterpreterException { - return extractScalaVersion().equals("2.12"); + return scalaVersion.equals("2.12"); } private List<String> getDependencyFiles() throws InterpreterException { diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java index 8f8eedc..239976d 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java @@ -123,6 +123,14 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher { } + String scalaVersion = null; + try { + scalaVersion = detectSparkScalaVersion(getEnv("SPARK_HOME"), env); + context.getProperties().put("zeppelin.spark.scala.version", scalaVersion); + } catch (Exception e) { + throw new IOException("Fail to 
detect scala version, the reason is:"+ e.getMessage()); + } + if (isYarnMode() && getDeployMode().equals("cluster")) { try { @@ -138,7 +146,6 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher { } } - String scalaVersion = detectSparkScalaVersion(getEnv("SPARK_HOME"), env); Path scalaFolder = Paths.get(zConf.getZeppelinHome(), "/interpreter/spark/scala-" + scalaVersion); if (!scalaFolder.toFile().exists()) { throw new IOException("spark scala folder " + scalaFolder.toFile() + " doesn't exist");