This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/kylin.git

The following commit(s) were added to refs/heads/main by this push:
     new c29b8f5  KYLIN-5160 Fix driver extra classpath
c29b8f5 is described below

commit c29b8f5b5ffa4d33ea2120a4efe261a6fbab1b01
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Tue Feb 15 18:06:27 2022 +0800

    KYLIN-5160 Fix driver extra classpath
---
 .../java/org/apache/kylin/engine/spark/job/NSparkExecutable.java | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index eef9ca8..79ae4aa 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -408,9 +408,12 @@ public class NSparkExecutable extends AbstractExecutable {
             appendSparkConf(sb, "spark.executor.extraClassPath", Paths.get(kylinJobJar).getFileName().toString());
         }
         // In yarn cluster mode, make sure class SparkDriverHdfsLogAppender will be in NM container's classpath.
-        appendSparkConf(sb, "spark.driver.extraClassPath", isYarnCluster ? //
+        String extraClassPath = sparkConfs.getOrDefault("spark.driver.extraClassPath", "");
+        String parquetJarPath = isYarnCluster ? //
                 String.format(Locale.ROOT, "%s:%s", APP_JAR_NAME,
-                        Paths.get(kylinJobJar).getFileName().toString()) : kylinJobJar);
+                        Paths.get(kylinJobJar).getFileName().toString()) : kylinJobJar;
+        extraClassPath = extraClassPath.equals("") ? parquetJarPath : String.format(Locale.ROOT, "%s:%s", parquetJarPath, extraClassPath);
+        appendSparkConf(sb, "spark.driver.extraClassPath", extraClassPath);
         String sparkUploadFiles = config.sparkUploadFiles(isLocalMaster(sparkConfs), isYarnCluster);
         if (StringUtils.isNotBlank(sparkUploadFiles)) {
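
For readers skimming the hunk: the behavioral change is that a user-configured spark.driver.extraClassPath is no longer overwritten; the Kylin job jar entry (called parquetJarPath in the patch) is now prepended to whatever the user already set. Below is a minimal, self-contained Java sketch of that merging logic, written in a hedged spirit: the class DriverClassPathSketch, the method buildDriverExtraClassPath, the example jar paths, and the "__app__.jar" value assumed for APP_JAR_NAME are illustrative only and are not taken from the repository.

    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    public class DriverClassPathSketch {

        // Illustrative assumption for the jar alias used in yarn-cluster mode.
        private static final String APP_JAR_NAME = "__app__.jar";

        // Mirrors the patched logic: build the job-jar entry first, then prepend it
        // to any user-configured spark.driver.extraClassPath instead of replacing it.
        static String buildDriverExtraClassPath(Map<String, String> sparkConfs,
                                                String kylinJobJar,
                                                boolean isYarnCluster) {
            String extraClassPath = sparkConfs.getOrDefault("spark.driver.extraClassPath", "");
            // In yarn-cluster mode only the jar file name is visible inside the NM
            // container, so reference it by name next to APP_JAR_NAME; otherwise use
            // the full local path.
            String jobJarEntry = isYarnCluster
                    ? String.format(Locale.ROOT, "%s:%s", APP_JAR_NAME,
                            Paths.get(kylinJobJar).getFileName().toString())
                    : kylinJobJar;
            // Empty user setting: just the job jar; otherwise job jar first, user entries after.
            return extraClassPath.isEmpty()
                    ? jobJarEntry
                    : String.format(Locale.ROOT, "%s:%s", jobJarEntry, extraClassPath);
        }

        public static void main(String[] args) {
            Map<String, String> confs = new HashMap<>();
            confs.put("spark.driver.extraClassPath", "/opt/extra/udf.jar");
            // With a user-supplied extraClassPath, the job jar is prepended rather than replacing it.
            System.out.println(buildDriverExtraClassPath(confs, "/opt/kylin/lib/kylin-job.jar", true));
            // Prints: __app__.jar:kylin-job.jar:/opt/extra/udf.jar
        }
    }

Before this change, the existing spark.driver.extraClassPath value from the job's Spark configuration was silently dropped, which is the regression KYLIN-5160 addresses.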