This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push:
     new 27a29d0  KYLIN-4621 Avoid annoying log message when build cube and query
27a29d0 is described below

commit 27a29d01e83f69b4e021bab67f19fd23ab2d3099
Author: rupengwang <wangrup...@live.cn>
AuthorDate: Tue Jul 7 14:17:18 2020 +0800

    KYLIN-4621 Avoid annoying log message when build cube and query
---
 ...j.properties => kylin-parquet-log4j.properties} | 27 ++++++++--------------
 build/conf/kylin-server-log4j.properties           |  1 +
 build/conf/kylin-tools-log4j.properties            |  3 +++
 .../org/apache/kylin/common/KylinConfigBase.java   | 13 +++++++++++
 .../kylin/engine/spark/job/NSparkExecutable.java   | 13 +++--------
 .../engine/spark/job/NSparkExecutableTest.java     |  2 +-
 6 files changed, 31 insertions(+), 28 deletions(-)

diff --git a/build/conf/kylin-tools-log4j.properties b/build/conf/kylin-parquet-log4j.properties
similarity index 52%
copy from build/conf/kylin-tools-log4j.properties
copy to build/conf/kylin-parquet-log4j.properties
index d4d32bf..bdecb25 100644
--- a/build/conf/kylin-tools-log4j.properties
+++ b/build/conf/kylin-parquet-log4j.properties
@@ -17,22 +17,15 @@
 #
 
 
-# the kylin-tools-log4j.properties is mainly for configuring log properties on kylin tools, including:
-#   1. tools launched by kylin.sh script, e.g. DeployCoprocessorCLI
-#   2. DebugTomcat
-#   3. others
-# 
-# It's called kylin-tools-log4j.properties so that it won't distract users from the other more important log4j config file: kylin-server-log4j.properties
-# enable this by -Dlog4j.configuration=kylin-tools-log4j.properties
-
-log4j.rootLogger=INFO,stderr
-
-log4j.appender.stderr=org.apache.log4j.ConsoleAppender
-log4j.appender.stderr.Target=System.err
-log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
-
-#log4j.logger.org.apache.hadoop=ERROR
+#overall config
+log4j.rootLogger=WARN,stdout
 log4j.logger.org.apache.kylin=DEBUG
 log4j.logger.org.springframework=WARN
-log4j.logger.org.apache.kylin.tool.shaded=INFO
+log4j.logger.org.springframework.security=WARN
+log4j.logger.org.apache.spark=WARN
+log4j.logger.org.apache.spark.ContextCleaner=WARN
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.err
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
\ No newline at end of file
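Note: the new kylin-parquet-log4j.properties above only takes effect when a JVM is pointed at it through the log4j.configuration system property, which is what the NSparkExecutable change further down does. A minimal sketch of that mechanism, assuming an illustrative deployment path of /opt/kylin/conf (the real path is derived from KYLIN_HOME):

    // Hedged sketch: set the log4j 1.x config location before any Logger is created.
    // The absolute path is an assumption; Kylin builds it from KYLIN_HOME and normally
    // passes it as a -D option on the spark-submit command line instead.
    public class ParquetLog4jSketch {
        public static void main(String[] args) {
            System.setProperty("log4j.configuration",
                    "file:/opt/kylin/conf/kylin-parquet-log4j.properties");
        }
    }
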
diff --git a/build/conf/kylin-server-log4j.properties b/build/conf/kylin-server-log4j.properties
index bcaea65..8846a21 100644
--- a/build/conf/kylin-server-log4j.properties
+++ b/build/conf/kylin-server-log4j.properties
@@ -38,6 +38,7 @@ log4j.rootLogger=INFO
 log4j.logger.org.apache.kylin=DEBUG,file
 log4j.logger.org.springframework=WARN,file
 log4j.logger.org.springframework.security=INFO,file
+log4j.logger.org.apache.kylin.spark.classloader=INFO,file
 
 log4j.additivity.logger.org.apache.kylin.stream=false
 log4j.logger.org.apache.kylin.stream=TRACE,realtime
diff --git a/build/conf/kylin-tools-log4j.properties b/build/conf/kylin-tools-log4j.properties
index d4d32bf..9c4e2bd 100644
--- a/build/conf/kylin-tools-log4j.properties
+++ b/build/conf/kylin-tools-log4j.properties
@@ -35,4 +35,7 @@ log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L :
 #log4j.logger.org.apache.hadoop=ERROR
 log4j.logger.org.apache.kylin=DEBUG
 log4j.logger.org.springframework=WARN
+log4j.logger.org.springframework.security=INFO
 log4j.logger.org.apache.kylin.tool.shaded=INFO
+log4j.logger.org.apache.spark=WARN
+log4j.logger.org.apache.spark.ContextCleaner=WARN
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index fb6f796..023fe2c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -21,6 +21,7 @@ package org.apache.kylin.common;
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
@@ -2718,6 +2719,18 @@ public abstract class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("kylin.query.spark-engine.enabled", "true"));
     }
 
+    public String getLogSparkPropertiesFile() {
+        return getLogPropertyFile("kylin-parquet-log4j.properties");
+    }
+
+    private String getLogPropertyFile(String filename) {
+        if (isDevEnv()) {
+            return Paths.get(getKylinHomeWithoutWarn(), "build", "conf").toString() + File.separator + filename;
+        } else {
+            return Paths.get(getKylinHomeWithoutWarn(), "conf").toString() + File.separator + filename;
+        }
+    }
+
     public int getQueryPartitionSplitSizeMB() {
         return Integer.parseInt(getOptional("kylin.query.spark-engine.partition-split-size-mb", "64"));
     }
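For reference, the helper added above resolves the properties file relative to KYLIN_HOME, using build/conf in a dev environment and conf otherwise. A small standalone sketch of that path logic, assuming a hypothetical KYLIN_HOME of /opt/kylin:

    import java.io.File;
    import java.nio.file.Paths;

    public class LogPathSketch {
        public static void main(String[] args) {
            String kylinHome = "/opt/kylin";   // assumed value; the real code reads KYLIN_HOME
            boolean devEnv = false;            // the real code calls isDevEnv()
            String confDir = devEnv
                    ? Paths.get(kylinHome, "build", "conf").toString()
                    : Paths.get(kylinHome, "conf").toString();
            // Prints /opt/kylin/conf/kylin-parquet-log4j.properties on Unix-like systems.
            System.out.println(confDir + File.separator + "kylin-parquet-log4j.properties");
        }
    }
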
diff --git a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index a877b21..a9c735b 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -34,7 +34,6 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import java.util.Objects;
 import java.util.Set;
 
 import org.apache.kylin.engine.spark.utils.MetaDumpUtil;
@@ -286,21 +285,15 @@ public class NSparkExecutable extends AbstractExecutable {
             logger.warn("use the InetAddress get local ip failed!", e);
         }
 
+        String log4jConfiguration = "file:" + config.getLogSparkPropertiesFile();
+
         String sparkDriverExtraJavaOptionsKey = "spark.driver.extraJavaOptions";
         StringBuilder sb = new StringBuilder();
         if (sparkConfigOverride.containsKey(sparkDriverExtraJavaOptionsKey)) {
             sb.append(sparkConfigOverride.get(sparkDriverExtraJavaOptionsKey));
         }
 
-        String sparkDriverHdfsLogPath = null;
-        if (config instanceof KylinConfigExt) {
-            Map<String, String> extendedOverrides = ((KylinConfigExt) config).getExtendedOverrides();
-            if (Objects.nonNull(extendedOverrides)) {
-                sparkDriverHdfsLogPath = extendedOverrides.get("spark.driver.log4j.appender.hdfs.File");
-            }
-        }
-
-        sb.append(String.format(Locale.ROOT, " -Dspark.driver.log4j.appender.hdfs.File=%s ", sparkDriverHdfsLogPath));
+        sb.append(String.format(Locale.ROOT, " -Dlog4j.configuration=%s ", log4jConfiguration));
         sb.append(String.format(Locale.ROOT, " -Dspark.driver.rest.server.ip=%s ", serverIp));
         sb.append(String.format(Locale.ROOT, " -Dspark.driver.param.taskId=%s ", getId()));
 
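For context, the driver's extraJavaOptions now carry the log4j configuration location instead of the removed spark.driver.log4j.appender.hdfs.File path. A rough sketch of the string assembled above, using made-up values for the config path, server IP and job id (none of them come from this patch):

    import java.util.Locale;

    public class DriverOptionsSketch {
        public static void main(String[] args) {
            // Assumed location resolved by getLogSparkPropertiesFile() on a deployed node.
            String log4jConfiguration = "file:/opt/kylin/conf/kylin-parquet-log4j.properties";
            StringBuilder sb = new StringBuilder();
            sb.append(String.format(Locale.ROOT, " -Dlog4j.configuration=%s ", log4jConfiguration));
            sb.append(String.format(Locale.ROOT, " -Dspark.driver.rest.server.ip=%s ", "10.0.0.1")); // assumed IP
            sb.append(String.format(Locale.ROOT, " -Dspark.driver.param.taskId=%s ", "job-1"));      // assumed id
            // The result is passed to spark-submit as spark.driver.extraJavaOptions.
            System.out.println(sb);
        }
    }
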
diff --git a/kylin-spark-project/kylin-spark-engine/src/test/java/org/apache/kylin/engine/spark/job/NSparkExecutableTest.java b/kylin-spark-project/kylin-spark-engine/src/test/java/org/apache/kylin/engine/spark/job/NSparkExecutableTest.java
index 4f869bb..c52b013 100644
--- a/kylin-spark-project/kylin-spark-engine/src/test/java/org/apache/kylin/engine/spark/job/NSparkExecutableTest.java
+++ b/kylin-spark-project/kylin-spark-engine/src/test/java/org/apache/kylin/engine/spark/job/NSparkExecutableTest.java
@@ -58,7 +58,7 @@ public class NSparkExecutableTest extends LocalWithSparkSessionTest {
             Assert.assertTrue(cmd.contains("export HADOOP_CONF_DIR"));
             Assert.assertTrue(cmd.contains("spark-submit"));
             Assert.assertTrue(cmd.contains("spark.executor.extraClassPath=job.jar"));
-            Assert.assertTrue(cmd.contains("spark.driver.log4j.appender.hdfs.File="));
+            Assert.assertTrue(cmd.contains("log4j.configuration="));
         } finally {
             if (StringUtils.isEmpty(kylinHome)) {
                 System.clearProperty("KYLIN_HOME");
