This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/main by this push:
     new fd2977e21c Remove useless code
fd2977e21c is described below

commit fd2977e21c51f1afed668f2d9713cf562f2dc42d
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Wed Nov 2 18:34:54 2022 +0800

    Remove useless code
---
 .../src/main/java/org/apache/kylin/common/KylinConfigBase.java    | 8 --------
 .../java/org/apache/kylin/engine/spark/job/NSparkExecutable.java  | 3 +--
 .../org/apache/kylin/rest/controller/DiagnosisController.java     | 3 ++-
 3 files changed, 3 insertions(+), 11 deletions(-)

diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 0c22c0d94e..fc46325cb5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -2107,14 +2107,6 @@ public abstract class KylinConfigBase implements Serializable {
         return getFileName(kylinHome + File.separator + "lib", PARQUET_JOB_JAR_NAME_PATTERN);
     }
 
-    /**
-     * Use https://github.com/spektom/spark-flamegraph for Spark profile
-     */
-    @ConfigTag(ConfigTag.Tag.DEBUG_HACK)
-    public String getSparkSubmitCmd() {
-        return getOptional("kylin.engine.spark-cmd", null);
-    }
-
     public void overrideKylinParquetJobJarPath(String path) {
         logger.info("override {} to {}", KYLIN_ENGINE_PARQUET_JOB_JAR, path);
         System.setProperty(KYLIN_ENGINE_PARQUET_JOB_JAR, path);
diff --git a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index af2314563d..bb7e6e7f9c 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -397,8 +397,7 @@ public class NSparkExecutable extends AbstractExecutable {
                                       String appArgs) {
         StringBuilder sb = new StringBuilder();
 
-        String sparkSubmitCmd = config.getSparkSubmitCmd() != null ?
-                config.getSparkSubmitCmd() : KylinConfig.getSparkHome() + "/bin/spark-submit";
+        String sparkSubmitCmd = KylinConfig.getSparkHome() + "/bin/spark-submit";
         sb.append("export HADOOP_CONF_DIR=%s && %s --class 
org.apache.kylin.engine.spark.application.SparkEntry ");
 
         Map<String, String> sparkConfs = getSparkConfigOverride(config);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
index 190ee438d2..89bed507eb 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
@@ -26,6 +26,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.kylin.common.persistence.AutoDeleteDirectory;
+import org.apache.kylin.common.util.ParameterFilter;
 import org.apache.kylin.metadata.badquery.BadQueryEntry;
 import org.apache.kylin.metadata.badquery.BadQueryHistory;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -95,7 +96,7 @@ public class DiagnosisController extends BasicController {
     public void dumpJobDiagnosisInfo(@PathVariable String jobId, final HttpServletRequest request,
             final HttpServletResponse response) {
         try (AutoDeleteDirectory diagDir = new AutoDeleteDirectory("diag_job", "")) {
-            String filePath = dgService.dumpJobDiagnosisInfo(jobId, diagDir.getFile());
+            String filePath = dgService.dumpJobDiagnosisInfo(ParameterFilter.checkParameter(jobId), diagDir.getFile());
             setDownloadResponse(filePath, response);
         } catch (IOException e) {
             throw new InternalErrorException("Failed to dump job diagnosis info. " + e.getMessage(), e);
