This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8de5c7a7121dc37729a12ee231041f8d89d1494c
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Sun Oct 8 16:45:43 2023 +0800

    Add package info in front end
---
 build/release/package.sh                           |   4 +-
 dev-support/contributor/README.md                  | 127 ++++++++++-----------
 .../setting/SettingBasic/SettingBasic.vue          |   4 +
 .../kylin/rest/controller/LightningController.java |   1 +
 .../kylin/rest/response/ProjectConfigResponse.java |   7 ++
 .../apache/kylin/rest/service/ProjectService.java  |   7 ++
 .../java/org/apache/kylin/common/KylinVersion.java |  34 ++++--
 .../org/apache/kylin/common/msg/CnMessage.java     |   4 +-
 .../java/org/apache/kylin/common/msg/Message.java  |   2 +-
 .../src/main/resources/kylin-defaults0.properties  |   9 +-
 .../apache/kylin/common/KylinConfigBaseTest.java   |   4 +-
 .../job/impl/threadpool/NDefaultScheduler.java     |   6 -
 .../job/runners/LicenseCapacityCheckRunner.java    |  63 ----------
 .../kylin/job/runners/QuotaStorageCheckRunner.java |  75 ------------
 .../apache/kylin/metadata/query/QueryHistory.java  |   2 +-
 .../metrics/prometheus/PrometheusMetrics.java      |  24 ++--
 .../kylin/common/metric/InfluxDBInstanceTest.java  |   4 +-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      |   6 +-
 .../apache/kylin/rest/service/SparkDDLTest.java    |  22 ++--
 .../spark/sql/common/SparkDDLTestUtils.scala       |   2 +-
 .../test_case_data/sandbox/kylin.properties        |   2 +-
 .../bisync/tableau/datasource/column/Column.java   |   2 +-
 .../bisync/tableau/datasource/connection/Col.java  |   2 +-
 src/server/pom.xml                                 |   2 +-
 .../org/apache/kylin/rest/BootstrapServer.java     |   2 +-
 src/server/src/main/resources/application.yaml     |   2 +-
 .../src/main/resources/config/config_library.csv   |  28 ++---
 .../src/main/resources/config/init.properties      |   3 +-
 .../apache/kylin/rest/InitConfigurationTest.java   |   4 +-
 src/spark-project/engine-build-sdk/pom.xml         |   2 +-
 .../engine/spark/builder/DFBuilderHelper.scala     |   2 +-
 .../engine/spark/job/stage/build/BuildStage.scala  |   2 +-
 .../job/stage/build/FlatTableAndDictBase.scala     |   2 +-
 .../builder/v3dict/GlobalDictionarySuite.scala     |   4 +-
 .../apache/spark/sql/KylinDataFrameManager.scala   |   3 -
 35 files changed, 176 insertions(+), 293 deletions(-)

diff --git a/build/release/package.sh b/build/release/package.sh
index a9e8d2f581..22f227977d 100755
--- a/build/release/package.sh
+++ b/build/release/package.sh
@@ -41,8 +41,8 @@ echo "${kylin_commit_sha1}@${current_branch}" > build/commit_SHA1
 if [ -z "$ASF_USERNAME" ]; then
     ASF_USERNAME="SOMEONE"
 fi
-echo "Build by ${ASF_USERNAME} at" `date "+%Y-%m-%d %H:%M:%S"` >> 
build/commit_SHA1
-echo "OS  : `uname -a`" >> build/commit_SHA1
+echo "package.timestamp:" `date "+%Y-%m-%d %H:%M:%S %z"` >> build/commit_SHA1
+echo "os: $(uname -a)" >> build/commit_SHA1
 
 KYLIN_VERSION_NAME="Apache Kylin ${release_version}"
 
diff --git a/dev-support/contributor/README.md b/dev-support/contributor/README.md
index 975a894054..1c4d32bfe8 100644
--- a/dev-support/contributor/README.md
+++ b/dev-support/contributor/README.md
@@ -1,6 +1,6 @@
 # How to Run/Debug Kylin in IDE (with some background)
 > This guide is an enhanced version of [How to debug Kylin in IDEA with Hadoop](https://kylin.apache.org/5.0/docs/development/how_to_debug_kylin_in_ide), <br>
-> it is not only step-by-step guide, but give more background knowledge and explanation. 
+> it is not only a step-by-step guide, but also gives more background knowledge and explanation. A video is available [here](https://www.bilibili.com/video/BV19C4y1Z7AN).
 
 ## Part One - Before you read
 
@@ -31,58 +31,58 @@ to be updated
 | build       | Scripts for building, packaging, running Kylin                                                       |
 | dev-support | Scripts and guides for contributors to develop/debug/test, for committers to release/publish website |
 | kystudio    | Frontend source code, mainly using Vue.js                                                            |
-| src         | Backend source code, mainly using Java & Scala, managed by Maven                                     |
-| pom.xml     | Project definition                                                                                   |
-| README.md   | General guide to Kylin 5 project                                                                     |
+| src         | Backend source code, written in Java & Scala, using Spark/Calcite/Hadoop/Spring etc.                 |
+| pom.xml     | Project definition by Apache Maven                                                                   |
+| README.md   | General guide to the development of the Kylin 5 project                                              |
 | LICENSE     | A must-have file required by ASF                                                                     |
 | NOTICE      | A must-have file required by ASF                                                                     |
 
 ### Step 4: Understand Maven modules
 
 
-| Module Name                | Brief Description                                                                       |           Tags           |
-|:---------------------------|:----------------------------------------------------------------------------------------|:------------------------:|
-| Core Common                | todo                                                                                    |           Core           |
-| Core Metadata              | todo                                                                                    |           Core           |
-| Core Metrics               | todo                                                                                    |           Core           |
-| Core Job                   | todo                                                                                    |           Core           |
-| Core Storage               | todo                                                                                    |           Core           |
-| Query Common               | todo                                                                                    |           Core           |
-| Local Data Cache           | Improve query performance by caching parquet files in spark executor's disk/memory     |          Add-on          |
-| Spark Common               | todo                                                                                    |
-| Query Engine Spark         | todo                                                                                    |
-| Hive Source                | todo                                                                                    |
-| Build Engine               | todo                                                                                    |           Core           |
-| Distributed Lock Extension | todo                                                                                    |          Add-on          |
-| Build Engine Spark         | todo                                                                                    |
-| Query                      | Transfer sql text to logical/physical plan and optimize using Apache Calcite.           |           Core           |
-| Streaming SDK              | Not ready. Used to parse Kafka message in custom way.                                   | Add-on, Not-Ready-Module |
-| Streaming                  | Not ready. Make Apache Kafka as a data source for Kylin 5.                              |     Not-Ready-Module     |
-| Tool                       | Different tools for metadata backup, Diagnose etc.                                      |           Tool           |
-| Common Service             | todo                                                                                    |
-| Datasource Service         | todo                                                                                    |
-| Modeling Service           | todo                                                                                    |
-| Data Loading Service       | todo                                                                                    |
-| Query Service              | todo                                                                                    |
-| Common Server              | todo                                                                                    |
-| Job Service                | todo                                                                                    |
-| Streaming Service          | todo                                                                                    |     Not-Ready-Module     |
-| Data Loading Server        | todo                                                                                    |
-| Query Server               | todo                                                                                    |
-| Metadata Server            | todo                                                                                    |
-| REST Server                | Main entry of Kylin process, including Spring config files.                             |          Spring          |
-| Datasource SDK             | Not ready. Framework to add data source for Kylin 5.                                    | Add-on, Not-Ready-Module |
-| JDBC Source                | Not ready. Make some RDBMS as a data source fro Kylin 5.                                |     Not-Ready-Module     |
-| Integration Test           | Major code for Integration Test                                                         |         Testing          |
-| Integration Test Spark     | Some code for Integration Test                                                          |         Testing          |
-| Source Assembly            | Used to create jars for build engine in spark-submit cmd.                               |          Build           |
-| Integration Test Server    | Some code for Integration Test                                                          |         Testing          |
-| Data loading Booter        | For micro-service deployment such as k8s. Process build/refresh index/segment request.  |      Micro-service       |
-| Query Booter               | For micro-service deployment such as k8s. Process query request.                        |      Micro-service       |
-| Common Booter              | For micro-service deployment such as k8s. Process crud of metadata request.             |      Micro-service       |
-| JDBC Driver                | Connect Kylin using JDBC, for SQL Client or BI                                          |           Tool           |
-
-### STEP 5: Install required software in laptop
+| Module Name                | Brief Description                                                                     |              Tags               |
+|:---------------------------|:---------------------------------------------------------------------------------------|:-------------------------------:|
+| Core Common                | Utility methods, config entry                                                         |              Core               |
+| Core Metadata              | Definition of metadata, CRUD of metadata                                              |              Core               |
+| Core Metrics               | Metrics and monitoring                                                                |              Core               |
+| Core Job                   | Job engine. Definition of executables, submission of different jobs                   |              Core               |
+| Core Storage               |                                                                                       |              Core               |
+| Query Common               | Query parsing, transformation, processing                                             |              Core               |
+| Local Data Cache           | Improve query performance by caching parquet files in spark executor's disk/memory   |             Add-on              |
+| Spark Common               | Logic, profiler, optimizer of Spark execution                                         |
+| Query Engine Spark         |                                                                                       |
+| Hive Source                | Outdated code                                                                         |
+| Build Engine SDK           |                                                                                       |              Core               |
+| Distributed Lock Extension | Different implementations of distributed lock                                         |             Add-on              |
+| Build Engine Spark         |                                                                                       |
+| Query                      | Transform SQL text into a logical/physical plan and optimize it using Apache Calcite  |              Core               |
+| Streaming SDK              | Not ready. Used to parse Kafka messages in a custom way                               |    Add-on, Not-Ready-Module     |
+| Streaming                  | Not ready. Makes Apache Kafka a data source for Kylin 5                               |        Not-Ready-Module         |
+| Tool                       | Different tools for metadata backup, diagnosis, etc.                                  |              Tool               |
+| Common Service             |                                                                                       |
+| Datasource Service         |                                                                                       |
+| Modeling Service           |                                                                                       |
+| Data Loading Service       |                                                                                       |
+| Query Service              | Controller & Service for SQL query                                                    |
+| Common Server              |                                                                                       |
+| Job Service                |                                                                                       |
+| Streaming Service          | Not ready.                                                                            |        Not-Ready-Module         |
+| Data Loading Server        |                                                                                       |
+| Query Server               |                                                                                       |
+| Metadata Server            | Controller for CRUD of metadata                                                       |
+| REST Server                | Starter of the Kylin process, including Spring config files                           |             Spring              |
+| Datasource SDK             | Not ready. Framework to add data sources to Kylin 5                                   |    Add-on, Not-Ready-Module     |
+| JDBC Source                | Not ready. Makes some RDBMS a data source for Kylin 5                                 |        Not-Ready-Module         |
+| Integration Test           | Major code for Integration Test                                                       |             Testing             |
+| Integration Test Spark     | Some code for Integration Test                                                        |             Testing             |
+| Source Assembly            | Uses the shade plugin to create jars for the build engine in the spark-submit cmd     |              Build              |
+| Integration Test Server    | Some code for Integration Test                                                        |             Testing             |
+| Data loading Booter        | Not ready. Starter for micro-service. Processes build/refresh index/segment requests  | Micro-service, Not-Ready-Module |
+| Query Booter               | Not ready. Starter for micro-service. Processes query requests                        | Micro-service, Not-Ready-Module |
+| Common Booter              | Not ready. Starter for micro-service. Processes CRUD-of-metadata requests             | Micro-service, Not-Ready-Module |
+| JDBC Driver                | Connect to Kylin using JDBC, for SQL clients or BI                                    |              Tool               |
+
+### STEP 5: Install required software on laptop (Mac)
 
 | Component                | Version                                    | Comment/Link                    |
 |--------------------------|--------------------------------------------|---------------------------------|
@@ -128,6 +128,15 @@ Wed Sep 20 11:15:45 CST 2023
 Make sure these ports are available.
 
 ### STEP 6: Prepare a Linux machine (Optional)
+
+Using a real Hadoop cluster (compared to local mode) lets you test your patch in an immersive way and reproduce issues easily.
+
+When you run Kylin 5 on your laptop and submit Spark jobs to build cubes/models at the same time, these programs may consume a lot of hardware resources and you will suffer a poor experience. So it is better not to run Hadoop on your laptop.
+
+If you like, you can choose to connect to your own test/production Hadoop cluster.
+
+```shell
 kylin@worker-03:~$ docker -v
 Docker version 20.10.17, build 100c701
 kylin@worker-03:~$ uname -a
@@ -136,15 +145,8 @@ kylin@worker-03:~$ free -h
 total        used        free      shared  buff/cache   available
 Mem:           47Gi       2.8Gi        37Gi       2.0Mi       8.0Gi        44Gi
 Swap:         8.0Gi          0B       8.0Gi
+```
 
-### Terms
-
-| Term          | Comment  |
-|:--------------|:---------|
-| Build Engine  |          |
-| Job Engine    |          |
-| Query Engine  |          |
-| Metadata      |          |
 
 ### Attention
 This guide is verified using this [verified tag](https://github.com/apache/kylin/releases/tag/ide-run-2023) <br>
@@ -245,10 +247,7 @@ npm install
 bash build/release/download-spark.sh
 ```
 
-Result of `ls -al build/spark/jars | wc -l` should be 282.
-By default, the `SPARK_HOME` is pointed to `$PROJECT_DIR/build/spark` .
-
-You are free to move spark directory to other place.
+By default, `SPARK_HOME` points to `$PROJECT_DIR/build/spark`. You are free to move the spark directory to another place.
 
 ### STEP 5: Import and configure project in IDE 
 
@@ -321,7 +320,7 @@ IDEA official reference is https://www.jetbrains.com/help/idea/run-debug-configu
 ```shell 
 KYLIN_HOME=$PROJECT_DIR
 KYLIN_CONF=$PROJECT_DIR/dev-support/contributor/sandbox/conf
-SPARK_HOME=/Users/xiaoxiang.yu/LacusDir/kyspark
+SPARK_HOME=
 HADOOP_CONF_DIR=$PROJECT_DIR/dev-support/contributor/sandbox/conf
 HADOOP_USER_NAME=root
 ```
@@ -388,14 +387,7 @@ rm -rf  ~/.m2/repository/org/apache/kylin
 rm -rf $PROJECT_DIR
 ```
 
-#### How to check out a specific git commit
-
-```shell
-// todo
-```
-
 #### Fix Invalid method name: 'get_all_functions'
-This is usually caused by hive .
 ```sh
 org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.TApplicationException: Invalid method name: 'get_all_functions'
        at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3904) ~[hive-exec-2.3.9.jar:2.3.9]
@@ -416,7 +408,6 @@ org.apache.hadoop.hdfs.CannotObtainBlockLengthException: Cannot obtain block len
        at org.apache.hadoop.hdfs.DFSInputStream.readBlockLength(DFSInputStream.java:470) ~[hadoop-hdfs-client-2.10.1.jar:?]
 ```
 
-todo
 
 #### Query Failed by InvalidClassException
 
diff --git a/kystudio/src/components/setting/SettingBasic/SettingBasic.vue b/kystudio/src/components/setting/SettingBasic/SettingBasic.vue
index c2ffa2ad27..47b52460bc 100644
--- a/kystudio/src/components/setting/SettingBasic/SettingBasic.vue
+++ b/kystudio/src/components/setting/SettingBasic/SettingBasic.vue
@@ -11,6 +11,10 @@
         <div class="setting-label font-medium">{{$t('projectName')}}</div>
         <div class="setting-value fixed">{{project.alias || 
project.project}}</div>
       </div>
+      <div class="setting-item">
+        <div class="setting-label font-medium">Package Info</div>
+        <div class="setting-value fixed">kylin.version 
{{project.package_version}}, {{project.package_timestamp}}, commit: 
{{project.git_commit}}</div>
+      </div>
       <div class="setting-item clearfix">
         <div class="setting-label font-medium">{{$t('description')}}</div>
         <div class="setting-value">{{project.description}}</div>
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/LightningController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/LightningController.java
index ace78b75cc..d03faa0ac6 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/LightningController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/LightningController.java
@@ -32,6 +32,7 @@ import org.springframework.web.bind.annotation.RestController;
 
 import io.swagger.annotations.ApiOperation;
 
+@Deprecated
 @RestController
 @RequestMapping(value = "/api/lightning", produces = { HTTP_VND_APACHE_KYLIN_JSON,
         HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON })
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/response/ProjectConfigResponse.java b/src/common-service/src/main/java/org/apache/kylin/rest/response/ProjectConfigResponse.java
index 613915a7ad..2649a9f7ef 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/response/ProjectConfigResponse.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/response/ProjectConfigResponse.java
@@ -144,6 +144,13 @@ public class ProjectConfigResponse {
     @JsonProperty("jdbc_source_driver")
     private String jdbcSourceDriver;
 
+    @JsonProperty("package_timestamp")
+    private String packageTimestamp;
+    @JsonProperty("git_commit")
+    private String gitCommit;
+    @JsonProperty("package_version")
+    private String packageVersion;
+
     public void setFrequencyTimeWindow(int frequencyTimeWindow) {
         switch (frequencyTimeWindow) {
         case 1:
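
For orientation, the three fields added above are plain Jackson-annotated properties, so they surface in the project-config JSON under the snake_case names from the annotations. A minimal self-contained sketch of the resulting payload shape; the values below are hypothetical, only the property names come from the diff:

```java
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// Stand-in for the new fields on ProjectConfigResponse; the values are made up.
class PackageInfoSketch {
    @JsonProperty("package_timestamp")
    public String packageTimestamp = "2023-10-08 16:45:43 +0800"; // hypothetical
    @JsonProperty("git_commit")
    public String gitCommit = "8de5c7a7121d@kylin5";              // hypothetical
    @JsonProperty("package_version")
    public String packageVersion = "5.0-SNAPSHOT";

    public static void main(String[] args) throws Exception {
        // Prints the JSON shape the front end reads in SettingBasic.vue:
        // {"package_timestamp":"...","git_commit":"...","package_version":"5.0-SNAPSHOT"}
        System.out.println(new ObjectMapper().writeValueAsString(new PackageInfoSketch()));
    }
}
```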
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index c3d1b8d0b6..f10df5bc0c 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -63,6 +63,7 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigBase;
+import org.apache.kylin.common.KylinVersion;
 import org.apache.kylin.common.event.ProjectCleanOldQueryResultEvent;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.Message;
@@ -74,6 +75,7 @@ import org.apache.kylin.common.scheduler.SourceUsageUpdateNotifier;
 import org.apache.kylin.common.util.EncryptUtil;
 import org.apache.kylin.common.util.JdbcUtils;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.SetThreadName;
 import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -611,6 +613,11 @@ public class ProjectService extends BasicService {
         response.setJdbcSourceEnable(config.getJdbcEnable());
         response.setJdbcSourceDriver(config.getJdbcDriver());
 
+        Pair<String, String> infos = KylinVersion.getGitCommitInfo();
+        response.setGitCommit(infos.getFirst());
+        response.setPackageVersion(KylinVersion.getCurrentVersion().toString());
+        response.setPackageTimestamp(infos.getSecond());
+
         if (SecondStorageUtil.isGlobalEnable()) {
             response.setSecondStorageEnabled(SecondStorageUtil.isProjectEnable(project));
             response.setSecondStorageNodes(SecondStorageUtil.listProjectNodes(project));
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
index 078573c394..7422bff2d7 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
 
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.guava30.shaded.common.base.Preconditions;
 import org.apache.kylin.guava30.shaded.common.collect.Iterables;
 
@@ -92,7 +92,7 @@ public class KylinVersion implements Comparable {
     /**
      * Require MANUAL updating kylin version per ANY upgrading.
      */
-    private static final KylinVersion CURRENT_KYLIN_VERSION = new KylinVersion("5.0.0");
+    private static final KylinVersion CURRENT_KYLIN_VERSION = new KylinVersion("5.0-SNAPSHOT");
 
     private static final KylinVersion VERSION_200 = new KylinVersion("2.0.0");
 
@@ -160,14 +160,17 @@ public class KylinVersion implements Comparable {
         return !signatureIncompatible;
     }
 
+
     public static String getKylinClientInformation() {
         StringBuilder buf = new StringBuilder();
 
+        Pair<String, String> info = getGitCommitInfo();
         buf.append("kylin.home: ").append(
                 KylinConfig.getKylinHome() == null ? "UNKNOWN" : new 
File(KylinConfig.getKylinHome()).getAbsolutePath())
                 .append("\n");
         
buf.append("kylin.version:").append(KylinVersion.getCurrentVersion()).append("\n");
-        buf.append("commit:").append(getGitCommitInfo()).append("\n");
+        buf.append("commit:").append(info.getFirst()).append("\n");
+        buf.append(info.getSecond()).append("\n"); // 
package.timestamp:%Y-%m-%d %H:%M:%S
         
buf.append("os.name:").append(System.getProperty("os.name")).append("\n");
         
buf.append("os.arch:").append(System.getProperty("os.arch")).append("\n");
         
buf.append("os.version:").append(System.getProperty("os.version")).append("\n");
@@ -177,23 +180,36 @@ public class KylinVersion implements Comparable {
         return buf.toString();
     }
 
-    public static String getGitCommitInfo() {
+    public static Pair<String, String> getGitCommitInfo() {
+        if (gitInfo != null) {
+            return gitInfo;
+        }
+        gitInfo = new Pair<>("N/A", "N/A");
         try {
             File commitFile = new File(KylinConfig.getKylinHome(), COMMIT_SHA1_v15);
             if (!commitFile.exists()) {
                 commitFile = new File(KylinConfig.getKylinHome(), COMMIT_SHA1_v13);
             }
             List<String> lines = FileUtils.readLines(commitFile, Charset.defaultCharset());
-            StringBuilder sb = new StringBuilder();
             for (String line : lines) {
-                if (!line.startsWith("#")) {
-                    sb.append(line).append(";");
+                if (line.contains("@") && gitInfo.getFirst().equals("N/A")) {
+                    gitInfo.setFirst(line);
+                } else if (line.contains("timestamp") && 
gitInfo.getSecond().equals("N/A")) {
+                    String[] words = line.split(":");
+                    if (words.length == 2) {
+                        gitInfo.setSecond(words[1]);
+                    } else {
+                        gitInfo.setSecond(line);
+                    }
                 }
             }
-            return sb.toString();
+            return gitInfo;
         } catch (Exception e) {
-            return StringUtils.EMPTY;
+            return gitInfo;
         }
     }
 
+    /* Git Commit and Package Timestamp */
+    private static Pair<String, String> gitInfo;
+
 }
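
The parsing contract above is deliberately loose: the first line containing `@` becomes the commit info and the first line containing `timestamp` becomes the package timestamp, cached in a Pair for later calls. A self-contained sketch of that logic against hypothetical build/commit_SHA1 content (the file layout is inferred from the package.sh change earlier in this commit):

```java
import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class GitCommitInfoSketch {
    // Mirrors the new getGitCommitInfo(): first '@' line -> commit, first 'timestamp' line -> timestamp.
    static Map.Entry<String, String> parse(List<String> lines) {
        String commit = "N/A";
        String timestamp = "N/A";
        for (String line : lines) {
            if (line.contains("@") && commit.equals("N/A")) {
                commit = line;
            } else if (line.contains("timestamp") && timestamp.equals("N/A")) {
                String[] words = line.split(":");
                // A "package.timestamp: 2023-10-08 16:45:43 +0800" line splits into more
                // than two parts because of the ':' in the time, so the whole line is kept.
                timestamp = (words.length == 2) ? words[1] : line;
            }
        }
        return new SimpleEntry<>(commit, timestamp);
    }

    public static void main(String[] args) {
        // Hypothetical build/commit_SHA1 content, following the package.sh change in this commit.
        List<String> lines = Arrays.asList(
                "8de5c7a7121dc37729a12ee231041f8d89d1494c@kylin5",
                "package.timestamp: 2023-10-08 16:45:43 +0800",
                "os: Linux worker-03 ...");
        Map.Entry<String, String> info = parse(lines);
        System.out.println(info.getKey());   // the commit@branch line
        System.out.println(info.getValue()); // the full package.timestamp line
    }
}
```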
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
index a1685cb587..203eece840 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
@@ -1651,7 +1651,7 @@ public class CnMessage extends Message {
 
     @Override
     public String getDDLViewNameError() {
-        return "视图名需要以 KE_ 开头";
+        return "视图名需要以 KYLIN_ 开头";
     }
 
     @Override
@@ -1661,7 +1661,7 @@ public class CnMessage extends Message {
 
     @Override
     public String getDDLDropError() {
-        return "仅支持删除 view 类型表且 view 名称需要以 KE_ 开头";
+        return "仅支持删除 view 类型表且 view 名称需要以 KYLIN_ 开头";
     }
 
     @Override
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
index 020b9d0efd..4a76464ce2 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
@@ -48,7 +48,7 @@ public class Message {
 
     private static final String PARAMETER_EMPTY = "Please enter the value for the parameter '%s'.";
     private static final String DDL_UNSUPPORTED = "Unsupported DDL syntax, only support single `create view`, `drop view`,  `alter view`, `show create table`";
-    private static final String DDL_VIEW_NAME_ERROR = "View names need to start with KE_";
+    private static final String DDL_VIEW_NAME_ERROR = "View names need to start with KYLIN_";
     private static final String DDL_VIEW_NAME_DUPLICATE_ERROR = "Logical View names is duplicate";
     private static final String DDL_DROP_ERROR = "Only support drop view";
     private static final String DDL_TABLE_NOT_LOADED = "Table '%s' is not loaded into the data source ";
diff --git a/src/core-common/src/main/resources/kylin-defaults0.properties b/src/core-common/src/main/resources/kylin-defaults0.properties
index 022ac5e57d..8007bc0ab8 100644
--- a/src/core-common/src/main/resources/kylin-defaults0.properties
+++ b/src/core-common/src/main/resources/kylin-defaults0.properties
@@ -14,7 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-kylin.metadata.url=ke_metadata@jdbc,driverClassName=org.postgresql.Driver,url=jdbc:postgresql://localhost:5432/kylin,username=postgres,password=
+
+spring.session.store-type=JDBC
+# session timeout is set to 10 minutes (600 seconds), see https://docs.spring.io/spring-boot/docs/2.0.x/reference/html/boot-features-session.html
+spring.session.timeout=600
+
+kylin.metadata.url=kylin_metadata@jdbc,driverClassName=org.postgresql.Driver,url=jdbc:postgresql://localhost:5432/kylin,username=postgres,password=
 kylin.metadata.audit-log.max-size=500000
 kylin.metadata.ops-cron=0 0 0 * * *
 kylin.metadata.top-recs-filter-cron=0 0 0 * * *
@@ -38,7 +43,7 @@ kylin.garbage.storage.executable-survival-time-threshold=30d
 # Kylin server mode, valid value [all, query, job]
 kylin.server.mode=all
 
-# KE server address, best to use HA domain name in PROD, used in yarn-cluster mode to update job info back to KE.
+# Kylin server address, best to use an HA domain name in PROD, used in yarn-cluster mode to update job info back to Kylin.
 # eg. 'kylin.server.address=10.1.2.30:7070'
 
 # Kylin server port
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
index 4dccc8a383..055fcaf5fe 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
@@ -1116,7 +1116,7 @@ class KylinConfigBaseTest {
         val config = KylinConfig.getInstanceFromEnv();
         assertEquals(config.getStreamingStatsUrl().toString(), config.getMetadataUrl().toString());
         assertEquals(config.getQueryHistoryUrl().toString(), config.getMetadataUrl().toString());
-        val pgUrl = "ke_metadata@jdbc,driverClassName=org.postgresql.Driver,"
+        val pgUrl = "kylin_metadata@jdbc,driverClassName=org.postgresql.Driver,"
                 + "url=jdbc:postgresql://sandbox:5432/kylin,username=postgres,password";
         config.setStreamingStatsUrl(pgUrl);
         assertEquals(pgUrl, config.getStreamingStatsUrl().toString());
@@ -1126,7 +1126,7 @@ class KylinConfigBaseTest {
 
     @Test
     void testMetadataUrlContainsComma() {
-        String url = "ke_metadata@jdbc,driverClassName=com.mysql.jdbc.Driver,"
+        String url = "kylin_metadata@jdbc,driverClassName=com.mysql.jdbc.Driver,"
                 + "url=\"jdbc:mysql:replication://10.1.3.12:3306,10.1.3.11:3306/kylin_test?useUnicode=true&characterEncoding=utf8\","
                 + "username=kylin,password=test,maxTotal=20,maxIdle=20";
         StorageURL storageURL = StorageURL.valueOf(url);
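
As the two tests above show, the metadata URL takes the shape `<identifier>@<scheme>` followed by comma-separated key=value pairs, where a quoted value may itself contain commas. A rough sketch of the simple unquoted case; the real parsing lives in StorageURL.valueOf, and this helper is only illustrative:

```java
import java.util.LinkedHashMap;
import java.util.Map;

public class MetadataUrlSketch {
    public static void main(String[] args) {
        // Shape of kylin.metadata.url: "<identifier>@<scheme>,key=value,...".
        String raw = "kylin_metadata@jdbc,driverClassName=org.postgresql.Driver,"
                + "url=jdbc:postgresql://sandbox:5432/kylin,username=postgres,password=";
        String identifier = raw.substring(0, raw.indexOf('@'));   // kylin_metadata
        String[] parts = raw.substring(raw.indexOf('@') + 1).split(",");
        String scheme = parts[0];                                 // jdbc
        Map<String, String> params = new LinkedHashMap<>();
        for (int i = 1; i < parts.length; i++) {
            String[] kv = parts[i].split("=", 2);
            params.put(kv[0], kv.length > 1 ? kv[1] : "");
        }
        // Note: unlike StorageURL.valueOf, this naive split would break on quoted
        // values containing commas, as exercised by testMetadataUrlContainsComma above.
        System.out.println(identifier + " / " + scheme + " / " + params);
    }
}
```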
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/NDefaultScheduler.java b/src/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/NDefaultScheduler.java
index 319ad29a60..368785b51a 100644
--- a/src/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/NDefaultScheduler.java
+++ b/src/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/NDefaultScheduler.java
@@ -43,7 +43,6 @@ import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.NExecutableManager;
 import org.apache.kylin.job.runners.FetcherRunner;
 import org.apache.kylin.job.runners.JobCheckRunner;
-import org.apache.kylin.job.runners.QuotaStorageCheckRunner;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.slf4j.Logger;
@@ -178,11 +177,6 @@ public class NDefaultScheduler implements Scheduler<AbstractExecutable> {
         logger.info("Fetching jobs every {} seconds", pollSecond);
         val fetcher = new FetcherRunner(this, jobPool, fetcherPool);
 
-        if (config.isStorageQuotaEnabled()) {
-            fetcherPool.scheduleWithFixedDelay(new QuotaStorageCheckRunner(this), RandomUtils.nextInt(0, pollSecond),
-                    pollSecond, TimeUnit.SECONDS);
-        }
-
         fetcherPool.scheduleWithFixedDelay(new JobCheckRunner(this), RandomUtils.nextInt(0, pollSecond), pollSecond,
                 TimeUnit.SECONDS);
         fetcherPool.scheduleWithFixedDelay(fetcher, RandomUtils.nextInt(0, pollSecond), pollSecond, TimeUnit.SECONDS);
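
The remaining runners keep the existing scheduling idiom: a random initial delay in [0, pollSecond) staggers the runners so they do not all fire at once, followed by a fixed delay between runs. A minimal standalone sketch of that idiom (pool size, interval, and task bodies are hypothetical, and ThreadLocalRandom stands in for commons-lang's RandomUtils):

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

public class JitteredSchedulerSketch {
    public static void main(String[] args) {
        int pollSecond = 30; // hypothetical poll interval
        ScheduledExecutorService fetcherPool = Executors.newScheduledThreadPool(2);
        // Random initial delay staggers the two runners; fixed delay keeps them periodic.
        fetcherPool.scheduleWithFixedDelay(() -> System.out.println("check jobs"),
                ThreadLocalRandom.current().nextInt(pollSecond), pollSecond, TimeUnit.SECONDS);
        fetcherPool.scheduleWithFixedDelay(() -> System.out.println("fetch jobs"),
                ThreadLocalRandom.current().nextInt(pollSecond), pollSecond, TimeUnit.SECONDS);
    }
}
```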
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/runners/LicenseCapacityCheckRunner.java b/src/core-job/src/main/java/org/apache/kylin/job/runners/LicenseCapacityCheckRunner.java
deleted file mode 100644
index 1b51dd2c3b..0000000000
--- a/src/core-job/src/main/java/org/apache/kylin/job/runners/LicenseCapacityCheckRunner.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kylin.job.runners;
-
-import static org.apache.kylin.common.exception.CommonErrorCode.LICENSE_OVER_CAPACITY;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.annotation.Clarification;
-import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
-import org.apache.kylin.metadata.sourceusage.SourceUsageManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import lombok.val;
-
-@Clarification(priority = Clarification.Priority.MAJOR, msg = "Enterprise")
-public class LicenseCapacityCheckRunner extends AbstractDefaultSchedulerRunner {
-    private static final Logger logger = LoggerFactory.getLogger(LicenseCapacityCheckRunner.class);
-
-    public LicenseCapacityCheckRunner(NDefaultScheduler nDefaultScheduler) {
-        super(nDefaultScheduler);
-    }
-
-    @Override
-    protected void doRun() {
-        logger.info("start check license capacity for project {}", project);
-        context.setLicenseOverCapacity(isLicenseOverCapacity());
-    }
-
-    private boolean isLicenseOverCapacity() {
-        val sourceUsageManager = SourceUsageManager.getInstance(KylinConfig.getInstanceFromEnv());
-
-        try {
-            sourceUsageManager.checkIsOverCapacity(project);
-        } catch (KylinException e) {
-            if (LICENSE_OVER_CAPACITY.toErrorCode() == e.getErrorCode()) {
-                logger.warn("Source usage over capacity, no job will be scheduled.", e);
-                return true;
-            }
-        } catch (Throwable e) {
-            logger.warn("Check source usage over capacity failed.", e);
-        }
-
-        // not over capacity
-        return false;
-    }
-}
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java b/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java
deleted file mode 100644
index 19691addee..0000000000
--- a/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kylin.job.runners;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.annotation.Clarification;
-import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
-import org.apache.kylin.metadata.cube.storage.ProjectStorageInfoCollector;
-import org.apache.kylin.metadata.cube.storage.StorageInfoEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.kylin.guava30.shaded.common.collect.Lists;
-
-import lombok.val;
-import lombok.var;
-
-@Clarification(priority = Clarification.Priority.MAJOR, msg = "Enterprise")
-public class QuotaStorageCheckRunner extends AbstractDefaultSchedulerRunner {
-    private static final Logger logger = LoggerFactory.getLogger(QuotaStorageCheckRunner.class);
-
-    private final ProjectStorageInfoCollector collector;
-
-    public QuotaStorageCheckRunner(NDefaultScheduler nDefaultScheduler) {
-        super(nDefaultScheduler);
-        collector = new ProjectStorageInfoCollector(Lists.newArrayList(StorageInfoEnum.STORAGE_QUOTA, StorageInfoEnum.TOTAL_STORAGE));
-    }
-
-    @Override
-    protected void doRun() {
-        logger.info("start check project {} storage quota.", nDefaultScheduler.getProject());
-        context.setReachQuotaLimit(reachStorageQuota());
-    }
-
-    private boolean reachStorageQuota() {
-        var storageVolumeInfo = collector.getStorageVolumeInfo(KylinConfig.getInstanceFromEnv(),
-                nDefaultScheduler.getProject());
-        var totalSize = storageVolumeInfo.getTotalStorageSize();
-        int retryCount = 3;
-        while (retryCount-- > 0 && totalSize < 0) {
-            storageVolumeInfo = collector.getStorageVolumeInfo(KylinConfig.getInstanceFromEnv(),
-                    nDefaultScheduler.getProject());
-            totalSize = storageVolumeInfo.getTotalStorageSize();
-        }
-        val storageQuotaSize = storageVolumeInfo.getStorageQuotaSize();
-        if (totalSize < 0) {
-            logger.error(
-                    "Project '{}' : an exception occurs when getting storage volume info, no job will be scheduled!!! The error info : {}",
-                    nDefaultScheduler.getProject(),
-                    storageVolumeInfo.getThrowableMap().get(StorageInfoEnum.TOTAL_STORAGE));
-            return true;
-        }
-        if (totalSize >= storageQuotaSize) {
-            logger.info("Project '{}' reach storage quota, no job will be scheduled!!!",
-                    nDefaultScheduler.getProject());
-            return true;
-        }
-        return false;
-    }
-}
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/QueryHistory.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/QueryHistory.java
index 59b2b3f0c5..2f3393b1af 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/QueryHistory.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/QueryHistory.java
@@ -43,7 +43,7 @@ public class QueryHistory {
     public static final String ADJ_SLOW = "Slow";
     public static final String QUERY_HISTORY_SUCCEEDED = "SUCCEEDED";
     public static final String QUERY_HISTORY_FAILED = "FAILED";
-    public static final String DEFAULT_DATABASE = "KE_HISTORY";
+    public static final String DEFAULT_DATABASE = "KYLIN_HISTORY";
 
     // database name
     public static final String DB_NAME = DEFAULT_DATABASE;
diff --git a/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/prometheus/PrometheusMetrics.java b/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/prometheus/PrometheusMetrics.java
index 52c1e3d892..bff885211d 100644
--- a/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/prometheus/PrometheusMetrics.java
+++ b/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/prometheus/PrometheusMetrics.java
@@ -26,25 +26,25 @@ import lombok.Getter;
 @Getter
 public enum PrometheusMetrics {
 
-    JVM_DB_CONNECTIONS("ke_db_connections", Type.INSTANCE_METRIC), //
+    JVM_DB_CONNECTIONS("kylin_db_connections", Type.INSTANCE_METRIC), //
 
     SPARK_TASKS("spark_tasks", Type.INSTANCE_METRIC), //
     SPARK_TASK_UTILIZATION("spark_tasks_utilization", Type.INSTANCE_METRIC), //
 
-    QUERY_SECONDS("ke_queries_seconds", Type.PROJECT_METRIC), //
-    QUERY_SCAN_BYTES("ke_queries_scan_bytes", Type.PROJECT_METRIC), //
-    QUERY_RESULT_ROWS("ke_queries_result_rows", Type.PROJECT_METRIC), //
-    QUERY_JOBS("ke_queries_jobs", Type.PROJECT_METRIC), //
-    QUERY_STAGES("ke_queries_stages", Type.PROJECT_METRIC), //
-    QUERY_TASKS("ke_queries_tasks", Type.PROJECT_METRIC), //
+    QUERY_SECONDS("kylin_queries_seconds", Type.PROJECT_METRIC), //
+    QUERY_SCAN_BYTES("kylin_queries_scan_bytes", Type.PROJECT_METRIC), //
+    QUERY_RESULT_ROWS("kylin_queries_result_rows", Type.PROJECT_METRIC), //
+    QUERY_JOBS("kylin_queries_jobs", Type.PROJECT_METRIC), //
+    QUERY_STAGES("kylin_queries_stages", Type.PROJECT_METRIC), //
+    QUERY_TASKS("kylin_queries_tasks", Type.PROJECT_METRIC), //
 
-    SPARDER_UP("ke_sparder_up", Type.INSTANCE_METRIC), //
+    SPARDER_UP("kylin_sparder_up", Type.INSTANCE_METRIC), //
 
-    JOB_COUNTS("ke_job_counts", Type.PROJECT_METRIC), //
-    JOB_MINUTES("ke_job_minutes", Type.PROJECT_METRIC), //
-    JOB_LONG_RUNNING("ke_long_running_jobs", Type.PROJECT_METRIC), //
+    JOB_COUNTS("kylin_job_counts", Type.PROJECT_METRIC), //
+    JOB_MINUTES("kylin_job_minutes", Type.PROJECT_METRIC), //
+    JOB_LONG_RUNNING("kylin_long_running_jobs", Type.PROJECT_METRIC), //
 
-    MODEL_BUILD_DURATION("ke_model_build_minutes", Type.PROJECT_METRIC | 
Type.MODEL_METRIC);
+    MODEL_BUILD_DURATION("kylin_model_build_minutes", Type.PROJECT_METRIC | 
Type.MODEL_METRIC);
 
     private static class Type {
         public static final int GLOBAL = 0;
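
Type here is a bit set, which is why MODEL_BUILD_DURATION above can carry `Type.PROJECT_METRIC | Type.MODEL_METRIC`. A small sketch of the idiom; only `GLOBAL = 0` is visible in the diff, so the other flag values are assumed powers of two:

```java
public class MetricTypeSketch {
    // GLOBAL = 0 comes from the diff; the remaining flag values are hypothetical.
    static final int GLOBAL = 0;
    static final int INSTANCE_METRIC = 1 << 0;
    static final int PROJECT_METRIC = 1 << 1;
    static final int MODEL_METRIC = 1 << 2;

    public static void main(String[] args) {
        int type = PROJECT_METRIC | MODEL_METRIC; // combined, as for kylin_model_build_minutes
        System.out.println((type & PROJECT_METRIC) != 0);  // true: reported per project
        System.out.println((type & MODEL_METRIC) != 0);    // true: and per model
        System.out.println((type & INSTANCE_METRIC) != 0); // false
    }
}
```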
diff --git a/src/core-metrics/src/test/java/org/apache/kylin/common/metric/InfluxDBInstanceTest.java b/src/core-metrics/src/test/java/org/apache/kylin/common/metric/InfluxDBInstanceTest.java
index c7963e29bd..e4c69c9e5c 100644
--- a/src/core-metrics/src/test/java/org/apache/kylin/common/metric/InfluxDBInstanceTest.java
+++ b/src/core-metrics/src/test/java/org/apache/kylin/common/metric/InfluxDBInstanceTest.java
@@ -43,13 +43,13 @@ import org.apache.kylin.shaded.influxdb.org.influxdb.dto.QueryResult;
 @MetadataInfo(onlyProps = true)
 public class InfluxDBInstanceTest {
 
-    private final String SHOW_DATABASES = "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"databases\",\"columns\":[\"name\"],\"values\":[[\"_internal\"],[\"KE_HISTORY\"]]}]}]}\n";
+    private final String SHOW_DATABASES = "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"databases\",\"columns\":[\"name\"],\"values\":[[\"_internal\"],[\"KYLIN_HISTORY\"]]}]}]}\n";
 
     private InfluxDBInstance influxDBInstance;
 
     @BeforeEach
     public void setup() throws Exception {
-        influxDBInstance = new InfluxDBInstance("KE_HISTORY", 
"KYLIN_MONITOR_RP", "", "", 1, false);
+        influxDBInstance = new InfluxDBInstance("KYLIN_HISTORY", 
"KYLIN_MONITOR_RP", "", "", 1, false);
         influxDBInstance.init();
         influxDBInstance.setInfluxDB(mockInfluxDB());
     }
diff --git a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
index f86f2e0267..9b93e6b4f7 100644
--- a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
+++ b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
@@ -40,7 +40,7 @@ import org.apache.spark.sql.execution.command._
 
 class ViewCheck extends DDLCheck {
   private val LOGGER = LoggerFactory.getLogger(classOf[ViewCheck])
-  private val PREFIX = "KE_"
+  private val PREFIX = "KYLIN_"
   private val SOURCE = new NSparkMetadataExplorer
   private val LOGICAL_VIEW_TYPE = "GlobalTempView"
 
@@ -53,8 +53,8 @@ class ViewCheck extends DDLCheck {
     if ("hive".equalsIgnoreCase(pageType)) {
       databasesHasAccess.append(listAllDatabasesHasAccess(project))
       syntaxSupport.append("`create view`,`alter view`,`drop view`,`show create table`")
-      cnDescription.append("Hive View 名称需要以 `KE_` 开头\t")
-      enDescription.append("Hive View name should start with `KE_`\t")
+      cnDescription.append("Hive View 名称需要以 `KYLIN_` 开头\t")
+      enDescription.append("Hive View name should start with `KYLIN_`\t")
       cnDescription
         .append(s"仅支持 ${syntaxSupport} 语法\t")
       enDescription
diff --git a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
index 260879a1bc..cf23d159aa 100644
--- a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
+++ b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
@@ -74,22 +74,22 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
 
   // Hive View
   private static final String CREATEVIEW_SQL1 =
-      "CREATE VIEW `ssb`.`ke_order_view` as select LO_ORDERKEY, C_NAME from SSB.p_lineorder t1 left join "
+      "CREATE VIEW `ssb`.`kylin_order_view` as select LO_ORDERKEY, C_NAME from SSB.p_lineorder t1 left join "
           + "SSB. CUSTOMER t2 on t1. LO_CUSTKEY = t2. C_CUSTKEY";
   private static final String CREATEVIEW_SQL2 = "CREATE VIEW `ssb`.`order_view2` as select * from SSB.P_LINEORDER";
   private static final String CREATEVIEW_SQL3 = "CREATE VIEW `ssb`.`order_view2` as abc";
-  private static final String CREATEVIEW_SQL4 = "CREATE VIEW `ssb`.`ke_order_view2` as select * from SSB.unload_table";
-  private static final String CREATEVIEW_SQL5 = "CREATE VIEW `ke_order_view2` as select * from SSB.P_LINEORDER";
+  private static final String CREATEVIEW_SQL4 = "CREATE VIEW `ssb`.`kylin_order_view2` as select * from SSB.unload_table";
+  private static final String CREATEVIEW_SQL5 = "CREATE VIEW `kylin_order_view2` as select * from SSB.P_LINEORDER";
   private static final String CREATEVIEW_SQL6 = "abc";
-  private static final String CREATEVIEW_SQL7 = "CREATE VIEW `ssb`.`ke_order_view3` as select * from SSB.P_LINEORDER";
+  private static final String CREATEVIEW_SQL7 = "CREATE VIEW `ssb`.`kylin_order_view3` as select * from SSB.P_LINEORDER";
   private static final String ALTERVIEW_SQL =
-      "alter view `ssb`.`ke_order_view` as select lo_orderkey from SSB.P_LINEORDER";
-  private static final String DROPVIEW_SQL1 = "drop view `ssb`.`ke_order_view`";
-  private static final String DROPVIEW_SQL2 = "drop table `ssb`.`ke_table1`";
-  private static final String DROPVIEW_SQL3 = "drop table `ssb`.`ke_order_view`";
-  private static final String DROPVIEW_SQL4 = "drop table `ke_table2`";
+      "alter view `ssb`.`kylin_order_view` as select lo_orderkey from SSB.P_LINEORDER";
+  private static final String DROPVIEW_SQL1 = "drop view `ssb`.`kylin_order_view`";
+  private static final String DROPVIEW_SQL2 = "drop table `ssb`.`kylin_table1`";
+  private static final String DROPVIEW_SQL3 = "drop table `ssb`.`kylin_order_view`";
+  private static final String DROPVIEW_SQL4 = "drop table `kylin_table2`";
 
-  private static final String SHOWVIEW_SQL = "show create table ssb.ke_order_view";
+  private static final String SHOWVIEW_SQL = "show create table ssb.kylin_order_view";
 
   // Logical View
   private static final String CREATE_LOGICAL_VIEW_SQL1 = "CREATE LOGICAL VIEW  "
@@ -201,7 +201,7 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
     ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL1, HIVE_VIEW));
     ddlService.executeSQL(new ViewRequest("ssb", ALTERVIEW_SQL, HIVE_VIEW));
     String createViewSQL = ddlService.executeSQL(new ViewRequest("ssb", SHOWVIEW_SQL, HIVE_VIEW));
-    Assert.assertTrue(createViewSQL.contains("ke_order_view"));
+    Assert.assertTrue(createViewSQL.contains("kylin_order_view"));
     ddlService.executeSQL(new ViewRequest("ssb", DROPVIEW_SQL1, HIVE_VIEW));
   }
 
diff --git a/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala b/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
index ae46511dbb..9ec00b46fa 100644
--- a/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
+++ b/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
@@ -130,7 +130,7 @@ object SparkDDLTestUtils {
        """.stripMargin)
     spark.sql(
       s"""
-         |CREATE TABLE if not exists `ssb`.`ke_table1`(
+         |CREATE TABLE if not exists `ssb`.`kylin_table1`(
          |  `c1` int,
          |  `c2` string)
        """.stripMargin)
diff --git a/src/examples/test_case_data/sandbox/kylin.properties b/src/examples/test_case_data/sandbox/kylin.properties
index 5fe04b2fe6..854b2ae343 100644
--- a/src/examples/test_case_data/sandbox/kylin.properties
+++ b/src/examples/test_case_data/sandbox/kylin.properties
@@ -20,7 +20,7 @@
 kylin.env=DEV
 
 # The metadata store, by default stored in jdbc
-kylin.metadata.url=ke_metadata@jdbc,driverClassName=org.postgresql.Driver,url=jdbc:postgresql://sandbox:5432/kylin,username=postgres,password=
+kylin.metadata.url=kylin_metadata@jdbc,driverClassName=org.postgresql.Driver,url=jdbc:postgresql://sandbox:5432/kylin,username=postgres,password=
 
 kylin.source.record-source-usage-enabled=false
 
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
index ce2b31f4a2..9f032be1e9 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
@@ -41,7 +41,7 @@ public class Column {
     @JacksonXmlProperty(localName = "hidden", isAttribute = true)
     private String hidden;
 
-    @JacksonXmlProperty(localName = "ke_cube_used", isAttribute = true)
+    @JacksonXmlProperty(localName = "kylin_cube_used", isAttribute = true)
     private String keCubeUsed;
 
     @JacksonXmlProperty(localName = "aggregation", isAttribute = true)
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
index 2e34be82c6..577035ee36 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
@@ -29,7 +29,7 @@ public class Col {
     @JacksonXmlProperty(localName = "value", isAttribute = true)
     private String value;
 
-    @JacksonXmlProperty(localName = "ke_cc_expr", isAttribute = true)
+    @JacksonXmlProperty(localName = "kylin_cc_expr", isAttribute = true)
     private String keCcExpr;
 
     public String getKey() {
diff --git a/src/server/pom.xml b/src/server/pom.xml
index 8a058ed74d..fdf7d8182e 100644
--- a/src/server/pom.xml
+++ b/src/server/pom.xml
@@ -21,7 +21,7 @@
     <artifactId>kylin-server</artifactId>
     <packaging>jar</packaging>
     <name>Kylin - REST Server</name>
-    <url>http://kylin.apache.org</url>
+    <url>https://kylin.apache.org</url>
     <description>REST Service and RPC Server</description>
 
     <parent>
diff --git a/src/server/src/main/java/org/apache/kylin/rest/BootstrapServer.java b/src/server/src/main/java/org/apache/kylin/rest/BootstrapServer.java
index b96663bbbb..ecf702fb54 100644
--- a/src/server/src/main/java/org/apache/kylin/rest/BootstrapServer.java
+++ b/src/server/src/main/java/org/apache/kylin/rest/BootstrapServer.java
@@ -52,7 +52,7 @@ import org.apache.kylin.guava30.shaded.common.hash.Hashing;
 import lombok.val;
 
 @ImportResource(locations = { "applicationContext.xml", "kylinSecurity.xml" })
-@SpringBootApplication
+@SpringBootApplication(scanBasePackages="org.apache.kylin", excludeName = "io.kyligence.kap.secondstorage.management.ManagementConfig")
 @EnableScheduling
 @EnableAsync
 @EnableCaching
diff --git a/src/server/src/main/resources/application.yaml b/src/server/src/main/resources/application.yaml
index 8f7ba16fd8..f548512b7a 100644
--- a/src/server/src/main/resources/application.yaml
+++ b/src/server/src/main/resources/application.yaml
@@ -69,7 +69,7 @@ spring:
       max-file-size: "5MB"
       max-request-size: "5MB"
   application:
-    name: ${kylin.server.mode}
+    name: ${kylin.server.mode:all}
   web:
     resources:
       chain:
diff --git a/src/server/src/main/resources/config/config_library.csv b/src/server/src/main/resources/config/config_library.csv
index 1de0dfe3fd..ad99e57757 100644
--- a/src/server/src/main/resources/config/config_library.csv
+++ b/src/server/src/main/resources/config/config_library.csv
@@ -159,20 +159,20 @@ kylin.guardian.check-init-delay,Int,KG开始检测时间,KG starts detection tim
 kylin.guardian.check-interval,Int,KG周期检测间隔,KG cycle detection interval,TRUE,TRUE
 kylin.guardian.checkers,String,置检查的规则,Rules for checking inspections,TRUE,TRUE
 kylin.guardian.downgrade-mode-parallel-query-threshold,Int,并行查询最大阈值,The maximum threshold of parallel query,TRUE,TRUE
-kylin.guardian.downgrade-on-full-gc-busy-enabled,boolean,Full GC时长占比过大时处理KE规则繁忙处理开关,"Full GC duration is too large to process KE rules, busy processing switch",TRUE,TRUE
+kylin.guardian.downgrade-on-full-gc-busy-enabled,boolean,Full GC时长占比过大时处理KE规则繁忙处理开关,"Full GC duration is too large to process Kylin rules, busy processing switch",TRUE,TRUE
 kylin.guardian.enabled,boolean,KG守护进程是否开启,Is the KG Guardian process open?,TRUE,TRUE
-kylin.guardian.full-gc-busy-high-watermark,Double,Full GC时长占比过大时处理KE规则FullGC占比高繁忙阈值,Full GC duration is too large to process KE rules FullGC accounts for high busy threshold,TRUE,TRUE
-kylin.guardian.full-gc-busy-low-watermark,Double,Full GC时长占比过大时处理KE规则FullGC占比低繁忙阈值,Full GC duration is too large to process KE rules FullGC accounts for low busy threshold,TRUE,TRUE
-kylin.guardian.full-gc-check-factor,Int,Full GC时长占比过大时处理KE规则检测时长,FULL GC duration to process KE rules detection duration when the proportion is too large,TRUE,TRUE
-kylin.guardian.full-gc-duration-ratio-restart-enabled,boolean,Full GC时长占比过大时处理KE规则过载重启开关,Full GC duration is too large to process the KE rule overload and restart the switch,TRUE,TRUE
-kylin.guardian.full-gc-duration-ratio-threshold,Double,Full GC时长占比过大时处理KE规则FullGC占比过载阈值,"Full GC duration to process KE rules when the proportion is too large, FullGC accounts for overload threshold",TRUE,TRUE
-kylin.guardian.ha-check-init-delay,Int,开始检测KE时间,Start detect KE time,TRUE,TRUE
-kylin.guardian.ha-check-interval,Int,KG检测KE周期间隔,KG detection KE weekly interval,TRUE,TRUE
-kylin.guardian.ha-enabled,boolean,KE保证KG HA开关,KE guarantee KG HA switch,TRUE,TRUE
-kylin.guardian.kill-slow-query-fail-restart-enabled,boolean,杀死超时查询失败次数过多时重启KE规则开关,The kill timeout query fails too much time to restart the KE rules switch,TRUE,TRUE
-kylin.guardian.kill-slow-query-fail-threshold,Int,杀死超时查询失败次数过多时重启KE规则杀死失败次数阈值,Killout time query fails too much time to restart the KE rules to kill the failure number of the number of failures,TRUE,TRUE
-kylin.guardian.restart-spark-fail-restart-enabled,boolean,Spark重启失败次数过多时重启KE规则开关,"SPARK restarts too many times, restart the KE rules switch",TRUE,TRUE
-kylin.guardian.restart-spark-fail-threshold,Int,Spark重启失败次数过多时重启KE规则重启失败阈值,"SPARK restart failure times when there are too many times, restart the KE rules restart the failure threshold",TRUE,TRUE
+kylin.guardian.full-gc-busy-high-watermark,Double,Full GC时长占比过大时处理KE规则FullGC占比高繁忙阈值,Full GC duration is too large to process Kylin rules FullGC accounts for high busy threshold,TRUE,TRUE
+kylin.guardian.full-gc-busy-low-watermark,Double,Full GC时长占比过大时处理KE规则FullGC占比低繁忙阈值,Full GC duration is too large to process Kylin rules FullGC accounts for low busy threshold,TRUE,TRUE
+kylin.guardian.full-gc-check-factor,Int,Full GC时长占比过大时处理KE规则检测时长,FULL GC duration to process Kylin rules detection duration when the proportion is too large,TRUE,TRUE
+kylin.guardian.full-gc-duration-ratio-restart-enabled,boolean,Full GC时长占比过大时处理KE规则过载重启开关,Full GC duration is too large to process the Kylin rule overload and restart the switch,TRUE,TRUE
+kylin.guardian.full-gc-duration-ratio-threshold,Double,Full GC时长占比过大时处理KE规则FullGC占比过载阈值,"Full GC duration to process Kylin rules when the proportion is too large, FullGC accounts for overload threshold",TRUE,TRUE
+kylin.guardian.ha-check-init-delay,Int,开始检测KE时间,Start detect Kylin time,TRUE,TRUE
+kylin.guardian.ha-check-interval,Int,KG检测KE周期间隔,KG detection Kylin weekly interval,TRUE,TRUE
+kylin.guardian.ha-enabled,boolean,Kylin 保证KG HA开关,Kylin guarantee KG HA switch,TRUE,TRUE
+kylin.guardian.kill-slow-query-fail-restart-enabled,boolean,杀死超时查询失败次数过多时重启KE规则开关,The kill timeout query fails too much time to restart the Kylin rules switch,TRUE,TRUE
+kylin.guardian.kill-slow-query-fail-threshold,Int,杀死超时查询失败次数过多时重启KE规则杀死失败次数阈值,Killout time query fails too much time to restart the Kylin rules to kill the failure number of the number of failures,TRUE,TRUE
+kylin.guardian.restart-spark-fail-restart-enabled,boolean,Spark重启失败次数过多时重启KE规则开关,"SPARK restarts too many times, restart the Kylin rules switch",TRUE,TRUE
+kylin.guardian.restart-spark-fail-threshold,Int,Spark重启失败次数过多时重启KE规则重启失败阈值,"SPARK restart failure times when there are too many times, restart the Kylin rules restart the failure threshold",TRUE,TRUE
 kylin.health.metastore-error-response-ms,Int,metastore健康检查,超时时长,"Metastore health checkup, timeout time",TRUE,TRUE
 kylin.health.metastore-warning-response-ms,Int,metastore健康检查,警告状态时长,"Metastore health check, warning state durability",TRUE,TRUE
 kylin.index.base-index-auto-update,boolean,是否自动更新基础索引,Whether to automatically update the basic index,TRUE,TRUE
@@ -484,7 +484,7 @@ kylin.source.jdbc.source.enable,boolean,是否开启 jdbc 数据源,Whether to t
 kylin.source.jdbc.user,String,JDBC 连接用户名,JDBC connect user name,TRUE,TRUE
 kylin.source.kafka.poll-message-timeout-ms,String,拉取 kafka 数据超时时间,Pull Kafka dataout time timeout,TRUE,TRUE
 kylin.source.load-hive-tablename-enabled,boolean,前端:加载Hive表元数据-立即刷新按钮的展示,后端:是否支持加载Hive表,"Front end: Load the HIVE table metadata-immediately refresh the display of the button, back end: whether to support loading the Hive table",TRUE,TRUE
-kylin.source.load-hive-tablename-interval-seconds,Long,定时加载hive表信息到内存,前端可以根据列表加载对应的hive表到KE,Load the hive table information to the memory regularly. The front end can load the corresponding Hive table to KE according to the list,TRUE,TRUE
+kylin.source.load-hive-tablename-interval-seconds,Long,定时加载hive表信息到内存,前端可以根据列表加载对应的hive表到KE,Load the hive table information to the memory regularly. The front end can load the corresponding Hive table to Kylin according to the list,TRUE,TRUE
 kylin.source.name-case-sensitive-enabled,boolean,数据源列名是否敏感,Is the data source name sensitive?,TRUE,TRUE
 kylin.spark-conf.auto-prior,boolean,是否需要自动配置 spark conf,Do you need to automatically configure Spark Conf,TRUE,TRUE
 kylin.storage.columnar.dimension-range-filter-enabled,boolean,是否开启维度范围过滤,Whether to open the dimension range filter,TRUE,TRUE
diff --git a/src/server/src/main/resources/config/init.properties b/src/server/src/main/resources/config/init.properties
index 2bddfae312..94b780aa7a 100644
--- a/src/server/src/main/resources/config/init.properties
+++ b/src/server/src/main/resources/config/init.properties
@@ -38,7 +38,7 @@ kylin.garbage.storage.executable-survival-time-threshold=30d
 # Kylin server mode, valid value [all, query, job]
 kylin.server.mode=all
 
-# KE server address, best to use HA domain name in PROD, used in yarn-cluster mode to update job info back to KE.
+# Kylin server address, best to use HA domain name in PROD, used in yarn-cluster mode to update job info back to Kylin.
 # eg. 'kylin.server.address=10.1.2.30:7070'
 
 # Kylin server port
@@ -103,7 +103,6 @@ kylin.engine.spark-conf.spark.sql.legacy.timeParserPolicy=LEGACY
 kylin.engine.spark-conf.spark.sql.optimizer.dynamicPartitionPruning.enabled=false
 
 # ==================== QUERY SPARK CONTEXT & COLUMNAR STORAGE ====================
-kylin.storage.quota-in-giga-bytes=10240
 
 kylin.storage.columnar.shard-size-mb=256
 # for any spark config entry in http://spark.apache.org/docs/latest/configuration.html#environment-variables, prefix it with "kap.storage.columnar.env" and append here
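
With kylin.storage.quota-in-giga-bytes removed from the template, readers of that property fall back to whatever default the code supplies. A generic sketch of the lookup-with-default pattern using java.util.Properties; the 10240 shown is simply the value the old template carried, not a claim about KylinConfig internals:

    import java.io.StringReader;
    import java.util.Properties;

    public class PropertyDefaultDemo {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.load(new StringReader("kylin.server.mode=all\n"));
            // Key absent -> the supplied default is returned.
            String quota = props.getProperty("kylin.storage.quota-in-giga-bytes", "10240");
            System.out.println(quota); // -> 10240
        }
    }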
diff --git a/src/server/src/test/java/org/apache/kylin/rest/InitConfigurationTest.java b/src/server/src/test/java/org/apache/kylin/rest/InitConfigurationTest.java
index aa5b9a6302..838754b141 100644
--- a/src/server/src/test/java/org/apache/kylin/rest/InitConfigurationTest.java
+++ b/src/server/src/test/java/org/apache/kylin/rest/InitConfigurationTest.java
@@ -36,7 +36,7 @@ class InitConfigurationTest {
     void testInit() {
         HostInfoFetcher hostInfoFetcher = Mockito.spy(new DefaultHostInfoFetcher());
         ReflectionTestUtils.setField(configuration, "hostInfoFetcher", hostInfoFetcher);
-        Mockito.when(hostInfoFetcher.getHostname()).thenReturn("ke_host");
+        Mockito.when(hostInfoFetcher.getHostname()).thenReturn("kylin_host");
         KylinConfig.getInstanceFromEnv().setProperty("kylin.env.hostname-check-enabled", "false");
         try {
             configuration.init();
@@ -50,7 +50,7 @@ class InitConfigurationTest {
         } catch (KylinException e) {
             Assert.fail();
         }
-        Mockito.when(hostInfoFetcher.getHostname()).thenReturn("ke_host");
+        Mockito.when(hostInfoFetcher.getHostname()).thenReturn("kylin_host");
         Assert.assertThrows(KylinRuntimeException.class, () -> configuration.init());
     }
 }
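
The test above uses Mockito's spy pattern: the spy wraps a real DefaultHostInfoFetcher and only the stubbed getHostname() is overridden, so every other method keeps its real behavior. The renamed stub value kylin_host still contains an underscore, presumably why init() is expected to fail when the hostname check is enabled. A self-contained sketch of the pattern (RealFetcher is illustrative):

    import org.mockito.Mockito;

    public class SpyPatternDemo {
        public static class RealFetcher {
            public String getHostname() { return "real-host"; }
        }

        public static void main(String[] args) {
            RealFetcher spy = Mockito.spy(new RealFetcher());
            // Only this method is stubbed; unstubbed calls hit the real object.
            Mockito.when(spy.getHostname()).thenReturn("kylin_host");
            System.out.println(spy.getHostname()); // -> kylin_host
        }
    }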
diff --git a/src/spark-project/engine-build-sdk/pom.xml b/src/spark-project/engine-build-sdk/pom.xml
index 609fe8758d..aa21f63085 100644
--- a/src/spark-project/engine-build-sdk/pom.xml
+++ b/src/spark-project/engine-build-sdk/pom.xml
@@ -24,7 +24,7 @@
         <relativePath>../../../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
-    <name>Kylin - Build Engine</name>
+    <name>Kylin - Build Engine SDK</name>
     <artifactId>kylin-engine-build-sdk</artifactId>
 
     <dependencies>
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
index 615a42fbca..9076afb6a9 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/DFBuilderHelper.scala
@@ -31,7 +31,7 @@ import scala.util.{Failure, Success, Try}
 
 object DFBuilderHelper extends Logging {
 
-  val ENCODE_SUFFIX = "_KE_ENCODE"
+  val ENCODE_SUFFIX = "_KYLIN_ENCODE"
 
   /**
    * select columns to build
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildStage.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildStage.scala
index c552b4b6bb..d3f863a313 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildStage.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildStage.scala
@@ -399,7 +399,7 @@ abstract class BuildStage(private val jobContext: SegmentJob,
     val bitmapCol = DictionaryBuilderHelper.needGlobalDict(measure)
     val columns = if (Objects.nonNull(bitmapCol)) {
       val id = dataModel.getColumnIdByColumnName(bitmapCol.getIdentity)
-      Seq(s"${id}_KE_ENCODE")
+      Seq(s"${id}_KYLIN_ENCODE")
     } else {
       Seq.empty[String]
     } ++
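
Both _KE_ENCODE hunks above rename the suffix that is appended to a model column id to name its dictionary-encoded companion column. The naming rule itself is plain string concatenation; a trivial sketch (the column id is made up):

    public class EncodeSuffixDemo {
        // Mirrors the renamed constant in DFBuilderHelper.
        static final String ENCODE_SUFFIX = "_KYLIN_ENCODE";

        public static void main(String[] args) {
            int columnId = 7; // hypothetical column id from the data model
            System.out.println(columnId + ENCODE_SUFFIX); // -> 7_KYLIN_ENCODE
        }
    }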
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
index 7b06b362f5..16ea4763d3 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
@@ -581,7 +581,7 @@ abstract class FlatTableAndDictBase(private val jobContext: SegmentJob,
     val cols = matchedCols.map { dictColumn =>
       val wrapDictCol = DictionaryBuilder.wrapCol(dictColumn)
       val dbName = dictColumn.getTableRef.getTableDesc.getDatabase
-      dict_encode_v3(col(wrapDictCol), dbName).alias(wrapDictCol + "_KE_ENCODE")
+      dict_encode_v3(col(wrapDictCol), dbName).alias(wrapDictCol + "_KYLIN_ENCODE")
     }.toSeq
     val dictPlan = table
       .select(table.schema.map(ty => col(ty.name)) ++ cols: _*)
diff --git a/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/builder/v3dict/GlobalDictionarySuite.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/builder/v3dict/GlobalDictionarySuite.scala
index e93b933df9..8bbc4cf9ce 100644
--- a/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/builder/v3dict/GlobalDictionarySuite.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/builder/v3dict/GlobalDictionarySuite.scala
@@ -145,7 +145,7 @@ class GlobalDictionarySuite extends SparderBaseFunSuite with LocalMetadata with
       Row("a"),
       Row("b"))
 
-    val dictCol = Seq(dict_encode_v3(col(encodeColName), dbName).alias(colName + "_KE_ENCODE"))
+    val dictCol = Seq(dict_encode_v3(col(encodeColName), dbName).alias(colName + "_KYLIN_ENCODE"))
 
     val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
     val dictDfPlan = df
@@ -170,7 +170,7 @@ class GlobalDictionarySuite extends SparderBaseFunSuite with LocalMetadata with
     schema = schema.add(encodeColName, StringType)
     val data = Seq.empty[Row]
 
-    val dictCol = Seq(dict_encode_v3(col(encodeColName), dbName).alias(encodeColName + "_KE_ENCODE"))
+    val dictCol = Seq(dict_encode_v3(col(encodeColName), dbName).alias(encodeColName + "_KYLIN_ENCODE"))
 
     val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
     val dictDfPlan = df
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinDataFrameManager.scala b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinDataFrameManager.scala
index 115e6c9f28..db5e9b41ae 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinDataFrameManager.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinDataFrameManager.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql
 
 import java.sql.Timestamp
 
-import io.kyligence.kap.secondstorage.SecondStorage
 import org.apache.kylin.common.KylinConfig
 import org.apache.kylin.metadata.cube.model.{LayoutEntity, NDataflow, NDataflowManager}
 import org.apache.kylin.metadata.model.FusionModelManager
@@ -96,10 +95,8 @@ class KylinDataFrameManager(sparkSession: SparkSession) {
   }
 
   def read(dataflow: NDataflow, layout: LayoutEntity, pruningInfo: String): DataFrame = {
-    SecondStorage.trySecondStorage(sparkSession, dataflow, layout, pruningInfo).getOrElse {
       StorageStoreFactory.create(dataflow.getModel.getStorageType)
         .read(dataflow, layout, sparkSession, extraOptions.toMap)
-    }
   }
 
   /**

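
The deleted wrapper made the columnar read a fallback: trySecondStorage(...) returned an Option, and StorageStoreFactory was only consulted when it was empty. With second storage removed, the read path is unconditional. A Java analogue of the removed fallback shape (names illustrative):

    import java.util.Optional;

    public class FallbackReadDemo {
        public static void main(String[] args) {
            // Before: Option(secondStorageRead).getOrElse(columnarRead).
            Optional<String> secondStorage = Optional.empty(); // feature removed
            String result = secondStorage.orElseGet(() -> "columnar-storage-read");
            System.out.println(result); // -> columnar-storage-read
        }
    }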