This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin5 by this push:
     new 1b9e2794e6 Optimize release pipeline
1b9e2794e6 is described below

commit 1b9e2794e639b8a2dec94cf4a3b1df63fd96ee56
Author: XiaoxiangYu <x...@apache.org>
AuthorDate: Fri Sep 1 12:02:35 2023 +0800

    Optimize release pipeline
---
 .../release/all-in-one-docker}/.gitignore          |   0
 .../all_in_one/Dockerfile_hadoop                   |   0
 .../all-in-one-docker}/all_in_one/Dockerfile_kylin |  12 +-
 .../all-in-one-docker}/all_in_one/build_image.sh   |  38 +++-
 .../all_in_one/conf/hadoop/capacity-scheduler.xml  |   0
 .../all_in_one/conf/hadoop/core-site.xml           |   0
 .../all_in_one/conf/hadoop/hdfs-site.xml           |   0
 .../all_in_one/conf/hadoop/mapred-site.xml         |   0
 .../all_in_one/conf/hadoop/yarn-site.xml           |   8 +-
 .../all_in_one/conf/hive/hive-site.xml             |   0
 .../conf/kylin/kylin.properties.override           |   9 +-
 .../all-in-one-docker}/all_in_one/conf/zk/zoo.cfg  |   0
 .../all-in-one-docker/all_in_one/entrypoint.sh     | 121 +++++++++++
 .../all_in_one/scripts/install_mysql.sh            |   0
 .../release-machine/{Dockerfile => Dockerfile_1}   |  39 ++--
 .../release-machine/Dockerfile_2                   |  26 +--
 .../{ => release-machine}/README.md                |  39 +---
 .../release-machine/create-release-machine.sh      |  15 +-
 .../release-machine/{script => }/entrypoint.sh     |   0
 .../{script => }/release-publish.sh                | 222 +++++++++++----------
 build/sbin/bootstrap.sh                            |   4 +-
 build/sbin/guardian-get-process-status.sh          |   2 +-
 build/sbin/log-rotate-cron.sh                      |   8 +-
 .../kylin/rest/controller/NSystemController.java   |   2 +-
 .../org/apache/kylin/rest/cache/RedisCache.java    |   4 +-
 .../apache/kylin/job/runners/FetcherRunner.java    |   2 +-
 src/docker/all_in_one/entrypoint.sh                | 104 ----------
 .../java/org/apache/kylin/jdbc/KylinResultSet.java |   2 +-
 .../kylin/tool/StreamingJobDiagInfoTool.java       |   6 +-
 .../org/apache/kylin/tool/daemon/KapGuardian.java  |   2 +-
 .../tool/daemon/checker/KEProcessChecker.java      |   8 +-
 .../kylin/tool/daemon/checker/KEStatusChecker.java |   4 +-
 32 files changed, 350 insertions(+), 327 deletions(-)

diff --git a/src/docker/.gitignore b/build/release/all-in-one-docker/.gitignore
similarity index 100%
rename from src/docker/.gitignore
rename to build/release/all-in-one-docker/.gitignore
diff --git a/src/docker/all_in_one/Dockerfile_hadoop b/build/release/all-in-one-docker/all_in_one/Dockerfile_hadoop
similarity index 100%
rename from src/docker/all_in_one/Dockerfile_hadoop
rename to build/release/all-in-one-docker/all_in_one/Dockerfile_hadoop
diff --git a/src/docker/all_in_one/Dockerfile_kylin b/build/release/all-in-one-docker/all_in_one/Dockerfile_kylin
similarity index 87%
rename from src/docker/all_in_one/Dockerfile_kylin
rename to build/release/all-in-one-docker/all_in_one/Dockerfile_kylin
index fe6a210d70..2f047b36fd 100644
--- a/src/docker/all_in_one/Dockerfile_kylin
+++ b/build/release/all-in-one-docker/all_in_one/Dockerfile_kylin
@@ -16,25 +16,27 @@
 #
 
 # Docker image for apache kylin, based on the Hadoop image
-FROM hadoop3.2.1-all-in-one-for-kylin5
+# FROM hadoop3.2.1-all-in-one-for-kylin5
+FROM apachekylin/apache-kylin-standalone:5.x-base-dev-only
 
 USER root
+
+RUN apt-get -y install curl
+RUN apt-get update && apt-get -y install openjdk-8-jdk
+
 # make a new workdir
 RUN mkdir -p /home/kylin
 
 # change workdir to install Kylin
 WORKDIR /home/kylin
 
-ENV KYLIN_VERSION 5.0.0
+ENV KYLIN_VERSION 5.0.0-beta
 ENV KYLIN_HOME /home/kylin/apache-kylin-$KYLIN_VERSION-bin
 
-# TODO: Download Kylin from official website
 COPY package/apache-kylin-$KYLIN_VERSION-*.tar.gz /home/kylin/
 
 RUN tar -zxf /home/kylin/apache-kylin-$KYLIN_VERSION-*.tar.gz \
     && rm -f /home/kylin/apache-kylin-$KYLIN_VERSION-*.tar.gz \
-    && mv /home/kylin/apache-kylin-$KYLIN_VERSION-* $KYLIN_HOME \
-    && mkdir -p $KYLIN_HOME/lib/ext \
     && cp $HIVE_HOME/lib/mysql-connector-java.jar $KYLIN_HOME/lib/ext/ \
     && cp $HIVE_HOME/lib/mysql-connector-java.jar $KYLIN_HOME/spark/hive_1_2_2/
 
diff --git a/src/docker/all_in_one/build_image.sh b/build/release/all-in-one-docker/all_in_one/build_image.sh
similarity index 52%
copy from src/docker/all_in_one/build_image.sh
copy to build/release/all-in-one-docker/all_in_one/build_image.sh
index 058d8b4948..a3ffa76fca 100755
--- a/src/docker/all_in_one/build_image.sh
+++ b/build/release/all-in-one-docker/all_in_one/build_image.sh
@@ -16,20 +16,44 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+TAG=5.0-beta
 
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-cd ${DIR}
+cd ${DIR} || exit
 echo "build image in dir "${DIR}
 
-# TODO: download from the official website
 echo "package kylin in local for building image"
-rm -rf ${DIR}/package/*
 if [[ ! -d ${DIR}/package/ ]]; then
     mkdir -p ${DIR}/package/
 fi
-${DIR}/../../../build/release/release.sh
-cp ${DIR}/../../../dist/apache-kylin-*.tar.gz ${DIR}/package/
+
+# The official package does not ship with the Spark binary,
+# so download it on a laptop, uncompress it, run download-spark-user.sh and re-compress:
+#
+# wget https://archive.apache.org/dist/kylin/apache-kylin-5.0.0-beta/apache-kylin-5.0.0-beta-bin.tar.gz -P ${DIR}/package/
+# tar zxf apache-kylin-5.0.0-beta-bin.tar.gz
+# cd apache-kylin-5.0.0-beta-bin
+# bash sbin/download-spark-user.sh
+# tar -czf apache-kylin-5.0.0-beta-bin.tar.gz apache-kylin-5.0.0-beta-bin
 
 echo "start to build Hadoop docker image"
-docker build -f Dockerfile_hadoop -t hadoop3.2.1-all-in-one-for-kylin5 .
-docker build -f Dockerfile_kylin -t apachekylin/apache-kylin-standalone:5.0.0 .
+# docker build -f Dockerfile_hadoop -t hadoop3.2.1-all-in-one-for-kylin5 .
+docker build -f Dockerfile_kylin -t apachekylin/apache-kylin-standalone:${TAG} .
+
+docker image tag docker.io/apachekylin/apache-kylin-standalone:${TAG} apachekylin/apache-kylin-standalone:${TAG}
+docker stop Kylin5-Machine
+docker rm Kylin5-Machine
+
+docker run -d \
+  --name Kylin5-Machine \
+  --hostname Kylin5-Machine \
+  -m 8G \
+  -p 7070:7070 \
+  -p 8088:8088 \
+  -p 50070:50070 \
+  -p 8032:8032 \
+  -p 8042:8042 \
+  -p 2181:2181 \
+  apachekylin/apache-kylin-standalone:${TAG}
+
+docker logs --follow Kylin5-Machine
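
The rebuilt build_image.sh above now builds the Kylin image, retags it, recreates the Kylin5-Machine container and follows its logs. As a quick smoke test (not part of this commit), a minimal sketch that waits for the all-in-one container to answer on the web-UI port; the port and plain-HTTP check are assumptions based on the -p 7070:7070 mapping above.

```bash
#!/usr/bin/env bash
# Hypothetical helper: poll the mapped Kylin web UI until it responds or we give up.
for i in $(seq 1 60); do
  if curl -sf -o /dev/null http://localhost:7070/; then
    echo "Kylin UI is reachable on port 7070"
    exit 0
  fi
  echo "waiting for Kylin to start ($i/60)..."
  sleep 10
done
echo "Kylin did not come up in time" >&2
exit 1
```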
diff --git a/src/docker/all_in_one/conf/hadoop/capacity-scheduler.xml b/build/release/all-in-one-docker/all_in_one/conf/hadoop/capacity-scheduler.xml
similarity index 100%
rename from src/docker/all_in_one/conf/hadoop/capacity-scheduler.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hadoop/capacity-scheduler.xml
diff --git a/src/docker/all_in_one/conf/hadoop/core-site.xml b/build/release/all-in-one-docker/all_in_one/conf/hadoop/core-site.xml
similarity index 100%
rename from src/docker/all_in_one/conf/hadoop/core-site.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hadoop/core-site.xml
diff --git a/src/docker/all_in_one/conf/hadoop/hdfs-site.xml b/build/release/all-in-one-docker/all_in_one/conf/hadoop/hdfs-site.xml
similarity index 100%
rename from src/docker/all_in_one/conf/hadoop/hdfs-site.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hadoop/hdfs-site.xml
diff --git a/src/docker/all_in_one/conf/hadoop/mapred-site.xml b/build/release/all-in-one-docker/all_in_one/conf/hadoop/mapred-site.xml
similarity index 100%
rename from src/docker/all_in_one/conf/hadoop/mapred-site.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hadoop/mapred-site.xml
diff --git a/src/docker/all_in_one/conf/hadoop/yarn-site.xml b/build/release/all-in-one-docker/all_in_one/conf/hadoop/yarn-site.xml
similarity index 95%
rename from src/docker/all_in_one/conf/hadoop/yarn-site.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hadoop/yarn-site.xml
index f52a27f9f2..73a38f87ad 100644
--- a/src/docker/all_in_one/conf/hadoop/yarn-site.xml
+++ b/build/release/all-in-one-docker/all_in_one/conf/hadoop/yarn-site.xml
@@ -19,7 +19,7 @@
     </property>
     <property>
         <name>yarn.scheduler.minimum-allocation-mb</name>
-        <value>512</value>
+        <value>256</value>
     </property>
     <property>
         <name>yarn.scheduler.maximum-allocation-mb</name>
@@ -35,11 +35,11 @@
     </property>
     <property>
         <name>yarn.nodemanager.resource.cpu-vcores</name>
-        <value>4</value>
+        <value>8</value>
     </property>
     <property>
         <name>yarn.scheduler.capacity.maximum-am-resource-percent</name>
-        <value>0.5</value>
+        <value>0.6</value>
     </property>
     <property>
         <name>yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage</name>
@@ -53,4 +53,4 @@
         <name>yarn.resourcemanager.zk-address</name>
         <value>localhost:2181</value>
     </property>
-</configuration>
+</configuration>
\ No newline at end of file
diff --git a/src/docker/all_in_one/conf/hive/hive-site.xml b/build/release/all-in-one-docker/all_in_one/conf/hive/hive-site.xml
similarity index 100%
rename from src/docker/all_in_one/conf/hive/hive-site.xml
rename to build/release/all-in-one-docker/all_in_one/conf/hive/hive-site.xml
diff --git a/src/docker/all_in_one/conf/kylin/kylin.properties.override b/build/release/all-in-one-docker/all_in_one/conf/kylin/kylin.properties.override
similarity index 92%
rename from src/docker/all_in_one/conf/kylin/kylin.properties.override
rename to build/release/all-in-one-docker/all_in_one/conf/kylin/kylin.properties.override
index 01f23c950a..b1757f9810 100644
--- a/src/docker/all_in_one/conf/kylin/kylin.properties.override
+++ b/build/release/all-in-one-docker/all_in_one/conf/kylin/kylin.properties.override
@@ -26,16 +26,16 @@ kylin.env.hdfs-working-dir=/kylin
 
 # Query
 kylin.storage.columnar.spark-conf.spark.driver.memory=512M
-kylin.storage.columnar.spark-conf.spark.driver.memoryOverhead=512M
+kylin.storage.columnar.spark-conf.spark.driver.memoryOverhead=256M
 kylin.storage.columnar.spark-conf.spark.executor.cores=1
 kylin.storage.columnar.spark-conf.spark.executor.instances=1
 kylin.storage.columnar.spark-conf.spark.executor.memory=1024M
-kylin.storage.columnar.spark-conf.spark.executor.memoryOverhead=512M
+kylin.storage.columnar.spark-conf.spark.executor.memoryOverhead=256M
 
 # Build
 kylin.engine.driver-memory-strategy=2,500
 kylin.engine.spark-conf.spark.driver.memory=512M
-kylin.engine.spark-conf.spark.driver.memoryOverhead=512M
+kylin.engine.spark-conf.spark.driver.memoryOverhead=256M
 kylin.engine.spark-conf.spark.executor.cores=1
 kylin.engine.spark-conf.spark.executor.instances=1
 kylin.engine.spark-conf.spark.executor.memory=1024M
@@ -54,3 +54,6 @@ kylin.query.engine.push-down.enable-prepare-statement-with-params=true
 kylin.query.calcite.extras-props.FUN=standard,oracle
 kylin.circuit-breaker.threshold.project=500
 kylin.engine.resource-request-over-limit-proportion=3.0
+
+# Shutdown the lock
+kylin.job.auto-set-concurrent-jobs=false
diff --git a/src/docker/all_in_one/conf/zk/zoo.cfg b/build/release/all-in-one-docker/all_in_one/conf/zk/zoo.cfg
similarity index 100%
rename from src/docker/all_in_one/conf/zk/zoo.cfg
rename to build/release/all-in-one-docker/all_in_one/conf/zk/zoo.cfg
diff --git a/build/release/all-in-one-docker/all_in_one/entrypoint.sh b/build/release/all-in-one-docker/all_in_one/entrypoint.sh
new file mode 100644
index 0000000000..e503e16cf6
--- /dev/null
+++ b/build/release/all-in-one-docker/all_in_one/entrypoint.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+echo "127.0.0.1 sandbox" >> /etc/hosts
+START_FLAG="/home/kylin/first_run"
+TIMEOUT=600
+
+function run_command {
+  local STEP="$1"
+  shift 1
+
+  echo ""
+  echo "==============================================================================="
+  echo "*******************************************************************************"
+  echo "|"
+  echo "|   $STEP at $(date)"
+  echo "|   Command: $@"
+  echo "|"
+  "$@" 2>&1
+
+  local EC=$?
+  if [ $EC != 0 ]; then
+    echo "ERROR!!"
+    echo "[$STEP] Command FAILED : $@, please check!!!"
+    sleep 7200
+    exit $EC
+  else
+    echo "[$STEP] succeed."
+  fi
+}
+
+function check_and_monitor_status() {
+  local COMPONENT="$1"
+  shift 1
+  echo "Checking $COMPONENT's status..."
+  component_status=
+  ((time_left = TIMEOUT))
+  while ((time_left > 0)); do
+      sleep 10
+      "$@" 2>&1
+      component_status=$?
+      if [[ $component_status -eq 0 ]]; then
+          echo "+"
+          break
+      else
+          echo "-"
+      fi
+      ((time_left -= 10))
+  done
+  if [[ $component_status -eq 0 ]]; then
+      echo "Check $COMPONENT succeed."
+  else
+      echo "ERROR: check $COMPONENT failed."
+  fi
+  return $component_status
+}
+
+# clean pid files
+rm -f /tmp/*.pid
+rm -rf /data/zookeeper/*
+rm -f /data/zookeeper/zookeeper_server.pid
+
+##############################################
+
+run_command "Start MySQL" service mysql start
+if [ ! -f "/home/kylin/first_run" ]
+then
+    run_command "Create Database" mysql -uroot -p123456 -e "CREATE DATABASE IF 
NOT EXISTS kylin default charset utf8mb4 COLLATE utf8mb4_general_ci;"
+    run_command "Init Hive" schematool -initSchema -dbType mysql
+    run_command "Format HDFS" hdfs namenode -format
+fi
+
+run_command "Start HDFS [NameNode]" hdfs --daemon start namenode
+run_command "Start HDFS [DataNode]" hdfs --daemon start datanode
+
+# start yarn
+run_command "Start Yarn [ResourceManager]" yarn --daemon start resourcemanager
+run_command "Start Yarn [NodeManager]" yarn --daemon start nodemanager
+
+# start job history server
+# run_command "" mapred --daemon start historyserver
+
+run_command "Start Zookeeper" "$ZK_HOME"/bin/zkServer.sh start
+
+sleep 10s
+
+if [ ! -f $START_FLAG ]
+then
+    check_and_monitor_status "HDFS Usability" hadoop fs -mkdir /tmp
+    mkdir -p "$KYLIN_HOME"/logs
+    hdfs dfs -mkdir -p /kylin
+    run_command "Prepare sample data" "$KYLIN_HOME"/bin/sample.sh
+fi
+
+
+run_command "Start Kylin Instance" "$KYLIN_HOME"/bin/kylin.sh -v start
+
+check_and_monitor_status "Check Env Script" ls $KYLIN_HOME/bin/check-env-bypass
+check_and_monitor_status "Kylin Instance" cat "$KYLIN_HOME"/logs/kylin.log | 
grep -c "Initialized Spark"
+
+touch $START_FLAG
+echo "Kylin service is already available for you to preview."
+while :
+do
+    sleep 10
+done
diff --git a/src/docker/all_in_one/scripts/install_mysql.sh b/build/release/all-in-one-docker/all_in_one/scripts/install_mysql.sh
similarity index 100%
rename from src/docker/all_in_one/scripts/install_mysql.sh
rename to build/release/all-in-one-docker/all_in_one/scripts/install_mysql.sh
diff --git a/build/release/release-pipeline-docker/release-machine/Dockerfile b/build/release/release-pipeline-docker/release-machine/Dockerfile_1
similarity index 79%
rename from build/release/release-pipeline-docker/release-machine/Dockerfile
rename to build/release/release-pipeline-docker/release-machine/Dockerfile_1
index 6f2e88f8b8..ecbfc5b825 100644
--- a/build/release/release-pipeline-docker/release-machine/Dockerfile
+++ b/build/release/release-pipeline-docker/release-machine/Dockerfile_1
@@ -23,13 +23,14 @@
 
 FROM openjdk:8-slim
 
-ENV M2_HOME /root/apache-maven-3.8.8
+ENV M2_HOME /root/lib/apache-maven-3.8.8
 ENV PATH $PATH:$M2_HOME/bin
+ENV NVM_DIR="/root/.nvm"
 USER root
 
 WORKDIR /root
 
-# install tools
+# Install tools
 RUN set -eux; \
        apt-get update; \
        apt-get install -y --no-install-recommends lsof wget tar
@@ -46,32 +47,22 @@ RUN set -eux; \
        apt-get update; \
        apt-get install -y --no-install-recommends gcc g++ make bzip2
 
-# install Node JS
-RUN curl -sL https://deb.nodesource.com/setup_12.x | bash - \
-    && apt-get update \
-    && apt-get install -y --no-install-recommends nodejs
-
-# install maven
+# Install maven
 RUN wget https://archive.apache.org/dist/maven/maven-3/3.8.8/binaries/apache-maven-3.8.8-bin.tar.gz \
     && tar -zxvf apache-maven-3.8.8-bin.tar.gz \
-    && rm -f apache-maven-3.8.8-bin.tar.gz
+    && rm -f apache-maven-3.8.8-bin.tar.gz \
+    && mkdir /root/.m2 \
+    && mkdir /root/lib/ \
+    && mv apache-maven-3.8.8 /root/lib/
 
+# Install jekyll for the kylin website
 RUN apt-get install -y --fix-missing ruby-full
-
 RUN gem install jekyll:2.5.3 jekyll-multiple-languages:1.0.11  rouge:1.10.1 kramdown:1.17.0
 
-# install Node JS(this new version is for kylin website)
-RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.4/install.sh | bash
-
-ENV NVM_DIR="/root/.nvm"
-
-RUN mkdir /root/.m2
-COPY conf/settings.xml /root/.m2/settings.xml
-
-COPY script/entrypoint.sh /root/scripts/entrypoint.sh
-RUN chmod u+x /root/scripts/entrypoint.sh
-
-COPY script/release-publish.sh /root/release-publish.sh
-RUN chmod u+x /root/release-publish.sh
+# Install Node JS for packaging
+RUN curl -sL https://deb.nodesource.com/setup_12.x | bash - \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends nodejs
 
-#ENTRYPOINT ["/root/scripts/entrypoint.sh"]
\ No newline at end of file
+# Used to install Node JS 16.14 (this newer version is for the kylin website)
+RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.4/install.sh | bash
\ No newline at end of file
diff --git a/src/docker/all_in_one/build_image.sh b/build/release/release-pipeline-docker/release-machine/Dockerfile_2
old mode 100755
new mode 100644
similarity index 55%
rename from src/docker/all_in_one/build_image.sh
rename to build/release/release-pipeline-docker/release-machine/Dockerfile_2
index 058d8b4948..abdb44f632
--- a/src/docker/all_in_one/build_image.sh
+++ b/build/release/release-pipeline-docker/release-machine/Dockerfile_2
@@ -1,5 +1,3 @@
-#!/usr/bin/env bash
-
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -17,19 +15,13 @@
 # limitations under the License.
 #
 
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-cd ${DIR}
-echo "build image in dir "${DIR}
-
-# TODO: download from the official website
-echo "package kylin in local for building image"
-rm -rf ${DIR}/package/*
-if [[ ! -d ${DIR}/package/ ]]; then
-    mkdir -p ${DIR}/package/
-fi
-${DIR}/../../../build/release/release.sh
-cp ${DIR}/../../../dist/apache-kylin-*.tar.gz ${DIR}/package/
+# This is the Docker image for the Kylin 5 release process
+#
+# For the release policy and release process, please refer to the following links:
+# 1. https://www.apache.org/legal/release-policy.html
+# 2. https://infra.apache.org/release-publishing.html
 
-echo "start to build Hadoop docker image"
-docker build -f Dockerfile_hadoop -t hadoop3.2.1-all-in-one-for-kylin5 .
-docker build -f Dockerfile_kylin -t apachekylin/apache-kylin-standalone:5.0.0 .
+FROM release-machine:5.0-base
+COPY conf/settings.xml /root/.m2/settings.xml
+COPY release-publish.sh /root/release-publish.sh
+RUN chmod u+x /root/release-publish.sh
\ No newline at end of file
diff --git a/build/release/release-pipeline-docker/README.md b/build/release/release-pipeline-docker/release-machine/README.md
similarity index 85%
rename from build/release/release-pipeline-docker/README.md
rename to build/release/release-pipeline-docker/release-machine/README.md
index 0a4289e73d..0fc50ac49f 100644
--- a/build/release/release-pipeline-docker/README.md
+++ b/build/release/release-pipeline-docker/release-machine/README.md
@@ -5,6 +5,8 @@ for [release manager](https://infra.apache.org/release-publishing.html#releasema
 to complete [apache release process](https://www.apache.org/legal/release-policy.html)
 and obey [apache release policy](https://www.apache.org/legal/release-policy.html).
 
+For maven artifacts, please check [publishing-maven-artifacts](https://infra.apache.org/publishing-maven-artifacts.html).
+
 Some source code are modified from [apache spark release](https://github.com/apache/spark/tree/master/dev/create-release) scripts.
 Kylin project use [maven-release-plugin](https://maven.apache.org/maven-release/maven-release-plugin/) to release source code and maven artifacts
 
@@ -24,27 +26,12 @@ It also provided a way to publish documentation for Kylin 5.
 
 -[ ] Update `CURRENT_KYLIN_VERSION` in `KylinVersion.java` .
 
-### Step 1 : Configure Basic Info and Copy GPG Private Key
-
--  Start docker container
-
 ```bash
-docker run --name release-machine --hostname release-machine -i -t apachekylin/release-machine:latest  bash
-# docker ps -f name=release-machine
+docker start release-machine-1 \
+    -p 4040:4040 \
+    bash 
 ```
 
-- Copy GPG Private Key from your laptop into container
-
-```bash
-docker cp ~/XXX.private.key release-machine:/root
-```
-
-### Step 2 : Configure setenv.sh
-
-- Set correct values for all variables in `/root/scripts/setenv.sh`, such as **ASF_PASSWORD** and **GPG_PASSPHRASE**.
-
-#### Variables in setenv.sh
-
 | Name            | Comment              |
 |-----------------|----------------------|
 | ASF_USERNAME    | ID of Apache Account |
@@ -60,21 +47,10 @@ docker cp ~/XXX.private.key release-machine:/root
 
 Otherwise, you will fail in maven-deploy-plugin with http 401 error.
 
-### Step 3 : Install GPG Private Key
-
-```bash
-gpg --import XXX.private.key
-```
-
-```bash
-gpg --list-sigs {NAME of Your Key}
-```
-
 ### Step 4 : Publish Release Candidate
 
 ```bash
-export RELEASE_STEP=publish-rc
-bash release-publish.sh
+bash release-publish.sh publish-snapshot
 ```
 
 ### Step 5 : Vote for Release Candidate
@@ -84,8 +60,7 @@ bash release-publish.sh
 ### Step 6 : Publish Release Candidate
 
 ```bash
-export RELEASE_STEP=publish
-bash release-publish.sh
+bash release-publish.sh publish-release
 ```
 
 - Prepare vote template for announcement
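
Put together, the reworked README above reduces the release-manager flow to a short command sequence. A minimal sketch under the assumptions made in this commit (container name release-machine-1, script installed at /root/release-publish.sh by Dockerfile_2):

```bash
# Start the prepared release container and open a shell in it.
docker start release-machine-1
docker exec -it release-machine-1 bash

# Inside the container: publish the release candidate, then, once the vote
# has passed, publish the final release.
bash /root/release-publish.sh publish-snapshot
bash /root/release-publish.sh publish-release
```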
diff --git a/build/release/release-pipeline-docker/release-machine/create-release-machine.sh b/build/release/release-pipeline-docker/release-machine/create-release-machine.sh
index b001e6fa6a..497944cc6b 100644
--- a/build/release/release-pipeline-docker/release-machine/create-release-machine.sh
+++ b/build/release/release-pipeline-docker/release-machine/create-release-machine.sh
@@ -20,12 +20,17 @@
 #  */
 #
 
-docker build -f Dockerfile -t release-machine:latest .
+docker image rm release-machine:latest
+build_status='1'
+while [ "$build_status" != "0" ]
+do
+  echo "Build release-machine from $(date)"
+  docker build -f Dockerfile_1 -t release-machine:5.0-base .
+  build_status="$?"
+done
+
+docker build -f Dockerfile_2 -t release-machine:latest .
 
-if [[ "$?" == "0" ]]; then
-    echo "Docker image build succeed"
-    docker image tag release-machine:latest apachekylin/release-machine:latest
-fi
 #docker login -u xiaoxiangyu
 #docker push apachekylin/release-machine:latest
 
diff --git a/build/release/release-pipeline-docker/release-machine/script/entrypoint.sh b/build/release/release-pipeline-docker/release-machine/entrypoint.sh
similarity index 100%
rename from build/release/release-pipeline-docker/release-machine/script/entrypoint.sh
rename to build/release/release-pipeline-docker/release-machine/entrypoint.sh
diff --git a/build/release/release-pipeline-docker/release-machine/script/release-publish.sh b/build/release/release-pipeline-docker/release-machine/release-publish.sh
similarity index 63%
rename from build/release/release-pipeline-docker/release-machine/script/release-publish.sh
rename to build/release/release-pipeline-docker/release-machine/release-publish.sh
index 8538ecb320..615e0891ef 100644
--- a/build/release/release-pipeline-docker/release-machine/script/release-publish.sh
+++ b/build/release/release-pipeline-docker/release-machine/release-publish.sh
@@ -36,12 +36,9 @@ Top level targets are
   // package:
   publish-snapshot: Publish snapshot release to Apache snapshots
   publish-release: Publish a release to Apache release repo
-  reset:
-  preview-site:
-  publish-site:
-
-All other inputs are environment variables.
-
+  reset: Clean up after a failed release attempt
+  preview-site: Build the Kylin website in docker so you can preview it at localhost:7070
+  publish-site: After checking the preview, upload the website content to apache.org
 EOF
   exit 0
 }
@@ -53,14 +50,38 @@ function info {
 EOF
 }
 
+function ask_confirm {
+  if [ "$MODE" = "batch" ] ;then
+    return 0
+  fi
+  read -p "$1 Will you continue? [y/n] " ANSWER
+  if [ "$ANSWER" != "y" ]; then
+    info "Exiting."
+    exit 1
+  fi
+}
+
 if [ $# -eq 0 ]; then
   exit_with_usage
 else
-  RELEASE_STEP=$1
-  info "Running step : $RELEASE_STEP"
+  if [ "$1" = "reset" ] || \
+      [ "$1" = "publish-snapshot" ] || \
+      [ "$1" = "publish-release" ] || \
+      [ "$1" = "preview-site" ] || \
+      [ "$1" = "publish-site" ];
+  then
+      ask_confirm "You are running step [$1] as release manager"
+      RELEASE_STEP=$1
+  else
+      echo "Your input $1 is not valid."
+      exit_with_usage
+  fi
 fi
 
 function read_config {
+  if [ "$MODE" = "batch" ] ;then
+    return 0
+  fi
   local PROMPT="$1"
   local DEFAULT="$2"
   local REPLY=
@@ -73,14 +94,6 @@ function read_config {
   echo "$RETVAL"
 }
 
-function ask_confirm {
-  read -p "$1. Will you continue? [y/n] " ANSWER
-  if [ "$ANSWER" != "y" ]; then
-    info "Exiting."
-    exit 1
-  fi
-}
-
 function run_command {
   local BANNER="$1"
   shift 1
@@ -98,39 +111,54 @@ function run_command {
   fi
 }
 
-ASF_USERNAME=$(read_config "Your apache id?" "xxyu")
-GIT_USERNAME=$(read_config "Your full name(used as author of git commit)?" "Release manager")
-ASF_PASSWORD=$(read_config "Your apache password?" "")
+function switch_node_for_packaging {
+  # nvm use system
+  node -v # 12.22.12
+}
+
+function switch_node_for_website {
+  # nvm install 16.14.2
+  # nvm use 16.14.2
+  node -v # 16.14.2
+}
+
+ASF_USERNAME=$(read_config "Your apache id?" "$ASF_USERNAME")
+GIT_USERNAME=$(read_config "Your full name(used as author of git commit)?" "$GIT_USERNAME")
+ASF_PASSWORD=$(read_config "Your apache password?" "$ASF_PASSWORD")
 GIT_EMAIL=$ASF_USERNAME"@apache.org"
-GPG_KEY=$(read_config "GPG key of you(used to sign release candidate)?" "$GIT_EMAIL")
-GPG_PASSPHRASE=$(read_config "PASSPHRASE for your private GPG key?" "")
+GPG_KEY=$(read_config "GPG key of you(used to sign release candidate)?" "$GPG_KEY")
+GPG_PASSPHRASE=$(read_config "PASSPHRASE for your private GPG key?" "$GPG_PASSPHRASE")
 
-GIT_BRANCH=$(read_config "Git branch for release?" "kylin5")
-RELEASE_VERSION=$(read_config "Which version are you going to release?" "5.0")
-NEXT_RELEASE_VERSION=$(read_config "Which version is the next development version?" "5.0")
-RC_NUMBER="rc"$(read_config "Number for release candidate?" "1")
+GIT_BRANCH=$(read_config "Git branch for release?" "$GIT_BRANCH")
+RELEASE_VERSION=$(read_config "Which version are you going to release?" "$RELEASE_VERSION")
+NEXT_RELEASE_VERSION=$(read_config "Which version is the next development version?" "$NEXT_RELEASE_VERSION")
+RC_NUMBER="rc"$(read_config "Number for release candidate?" "$RC_NUMBER")
 
-export working_dir=/root/apachekylin-release-folder
+export working_dir=/root/release-folder
 source_code_folder=$working_dir/source/kylin
+svn_folder=$working_dir/svn
 packaging_folder=$source_code_folder/target/checkout
-svn_stage_folder=$working_dir/svn/stage_repo
+svn_stage_folder=$svn_folder/dev
 rc_name=apache-kylin-"${RELEASE_VERSION}"-${RC_NUMBER}
 release_candidate_folder=$svn_stage_folder/$rc_name
 
 branch_doc_1=document
 branch_doc_2=doc5.0
-doc_preview=preview
-document_folder=$working_dir/document/src
-document_folder_elder=$document_folder/$branch_doc_1
-document_folder_newer=$document_folder/$branch_doc_2
-document_folder_preview=$document_folder/$doc_preview
+document_folder=$working_dir/document
+document_folder_src=$document_folder/src
+document_folder_elder=$document_folder_src/$branch_doc_1
+document_folder_newer=$document_folder_src/$branch_doc_2
+document_folder_svn=$svn_folder/site
+
+LOG=$working_dir/build.log
 
 ASF_KYLIN_REPO="gitbox.apache.org/repos/asf/kylin.git"
 # GITHUB_REPO_URL=${GIT_REPO_URL:-https://github.com/apache/kylin.git}
 RELEASE_STAGING_LOCATION="https://dist.apache.org/repos/dist/dev/kylin";
 RELEASE_LOCATION="https://dist.apache.org/repos/dist/release/kylin";
+WEBSITE_SVN="https://svn.apache.org/repos/asf/kylin/site";
 
-GPG="gpg -u $GPG_KEY --no-tty --batch --pinentry-mode loopback"
+GPG_COMMAND="gpg -u $GPG_KEY --no-tty --batch --pinentry-mode loopback"
 
 if [[ -z "$ASF_PASSWORD" ]]; then
   echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
@@ -165,34 +193,37 @@ function reset_release {
 function prepare_release {
     info "Configuration and Clone Code"
     git config --global user.name "${GIT_USERNAME}"
-    git config --global user.email "${ASF_USERNAME}"@apache.org
+    git config --global user.email ${GIT_EMAIL}
     git config --global user.password ${ASF_PASSWORD}
     mkdir -p $working_dir
     cd $working_dir
 
     if [ ! -d "${source_code_folder}" ]
     then
+        mkdir -p ${source_code_folder}
         info "Clone source code to ${source_code_folder} ."
-        run_command "Clone Gitbox" git clone 
"https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO"; -b "$GIT_BRANCH"
+        run_command "Clone Gitbox" git clone 
"https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO"; -b "$GIT_BRANCH" 
${source_code_folder}
     fi
 
     if [ ! -d "${release_candidate_folder}" ]
     then
-        info "Clone svn working dir to $working_dir ."
+        mkdir -p ${release_candidate_folder}
+        info "Clone svn working dir to $svn_folder ."
         run_command "Clone ASF SVN" svn co $RELEASE_STAGING_LOCATION 
$svn_stage_folder
     fi
+    switch_node_for_packaging
     return 0
 }
 
 function publish_snapshot_source {
     info "Publish source code, maven artifact, git tag for release candidate"
-    # Go to source directory
     cd ${source_code_folder}
 
     tag_exist=`git tag --list | grep kylin-"${RELEASE_VERSION}" | wc -l`
     if [[ $tag_exist != 0 ]]; then
-       echo "Delete local tag"
+       echo "Delete local and remote tag"
        git tag --delete kylin-"${RELEASE_VERSION}"
+       git push --delete origin kylin-"${RELEASE_VERSION}"
     fi
 
     ## Prepare tag & source tarball & upload maven artifact
@@ -200,13 +231,17 @@ function publish_snapshot_source {
     # https://maven.apache.org/maven-release/maven-release-plugin/examples/prepare-release.html
     # https://infra.apache.org/publishing-maven-artifacts.html
     # Use `mvn release:clean`  if you want to prepare again
-    run_command "Maven Release Prepare" mvn -DskipTests 
-DreleaseVersion="${RELEASE_VERSION}" \
-      -DdevelopmentVersion="${NEXT_RELEASE_VERSION}"-SNAPSHOT 
-Papache-release,nexus -DdryRun=${DRY_RUN} \
-      -Darguments="-Dmaven.javadoc.skip=true 
-Dgpg.passphrase=${GPG_PASSPHRASE} -DskipTests" \
-      release:prepare
-    run_command "Maven Release Perform" mvn -DskipTests -Papache-release,nexus 
\
-      -Darguments="-Dmaven.javadoc.skip=true 
-Dgpg.passphrase=${GPG_PASSPHRASE} -DskipTests" \
-      release:perform
+    maven_options="-DskipTests \
+        -DreleaseVersion=${RELEASE_VERSION} \
+        -DdevelopmentVersion=${NEXT_RELEASE_VERSION}-SNAPSHOT \
+        -Papache-release,nexus \
+        -DdryRun=${DRY_RUN} \
+        -Dmaven.javadoc.skip=true \
+        -Dgpg.passphrase=${GPG_PASSPHRASE} \
+        -DgpgArguments=--no-tty --batch --pinentry-mode loopback \
+        -Dkeyname=$GPG_KEY"
+    run_command "Maven Release Prepare" mvn "${maven_options}" release:prepare
+    run_command "Maven Release Perform" mvn "${maven_options}" release:perform
 
     # Create a directory for this release candidate
     mkdir -p ${release_candidate_folder}
@@ -229,15 +264,15 @@ function publish_snapshot_package {
     ## Sign binary
     echo "publish-release-candidate binary"
     cd "${release_candidate_folder}"
-    run_command "Sign binary" gpg --armor --output 
apache-kylin-"${RELEASE_VERSION}"-bin.tar.gz.asc --detach-sig 
apache-kylin-${RELEASE_VERSION}-bin.tar.gz
-    shasum -a 256 apache-kylin-"${RELEASE_VERSION}"-bin.tar.gz > 
apache-kylin-${RELEASE_VERSION}-bin.tar.gz.sha256
+    run_command "Sign binary" echo $GPG_PASSPHRASE | $GPG_COMMAND 
--passphrase-fd 0 --armour --output 
apache-kylin-"${RELEASE_VERSION}"-bin.tar.gz.asc --detach-sig 
apache-kylin-${RELEASE_VERSION}-bin.tar.gz
+    shasum -a 512 apache-kylin-"${RELEASE_VERSION}"-bin.tar.gz > 
apache-kylin-${RELEASE_VERSION}-bin.tar.gz.sha512
 
 
     ## Upload to svn repository
     ask_confirm "You are going to upload rc, are you sure you have the 
permissions?"
     cd ${svn_stage_folder}
     svn add ${rc_name}
-    run_command "Publish release candidate dir" svn commit -m 'Check in 
release artifacts for '${rc_name}
+    run_command "Publish release candidate dir" svn commit --password 
${ASF_PASSWORD} -m 'Check in release artifacts for '${rc_name}
     echo "Please check $RELEASE_STAGING_LOCATION"
     return 0
 }
@@ -248,29 +283,6 @@ function publish_release {
     return 0
 }
 
-####################################################
-####################################################
-# Script running start from here
-
-prepare_release
-
-if [[ "$RELEASE_STEP" == "reset" ]]; then
-    reset_release
-fi
-
-if [[ "$RELEASE_STEP" == "publish-snapshot" ]]; then
-    publish_snapshot_source
-    publish_snapshot_package
-fi
-
-if [[ "$RELEASE_STEP" == "publish-release" ]]; then
-    publish_release
-fi
-
-if [[ "$RELEASE_STEP" == "preview-site" ]]; then
-    preview_site
-fi
-
 
 ####################################################
 ####################################################
@@ -278,13 +290,13 @@ fi
 
 function preview_site() {
     info "Prepare website"
-    if [ ! -d "${document_folder}" ]; then
-        mkdir -p $document_folder
-        run_command "Install nodejs for docusaurus" nvm install 16.14
+    if [ ! -d "${document_folder_src}" ]; then
+        mkdir -p $document_folder_src
+        run_command "Install nodejs for docusaurus" switch_node_for_website
     fi
-    cd $document_folder
+    cd $document_folder_src
     if [ ! -d "${document_folder_elder}" ]; then
-        run_command "Clone website for kylin4" git clone --branch 
$branch_doc_1 "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO";
+        run_command "Clone website for kylin4" git clone --branch 
$branch_doc_1 "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO"; 
$branch_doc_1
     else
         cd ${document_folder_elder}
         git reset --hard HEAD~4
@@ -292,68 +304,70 @@ function preview_site() {
     fi
 
     if [ ! -d "${document_folder_newer}" ]; then
-        run_command "Clone website for kylin5" git clone --branch 
$branch_doc_2 "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO";
+        run_command "Clone website for kylin5" git clone --branch 
$branch_doc_2 "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_KYLIN_REPO"; 
$branch_doc_2
     else
         cd ${document_folder_newer}
         git reset --hard HEAD~4
         git pull -r origin $branch_doc_2
     fi
 
-    if [ ! -d "${document_folder_preview}" ]; then
-        mkdir ${document_folder_preview}
-    else
-        rm -rf ${document_folder_preview:?}/*
+    if [ ! -d "${document_folder_svn}" ]; then
+        mkdir ${document_folder_svn}
+        run_command "Checkout website files from svn" svn co $WEBSITE_SVN 
${document_folder_svn}
     fi
 
     info "Build website"
 
     # Build inner website
     cd ${document_folder_newer}/website
-    nvm use 16.14
+    # nvm use 16.14
     run_command "Install node modules" npm install
     run_command "Build inner website" npm run build
     document_folder_newer_build=${document_folder_newer}/website/build
 
     # Build outer website
     cd ${document_folder_elder}/website
-    run_command "Build outer website" jekyll b -s website -d 
${document_folder_preview}
+    run_command "Build outer website" jekyll build >>$LOG 2>&1
+    document_folder_elder_build=${document_folder_elder}/_site
 
     # Merge two websites
-    rm -rf ${document_folder_preview}/5.0
-    mv ${document_folder_newer_build} ${document_folder_preview}/5.0
+    rm -rf ${document_folder_elder_build}/5.0
+    mv ${document_folder_newer_build} ${document_folder_elder_build}/5.0
+    info "Build website should be done, and stored in 
${document_folder_elder_build} ."
 
     run_command "Preview merged website" jekyll s -P 7070
     info "Website could be previewed at localhost:7070"
-    return 0
 }
 
 function publish_site() {
     info "Publish website"
-    svn update ${document_folder_preview}
-    svn add --force ${document_folder_preview}/* --auto-props --parents --depth infinity -q
-    svn status ${document_folder_preview}
-    if [ `svn status ${document_folder_preview} | wc -l ` != 1 ];
-        then MSG=`git log --format=oneline | head -1`;svn commit ${document_folder_preview} -m "${MSG:41}";
+    svn update ${document_folder_svn}
+    svn add --force ${document_folder_svn}/* --auto-props --parents --depth infinity -q
+    svn status ${document_folder_svn}
+    if [ `svn status ${document_folder_svn} | wc -l ` != 1 ];
+        then MSG=`git log --format=oneline | head -1`
+        svn commit --password ${ASF_PASSWORD} ${document_folder_svn} -m "${MSG:41}"
     else
         echo "No need to refresh website.";
     fi
-    return 0
 }
 
 
 ####################################################
 ####################################################
-# Following is for GPG Key
+# Script running start from here
 
-function fcreate_secure {
-  local FPATH="$1"
-  rm -f "$FPATH"
-  touch "$FPATH"
-  chmod 600 "$FPATH"
-}
+prepare_release
 
-function import_gpg_key() {
-  GPG="gpg --no-tty --batch"
-  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --import "$SELF/gpg.key"
-  return 0
-}
\ No newline at end of file
+if [[ "$RELEASE_STEP" == "reset" ]]; then
+    reset_release
+elif [[ "$RELEASE_STEP" == "publish-snapshot" ]]; then
+    publish_snapshot_source
+    publish_snapshot_package
+elif [[ "$RELEASE_STEP" == "publish-release" ]]; then
+    publish_release
+elif [[ "$RELEASE_STEP" == "preview-site" ]]; then
+    preview_site
+elif [[ "$RELEASE_STEP" == "publish-site" ]]; then
+    publish_site
+fi
\ No newline at end of file
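
The reworked publish_snapshot_package step above signs the binary with an ASCII-armoured detached signature and switches the checksum from SHA-256 to SHA-512. A minimal sketch (not part of this commit) of how a voter could verify the uploaded artifacts locally, assuming the Kylin KEYS file has already been imported into the local GPG keyring:

```bash
# Verify the detached signature and the SHA-512 checksum of a release candidate.
gpg --verify apache-kylin-5.0.0-beta-bin.tar.gz.asc apache-kylin-5.0.0-beta-bin.tar.gz
shasum -a 512 -c apache-kylin-5.0.0-beta-bin.tar.gz.sha512
```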
diff --git a/build/sbin/bootstrap.sh b/build/sbin/bootstrap.sh
index 3271198015..0d18cf4bff 100755
--- a/build/sbin/bootstrap.sh
+++ b/build/sbin/bootstrap.sh
@@ -251,7 +251,7 @@ function clearCrontab() {
     touch ${logrotateDir}/cron
     crontab -l | while read line
     do
-        if [[ "$line" == *${logrotateDir}/ke* ]];then
+        if [[ "$line" == *${logrotateDir}/kylin* ]];then
             continue
         fi
         echo "$line" >> ${logrotateDir}/cron
@@ -337,7 +337,7 @@ function startKE(){
 
     PID=`cat ${KYLIN_HOME}/pid`
     CUR_DATE=$(date "+%Y-%m-%d %H:%M:%S")
-    echo $CUR_DATE" new KE process pid is "$PID >> ${KYLIN_HOME}/logs/kylin.log
+    echo $CUR_DATE" new Kylin process pid is "$PID >> ${KYLIN_HOME}/logs/kylin.log
 
     sh ${KYLIN_HOME}/bin/guardian.sh start
 
diff --git a/build/sbin/guardian-get-process-status.sh b/build/sbin/guardian-get-process-status.sh
index b80933e33c..c8e34d8e7b 100644
--- a/build/sbin/guardian-get-process-status.sh
+++ b/build/sbin/guardian-get-process-status.sh
@@ -17,7 +17,7 @@
 # limitations under the License.
 #
 
-## DEFAULT: get ke process status
+## DEFAULT: get kylin process status
 ## return code
 ## 0 process is running
 ## 1 process is stopped
diff --git a/build/sbin/log-rotate-cron.sh b/build/sbin/log-rotate-cron.sh
index 71116485e6..468b4383f8 100644
--- a/build/sbin/log-rotate-cron.sh
+++ b/build/sbin/log-rotate-cron.sh
@@ -19,10 +19,10 @@
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh $@
 
 function addCrontab() {
-    logrotateCmd="${cronExpress} /usr/sbin/logrotate -s ${logrotateDir}/status ${logrotateDir}/ke > /dev/null 2>&1"
+    logrotateCmd="${cronExpress} /usr/sbin/logrotate -s ${logrotateDir}/status ${logrotateDir}/kylin > /dev/null 2>&1"
     crontab -l | while read line
     do
-        if [[ "$line" == *${logrotateDir}/ke* ]];then
+        if [[ "$line" == *${logrotateDir}/kylin* ]];then
             continue
         fi
         echo "$line" >> ${logrotateDir}/cron
@@ -37,8 +37,8 @@ function rmCronConf() {
     fi
 }
 
-function creatConf(){
-  cat > ${logrotateDir}/ke <<EOL
+function createConf(){
+  cat > ${logrotateDir}/kylin <<EOL
 ${ERR_LOG} ${OUT_LOG} ${KYLIN_OUT}  {
 size ${file_threshold}M
 rotate ${keep_limit}
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
index 3736e5037e..a8cdcda4c2 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
@@ -125,7 +125,7 @@ public class NSystemController extends NBasicController {
         return this.aclEvaluate;
     }
 
-    @ApiOperation(value = "dump ke inner metadata responding to system kylinconfig")
+    @ApiOperation(value = "dump inner metadata responding to system kylinconfig")
     @GetMapping(value = "/metadata/dump")
     @ResponseBody
     public EnvelopeResponse<String> dumpMetadata(@RequestParam(value = "dump_path") String dumpPath) throws Exception {
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/cache/RedisCache.java b/src/common-service/src/main/java/org/apache/kylin/rest/cache/RedisCache.java
index 8481180789..cf49508eb3 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/cache/RedisCache.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/cache/RedisCache.java
@@ -174,7 +174,7 @@ public class RedisCache implements KylinCache {
         }
         logger.info("The 'kylin.cache.redis.cluster-enabled' is {}", 
redisClusterEnabled);
         if (kylinConfig.isRedisClusterEnabled()) {
-            logger.info("ke will use redis cluster");
+            logger.info("will use redis cluster");
             Set<HostAndPort> hosts = Sets.newHashSet();
             for (String hostAndPort : hostAndPorts) {
                 String host = hostAndPort.substring(0, hostAndPort.lastIndexOf(":"));
@@ -195,7 +195,7 @@ public class RedisCache implements KylinCache {
             }
             logger.warn("jedis cluster is not support ping");
         } else {
-            logger.info("ke will use redis pool. The redis host ke will 
connect to is {}", hostAndPorts[0]);
+            logger.info("will use redis pool. The redis host will connect to 
is {}", hostAndPorts[0]);
             String host = hostAndPorts[0].substring(0, 
hostAndPorts[0].lastIndexOf(":"));
             int port = 
Integer.parseInt(hostAndPorts[0].substring(hostAndPorts[0].lastIndexOf(":") + 
1));
             JedisPoolConfig config = new JedisPoolConfig();
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/runners/FetcherRunner.java b/src/core-job/src/main/java/org/apache/kylin/job/runners/FetcherRunner.java
index 0306c382ae..4494992d8c 100644
--- a/src/core-job/src/main/java/org/apache/kylin/job/runners/FetcherRunner.java
+++ b/src/core-job/src/main/java/org/apache/kylin/job/runners/FetcherRunner.java
@@ -223,7 +223,7 @@ public class FetcherRunner extends AbstractDefaultSchedulerRunner {
                 jobPool.execute(new JobRunner(nDefaultScheduler, executable, this));
                 logger.info("{} scheduled", jobDesc);
             } else {
-                logger.info("memory is not enough, remaining: {} MB , schedule 
job : {}",
+                logger.warn("Memory is not enough, remaining: {} MB , fail to 
schedule job : {}",
                         
NDefaultScheduler.getMemoryRemaining().availablePermits(), 
executable.getDisplayName());
             }
         } catch (Exception ex) {
diff --git a/src/docker/all_in_one/entrypoint.sh b/src/docker/all_in_one/entrypoint.sh
deleted file mode 100644
index b2965d7a86..0000000000
--- a/src/docker/all_in_one/entrypoint.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-echo "127.0.0.1 sandbox" >> /etc/hosts
-
-# clean pid files
-rm -f /tmp/*.pid
-
-# start mysql
-service mysql start
-if [ ! -f "/home/kylin/first_run" ]
-then
-    mysql -uroot -p123456 -e "CREATE DATABASE IF NOT EXISTS kylin default charset utf8mb4 COLLATE utf8mb4_general_ci;"
-fi
-
-# init schema for hive
-if [ ! -f "/home/kylin/first_run" ]
-then
-    schematool -initSchema -dbType mysql
-fi
-
-# start hdfs
-if [ ! -f "/home/kylin/first_run" ]
-then
-    hdfs namenode -format
-fi
-
-hdfs --daemon start namenode
-hdfs --daemon start datanode
-
-# start yarn
-yarn --daemon start resourcemanager
-yarn --daemon start nodemanager
-
-# start job history server
-mapred --daemon start historyserver
-
-# start zk
-rm -rf /data/zookeeper/*
-rm -f /data/zookeeper/zookeeper_server.pid
-$ZK_HOME/bin/zkServer.sh start
-
-sleep 10s
-mkdir -p $KYLIN_HOME/logs
-
-function check_hdfs_usability() {
-    echo "Checking HDFS's service..."
-    started_hdfs=
-    ((time_left = 60))
-    while ((time_left > 0)); do
-        hdfs dfs -test -d /tmp
-        started_hdfs=$?
-        if [[ $started_hdfs -eq 0 ]]; then
-            break
-        fi
-        sleep 5
-        ((timeLeft -= 5))
-    done
-    if [[ $started_hdfs -eq 0 ]]; then
-        echo "HDFS's service started..."
-    else
-        echo "ERROR: Check HDFS's service failed, please check the status of 
your cluster"
-    fi
-}
-
-if [ ! -f "/home/kylin/first_run" ]
-then
-    # check hdfs usability first if hdfs service was not started normally
-    check_hdfs_usability
-    hdfs dfs -mkdir -p /kylin
-fi
-
-# create sample data at the first time
-if [ ! -f "/home/kylin/first_run" ]
-then
-    $KYLIN_HOME/bin/sample.sh >> ${KYLIN_HOME}/logs/kylin-verbose.log 2>&1
-fi
-
-# start kylin
-$KYLIN_HOME/bin/kylin.sh -v start >> ${KYLIN_HOME}/logs/kylin-verbose.log 2>&1
-
-
-
-touch /home/kylin/first_run
-
-while :
-do
-    sleep 10
-done
diff --git a/src/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java b/src/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
index a061de2ab7..a795f2ced6 100644
--- a/src/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
+++ b/src/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
@@ -119,7 +119,7 @@ public class KylinResultSet extends AvaticaResultSet {
         try {
             result = client.executeQuery(sql, params, paramValues, queryToggles, queryId);
         } catch (IOException e) {
-            logger.error("Query KE failed: ", e);
+            logger.error("Query failed: ", e);
             throw new SQLException(e);
         }
 
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/StreamingJobDiagInfoTool.java b/src/tool/src/main/java/org/apache/kylin/tool/StreamingJobDiagInfoTool.java
index aa9e67a307..1bb8d792ac 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/StreamingJobDiagInfoTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/StreamingJobDiagInfoTool.java
@@ -66,18 +66,18 @@ public class StreamingJobDiagInfoTool extends AbstractInfoExtractorTool {
     @SuppressWarnings("static-access")
     private static final Option OPTION_STREAMING_INCLUDE_CONF = OptionBuilder.getInstance().withArgName("includeConf")
             .hasArg().isRequired(false)
-            .withDescription("Specify whether to include ke conf files to 
extract. Default true.")
+            .withDescription("Specify whether to include conf files to 
extract. Default true.")
             .create("includeConf");
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_STREAMING_META = OptionBuilder.getInstance().withArgName("includeMeta").hasArg()
-            .isRequired(false).withDescription("Specify whether to include ke metadata to extract. Default true.")
+            .isRequired(false).withDescription("Specify whether to include metadata to extract. Default true.")
             .create("includeMeta");
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_STREAMING_AUDIT_LOG = OptionBuilder.getInstance().withArgName("includeAuditLog")
             .hasArg().isRequired(false)
-            .withDescription("Specify whether to include ke auditLog to extract. Default true.")
+            .withDescription("Specify whether to include auditLog to extract. Default true.")
             .create("includeAuditLog");
 
     private static final String OPT_STREAMING_JOB = "-job";
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardian.java b/src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardian.java
index 767a31b201..5b12c670f1 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardian.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardian.java
@@ -79,7 +79,7 @@ public class KapGuardian {
             kapGuardian.start();
 
             Runtime.getRuntime().addShutdownHook(new Thread(() -> {
-                logger.info("Guardian Process of KE instance port[{}] 
KYLIN_HOME[{}] stopped",
+                logger.info("Guardian Process of instance port[{}] 
KYLIN_HOME[{}] stopped",
                         kapGuardian.getServerPort(), 
kapGuardian.getKylinHome());
                 kapGuardian.stop();
             }));
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java b/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
index 3c02f32866..89d022018d 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
@@ -50,14 +50,14 @@ public class KEProcessChecker extends AbstractHealthChecker {
             case 0:
                 return new CheckResult(CheckStateEnum.NORMAL);
             case 1:
-                return new CheckResult(CheckStateEnum.SUICIDE, "KE instance is normally stopped");
+                return new CheckResult(CheckStateEnum.SUICIDE, "instance is normally stopped");
             case -1:
-                return new CheckResult(CheckStateEnum.RESTART, "KE Instance is crashed");
+                return new CheckResult(CheckStateEnum.RESTART, "Instance is crashed");
             default:
-                return new CheckResult(CheckStateEnum.WARN, "Unknown ke process status");
+                return new CheckResult(CheckStateEnum.WARN, "Unknown process status");
             }
         } catch (Exception e) {
-            logger.error("Check KE process failed, cmd: {}", cmd, e);
+            logger.error("Check process failed, cmd: {}", cmd, e);
 
             return new CheckResult(CheckStateEnum.WARN,
                     "Execute shell guardian-get-process-status.sh failed. " + 
e.getMessage());
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java b/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
index 9cd4d7f88e..056a0ad144 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
@@ -84,7 +84,7 @@ public class KEStatusChecker extends AbstractHealthChecker {
                     setKgSecretKey(null);
                 }
 
-                throw new IllegalStateException("Get KE health status failed: 
" + response.msg);
+                throw new IllegalStateException("Get health status failed: " + 
response.msg);
             }
 
             Status status = response.getData();
@@ -122,7 +122,7 @@ public class KEStatusChecker extends AbstractHealthChecker {
             failCount = 0;
             return new CheckResult(CheckStateEnum.NORMAL);
         } catch (Exception e) {
-            logger.info("Check KE status failed! ", e);
+            logger.info("Check status failed! ", e);
 
             if (++failCount >= getKylinConfig().getGuardianApiFailThreshold()) {
                 return new CheckResult(CheckStateEnum.RESTART, String.format(Locale.ROOT,
