This is an automated email from the ASF dual-hosted git repository.
xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/kylin5 by this push:
new 06ba9dc5d6 KYLIN-5225 fix wrong name and classpath in shell in kylin5.0 (#1933)
06ba9dc5d6 is described below
commit 06ba9dc5d62a4802b325e2250178331230c5a058
Author: zznlime <[email protected]>
AuthorDate: Fri Aug 12 09:37:07 2022 +0800
KYLIN-5225 fix wrong name and classpath in shell in kylin5.0 (#1933)
* [Kylin-5225] Fix wrong name and classpath in shell in kylin5.0
* Kylin-5225 remove useless shell upgrade.sh.
---
build/bin/admin-tool.sh | 2 +-
build/bin/check-env.sh | 2 +-
build/bin/diag.sh | 8 +-
build/bin/grafana.sh | 6 +-
build/bin/guardian.sh | 18 +-
build/bin/metastore.sh | 12 +-
build/bin/rollback.sh | 2 +-
build/bin/upgrade.sh | 321 ---------------------
build/sbin/bootstrap.sh | 22 +-
build/sbin/check-1400-java.sh | 2 +-
build/sbin/check-1401-kylin-config.sh | 2 +-
build/sbin/check-1500-ports.sh | 2 +-
build/sbin/check-1700-spark-kystorage.sh | 2 +-
build/sbin/find-working-dir.sh | 4 +-
build/sbin/setenv.sh | 2 +-
build/sbin/spark-test.sh | 16 +-
.../org/apache/kylin/junit/MetadataExtension.java | 2 +-
...ordResetCLI.java => KylinPasswordResetCLI.java} | 4 +-
...CLITest.java => KylinPasswordResetCLITest.java} | 8 +-
19 files changed, 58 insertions(+), 379 deletions(-)
diff --git a/build/bin/admin-tool.sh b/build/bin/admin-tool.sh
index 50c80dc059..980d05e892 100644
--- a/build/bin/admin-tool.sh
+++ b/build/bin/admin-tool.sh
@@ -45,7 +45,7 @@ if [[ "$1" == "admin-password-reset" ]]; then
exit 1
fi
- ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.security.KapPasswordResetCLI
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.security.KylinPasswordResetCLI
ret=$?
printAdminPasswordResetResult ${ret}
else
diff --git a/build/bin/check-env.sh b/build/bin/check-env.sh
index 78ea6109d2..2cbd764a9f 100755
--- a/build/bin/check-env.sh
+++ b/build/bin/check-env.sh
@@ -41,7 +41,7 @@ if [[ "$CHECKENV_ING" == "" ]]; then
if [[ "$1" != "if-not-yet" || ! -f ${BYPASS} ]]; then
echo ""
- echo `setColor 33 "Kyligence Enterprise is checking installation environment, log is at ${LOG}"`
+ echo `setColor 33 "Kylin is checking installation environment, log is at ${LOG}"`
echo ""
rm -rf ${KYLIN_HOME}/logs/tmp
diff --git a/build/bin/diag.sh b/build/bin/diag.sh
index 26ffaf3e6a..9f18320ce0 100755
--- a/build/bin/diag.sh
+++ b/build/bin/diag.sh
@@ -122,11 +122,11 @@ if [[ ${INCLUDE_AUDIT_LOG} == "false" ]]; then
fi
if [[ ${DIAG_OPTS} == *" -job"* ]]; then
- runTool io.kyligence.kap.tool.JobDiagInfoCLI ${DIAG_OPTS}
+ runTool org.apache.kylin.tool.JobDiagInfoCLI ${DIAG_OPTS}
elif [[ ${DIAG_OPTS} == *" -streamingJob"* ]]; then
- runTool io.kyligence.kap.tool.StreamingJobDiagInfoCLI ${DIAG_OPTS}
+ runTool org.apache.kylin.tool.StreamingJobDiagInfoCLI ${DIAG_OPTS}
elif [[ ${DIAG_OPTS} == *" -query"* ]]; then
- runTool io.kyligence.kap.tool.QueryDiagInfoCLI ${DIAG_OPTS}
+ runTool org.apache.kylin.tool.QueryDiagInfoCLI ${DIAG_OPTS}
else
- runTool io.kyligence.kap.tool.DiagClientCLI ${DIAG_OPTS}
+ runTool org.apache.kylin.tool.DiagClientCLI ${DIAG_OPTS}
fi
\ No newline at end of file
diff --git a/build/bin/grafana.sh b/build/bin/grafana.sh
index 5c28a94c62..3dc929eeac 100755
--- a/build/bin/grafana.sh
+++ b/build/bin/grafana.sh
@@ -54,8 +54,8 @@ function startGrafana(){
influxdb_unsafe_ssl_enabled=`$KYLIN_HOME/bin/get-properties.sh kylin.influxdb.https.unsafe-ssl.enabled`
influxdb_unsafe_ssl_enabled=$([[ "${influxdb_https_enabled}" == "true" ]] && [[ "${influxdb_unsafe_ssl_enabled}" == "true" || -z "${influxdb_unsafe_ssl_enabled}" ]] && echo "true" || echo "false")
- export KE_METRICS_DATABASE=${metadata_url_prefix}_${metrics_db_suffix}
- export KE_METRICS_DAILY_DATABASE=${metadata_url_prefix}_${metrics_daily_db_suffix}
+ export KYLIN_METRICS_DATABASE=${metadata_url_prefix}_${metrics_db_suffix}
+ export KYLIN_METRICS_DAILY_DATABASE=${metadata_url_prefix}_${metrics_daily_db_suffix}
export INFLUXDB_PROTOCOL=$([[ "$influxdb_https_enabled" == "true" ]] && echo "https" || echo "http")
export INFLUXDB_ADDRESS=`$KYLIN_HOME/bin/get-properties.sh kylin.influxdb.address`
export INFLUXDB_USERNAME=`$KYLIN_HOME/bin/get-properties.sh kylin.influxdb.username`
@@ -67,7 +67,7 @@ function startGrafana(){
echo "Influxdb Connect Protocol: $INFLUXDB_PROTOCOL"
echo "Influxdb Address: $INFLUXDB_ADDRESS"
- echo "Metrics Database: $KE_METRICS_DATABASE"
+ echo "Metrics Database: $KYLIN_METRICS_DATABASE"
if [[ -f "${KYLIN_HOME}/conf/grafana.ini" ]]; then
nohup bin/grafana-server --config ${KYLIN_HOME}/conf/grafana.ini web > /dev/null 2>&1 &
diff --git a/build/bin/guardian.sh b/build/bin/guardian.sh
index 18c314dbe4..57a1c620c7 100755
--- a/build/bin/guardian.sh
+++ b/build/bin/guardian.sh
@@ -29,7 +29,7 @@ function startKG() {
mkdir -p ${KYLIN_HOME}/logs
- echo `date "${time_format} "`"Starting KE guardian process..."
+ echo `date "${time_format} "`"Starting Kylin guardian process..."
### lock the start process
LOCK_NAME="$KYLIN_HOME/bin/kg-start.lock"
@@ -41,13 +41,13 @@ function startKG() {
if [[ -f ${KGID_FILE} ]]; then
PID=`cat ${KGID_FILE}`
if ps -p ${PID} > /dev/null; then
- quit "KE guardian process is running, stop it first"
+ quit "Kylin guardian process is running, stop it first"
fi
fi
KE_PID_FILE=${KYLIN_HOME}/pid
if [[ ! -f ${KE_PID_FILE} ]]; then
- quit "Kyligence Enterprise is not running, will not start guardian
process"
+ quit "Kylin is not running, will not start guardian process"
fi
if [[ -f ${KYLIN_HOME}/conf/kylin-guardian-log4j.xml ]]; then
@@ -69,7 +69,7 @@ function startKG() {
PID=`cat ${KYLIN_HOME}/kgid`
echo `date "${time_format} "`" new guardian process pid is "${PID} >> ${KYLIN_HOME}/logs/guardian.log
- echo `date "${time_format} "`"KE guardian process is started"
+ echo `date "${time_format} "`"Kylin guardian process is started"
echo `date "${time_format} "`"Check log in ${KYLIN_HOME}/logs/guardian.log"
### Removing lock
@@ -87,7 +87,7 @@ function stopKG() {
return 0
fi
- echo `date "${time_format} "`"Stopping KE guardian process..."
+ echo `date "${time_format} "`"Stopping Kylin guardian process..."
PID_FILE=${KYLIN_HOME}/kgid
@@ -114,7 +114,7 @@ function stopKG() {
fi
fi
- echo `date "${time_format} "`"KE guardian process is not running"
+ echo `date "${time_format} "`"Kylin guardian process is not running"
return 1
}
@@ -123,9 +123,9 @@ if [[ $1 == "start" ]]; then
elif [[ $1 == "stop" ]]; then
stopKG
elif [[ $1 == "kill" ]]; then
- echo `date "${time_format} "`"Killing Kyligence Enterprise, caused by OOM!"
+ echo `date "${time_format} "`"Killing Kylin, caused by OOM!"
- # stop KE
+ # stop Kylin
PID_FILE=${KYLIN_HOME}/pid
if [[ -f ${PID_FILE} ]]; then
PID=`cat ${PID_FILE}`
@@ -147,7 +147,7 @@ elif [[ $1 == "kill" ]]; then
exit 0
fi
fi
- quit `date "${time_format} "`"Kyligence Enterprise is not running"
+ quit `date "${time_format} "`"Kylin is not running"
else
quit "Usage: 'guardian.sh start' or 'guardian.sh stop' or 'guardian.sh
kill'"
fi
\ No newline at end of file
diff --git a/build/bin/metastore.sh b/build/bin/metastore.sh
index 0b46474f57..05c998eae1 100755
--- a/build/bin/metastore.sh
+++ b/build/bin/metastore.sh
@@ -78,7 +78,7 @@ function check_path_empty_dir() {
}
function turn_on_maintain_mode() {
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MaintainModeTool -on -reason 'metastore tool' -hidden-output true
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MaintainModeTool -on -reason 'metastore tool' -hidden-output true
local ret=$?
if [[ $ret != 0 ]]; then
echo -e "${YELLOW}Enter Maintain Mode failed. Detailed Message is at
\"logs/shell.stderr\".${RESTORE}"
@@ -87,7 +87,7 @@ function turn_on_maintain_mode() {
}
function turn_off_maintain_mode() {
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MaintainModeTool -off -hidden-output true
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MaintainModeTool -off -hidden-output true
local ret=$?
if [[ $ret != 0 ]]; then
echo -e "${YELLOW}Exit Maintain Mode failed. Detailed Message is at
\"logs/shell.stderr\".${RESTORE}"
@@ -101,7 +101,7 @@ function restore_all() {
check_path_empty_dir ${path}
turn_on_maintain_mode
printEnterMaintainModeResult
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MetadataTool -restore -dir ${path} ${2}
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MetadataTool -restore -dir ${path} ${2}
printRestoreResult $?
turn_off_maintain_mode
printExitMaintainModeResult
@@ -113,7 +113,7 @@ function restore_project() {
check_path_empty_dir ${path}
turn_on_maintain_mode
printEnterMaintainModeResult
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MetadataTool -restore -dir ${path} -project ${2} ${3}
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MetadataTool -restore -dir ${path} -project ${2} ${3}
printRestoreResult $?
turn_off_maintain_mode
printExitMaintainModeResult
@@ -131,7 +131,7 @@ then
help
fi
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MetadataTool ${BACKUP_OPTS}
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MetadataTool ${BACKUP_OPTS}
printBackupResult $?
elif [ "$1" == "restore" ]
@@ -155,7 +155,7 @@ then
help
fi
BACKUP_OPTS="${BACKUP_OPTS} -project $2"
- ${KYLIN_HOME}/bin/kylin.sh io.kyligence.kap.tool.MetadataTool ${BACKUP_OPTS}
+ ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MetadataTool ${BACKUP_OPTS}
printBackupResult $?
elif [ "$1" == "restore-project" ]
diff --git a/build/bin/rollback.sh b/build/bin/rollback.sh
index 300fcb9361..ecfa417934 100644
--- a/build/bin/rollback.sh
+++ b/build/bin/rollback.sh
@@ -75,7 +75,7 @@ function main() {
source ${KYLIN_HOME}/sbin/prepare-hadoop-conf-dir.sh
export SPARK_HOME=${KYLIN_HOME}/spark
- java -Xms${JAVA_VM_TOOL_XMS} -Xmx${JAVA_VM_TOOL_XMX} -cp "${kylin_hadoop_conf_dir}:${KYLIN_HOME}/lib/ext/*:${KYLIN_HOME}/server/jars/*:${SPARK_HOME}/jars/*" io.kyligence.kap.tool.RollbackTool -time "$TIME" $PROJECT_SECTION $SKIP_CHECK_DATA_SECTION 2>>${ERR_LOG} | tee -a ${OUT_LOG}
+ java -Xms${JAVA_VM_TOOL_XMS} -Xmx${JAVA_VM_TOOL_XMX} -cp "${kylin_hadoop_conf_dir}:${KYLIN_HOME}/lib/ext/*:${KYLIN_HOME}/server/jars/*:${SPARK_HOME}/jars/*" org.apache.kylin.tool.RollbackTool -time "$TIME" $PROJECT_SECTION $SKIP_CHECK_DATA_SECTION 2>>${ERR_LOG} | tee -a ${OUT_LOG}
printBackupResult ${PIPESTATUS[0]}
}
diff --git a/build/bin/upgrade.sh b/build/bin/upgrade.sh
deleted file mode 100644
index cd172b9080..0000000000
--- a/build/bin/upgrade.sh
+++ /dev/null
@@ -1,321 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-function help() {
- echo "Usage: upgrade.sh <OLD_KYLIN_HOME> [--silent]"
- echo
- echo "<OLD_KYLIN_HOME> Specify the old version of the Kyligence
Enterprise"
- echo " installation directory."
- echo
- echo "--silent Optional, don't enter interactive mode,
automatically complete the upgrade."
- exit 1
-}
-
-function info() {
- echo -e "\033[32m$@\033[0m"
-}
-
-function warn() {
- echo -e "\033[33m$@\033[0m"
-}
-
-function error() {
- echo -e "\033[31m$@\033[0m"
-}
-
-function logging() {
- case $1 in
- "info") shift; info $@ ;;
- "warn") shift; warn $@ ;;
- "error") shift; error $@ ;;
- *) echo -e $@ ;;
- esac
-
- (echo -e `date '+%F %H:%M:%S'` $@ >> $upgrade_log)
-}
-
-function fail() {
- error "...................................................[FAIL]"
- error "Upgrade Kyligence Enterprise failed."
- recordKylinUpgradeResult "${START_TIME}" "false" "${NEW_KYLIN_HOME}"
- exit 1
-}
-
-function prompt() {
- if [[ $silent -eq 0 ]]; then
- return 0
- fi
-
- read -p "$@ (y/n) > " answer
- if [[ -z $answer ]] || [[ $answer == "y" ]]; then
- return 0
- else
- return 1
- fi
-}
-
-function check_kylin_query_transformers() {
- query_transformers=""
- if [[ -f ${OLD_KYLIN_HOME}/conf/kylin.properties.override ]]; then
- query_transformers=$(sed -n '/^kylin.query.transformers/p' ${OLD_KYLIN_HOME}/conf/kylin.properties.override)
- fi
-
- if [[ -z "${query_transformers}" && -f ${OLD_KYLIN_HOME}/conf/kylin.properties ]]; then
- query_transformers=$(sed -n '/^kylin.query.transformers/p' ${OLD_KYLIN_HOME}/conf/kylin.properties)
- fi
-
- if [[ -n "${query_transformers}" && (! ${query_transformers} =~ io.kyligence.kap.query.security.RowFilter) ]]; then
- error "Please check the value of the configuration item [kylin.query.transformers] in kylin.properties or kylin.properties.override, which needs to include [org.apache.kylin.query.security.RowFilter] class."
- exit 1
- fi
-}
-
-function upgrade() {
-
- check_kylin_query_transformers
-
- # needed by km
- if [[ -f ${OLD_KYLIN_HOME}/pid ]]; then
- PID=`cat ${OLD_KYLIN_HOME}/pid`
- if ps -p $PID > /dev/null; then
- error "Please stop the Kyligence Enterprise during the upgrade
process."
- exit 1
- fi
- fi
-
- if [[ -f ${OLD_KYLIN_HOME}/grafana/pid ]]; then
- PID=`cat ${OLD_KYLIN_HOME}/grafana/pid`
- if ps -p $PID > /dev/null; then
- error "Please stop the Grafana during the upgrade process."
- exit 1
- fi
- fi
-
- echo `date '+%Y-%m-%d %H:%M:%S '`"INFO : [Operation: upgrade] user:`whoami`, upgrade time:${START_TIME}" >> ${NEW_KYLIN_HOME}/logs/security.log
- origin_version=$(awk '{print $NF}' ${OLD_KYLIN_HOME}/VERSION)
- target_version=$(awk '{print $NF}' ${NEW_KYLIN_HOME}/VERSION)
- echo
- logging "warn" "Upgrade Kyligence Enterprise from ${origin_version} to ${target_version}"
- warn "Old KYLIN_HOME is ${OLD_KYLIN_HOME}, log is at ${upgrade_log}"
- echo
-
- # copy LICENSE
- logging "Copy LICENSE"
- if [[ -f ${OLD_KYLIN_HOME}/LICENSE ]]; then
- if prompt "'${OLD_KYLIN_HOME}/LICENSE' -> '${NEW_KYLIN_HOME}/'"; then
- \cp -vf ${OLD_KYLIN_HOME}/LICENSE ${NEW_KYLIN_HOME}/ >> $upgrade_log || fail
- fi
- fi
- info "...................................................[DONE]"
-
- # copy kylin conf
- # exclude 'profile*' directory
- logging "Copy Kylin Conf"
- for conf_file in $(ls $OLD_KYLIN_HOME/conf); do
- if prompt "'${OLD_KYLIN_HOME}/conf/${conf_file}' ->
'${NEW_KYLIN_HOME}/conf/'"; then
- if [[ -d ${OLD_KYLIN_HOME}/conf/${conf_file} ]]; then
- # silent copy directory
- \cp -rfv ${OLD_KYLIN_HOME}/conf/${conf_file}
${NEW_KYLIN_HOME}/conf/ >> $upgrade_log || fail
- else
- # need to delete the symbolic link first
- \cp -vf --remove-destination
${OLD_KYLIN_HOME}/conf/${conf_file} ${NEW_KYLIN_HOME}/conf/ >> $upgrade_log ||
fail
- fi
-
- fi
- done
- info "...................................................[DONE]"
-
- # copy ext jars
- # copy ext/mysql*.jar to spark/jars
- logging "Copy Ext Jars"
- for jar_file in $(ls $OLD_KYLIN_HOME/lib/ext); do
- if prompt "'${OLD_KYLIN_HOME}/lib/ext/${jar_file}' ->
'${NEW_KYLIN_HOME}/lib/ext/'"; then
- \cp -vf ${OLD_KYLIN_HOME}/lib/ext/${jar_file}
${NEW_KYLIN_HOME}/lib/ext/ >> $upgrade_log || fail
- fi
-
- if [[ ${jar_file}} == mysql* ]];
- then
- if prompt "'${OLD_KYLIN_HOME}/lib/ext/${jar_file}' ->
'${NEW_KYLIN_HOME}/spark/jars/'"; then
- \cp -vf ${OLD_KYLIN_HOME}/lib/ext/${jar_file}
${NEW_KYLIN_HOME}/spark/jars/ >> $upgrade_log || fail
- fi
- fi
- done
- info "...................................................[DONE]"
-
- # copy mysql connector jar to spark jars dir for apache hadoop platform
- APACHE_HADOOP_CONF_DIR=`${NEW_KYLIN_HOME}/bin/get-properties.sh kylin.env.apache-hadoop-conf-dir`
- if [ -n "${APACHE_HADOOP_CONF_DIR}" ]; then
- logging "Copy mysql connector jar to spark jars dir for apache hadoop platform"
- \cp -vf ${OLD_KYLIN_HOME}/lib/ext/mysql-connector-*.jar ${NEW_KYLIN_HOME}/spark/jars/ >> $upgrade_log || fail
- info "...................................................[DONE]"
- fi
-
- # copy the customize directory under old kylin home
- # such as hadoop_conf
- logging "Copy Customize Directory"
- OLDIFS=$IFS
- IFS=$'\n'
- for diff_log in $(diff -qr $OLD_KYLIN_HOME $NEW_KYLIN_HOME); do
- if [[ $diff_log =~ (^Only in ${OLD_KYLIN_HOME}: )(.*) ]]; then
- diff_file=${BASH_REMATCH[2]}
- if [[ $diff_file == "meta_backups" || $diff_file == "appid" ||
$diff_file == "work" ]]; then
- continue
- fi
- if prompt "'${OLD_KYLIN_HOME}/${diff_file}' ->
'${NEW_KYLIN_HOME}/'"; then
- cp -rfv ${OLD_KYLIN_HOME}/${diff_file} ${NEW_KYLIN_HOME}/ >>
$upgrade_log || fail
- fi
- fi
- done
- IFS=$OLDIFS
- info "...................................................[DONE]"
-
- # Ensure krb5.conf underlying hadoop_conf if kerberos enabled
- logging "Copy krb5.conf"
- if [[ -f ${OLD_KYLIN_HOME}/conf/krb5.conf ]]; then
- if [[ -d ${NEW_KYLIN_HOME}/hadoop_conf ]]; then
- cp -rfv ${OLD_KYLIN_HOME}/conf/krb5.conf ${NEW_KYLIN_HOME}/hadoop_conf
- fi
-
- if [[ -d ${NEW_KYLIN_HOME}/write_hadoop_conf ]]; then
- cp -rfv ${OLD_KYLIN_HOME}/conf/krb5.conf ${NEW_KYLIN_HOME}/write_hadoop_conf
- fi
- fi
- info "...................................................[DONE]"
-
- logging "Copy hive*-site.xml for spark3"
- if [[ -f ${OLD_KYLIN_HOME}/hadoop_conf/hive-site.xml ]]; then
- if [[ -d ${NEW_KYLIN_HOME}/hadoop_conf ]]; then
- cp -rfv ${OLD_KYLIN_HOME}/hadoop_conf/hive-site.xml ${NEW_KYLIN_HOME}/hadoop_conf/hiveserver2-site.xml
- cp -rfv ${OLD_KYLIN_HOME}/hadoop_conf/hive-site.xml ${NEW_KYLIN_HOME}/hadoop_conf/hivemetastore-site.xml
- fi
-
- if [[ -d ${NEW_KYLIN_HOME}/write_hadoop_conf ]]; then
- cp -rfv ${OLD_KYLIN_HOME}/write_hadoop_conf/hive-site.xml ${NEW_KYLIN_HOME}/write_hadoop_conf/hiveserver2-site.xml
- cp -rfv ${OLD_KYLIN_HOME}/write_hadoop_conf/hive-site.xml ${NEW_KYLIN_HOME}/write_hadoop_conf/hivemetastore-site.xml
- fi
- fi
- info "...................................................[DONE]"
-
- # copy spark-env for spark3
- logging "Copy spark-env for spark3"
- if [[ -f ${OLD_KYLIN_HOME}/spark/conf/spark-env.sh ]]; then
- if prompt "'${OLD_KYLIN_HOME}/spark/conf/spark-env.sh' ->
'${NEW_KYLIN_HOME}/spark/conf/'"; then
- \cp -vf ${OLD_KYLIN_HOME}/spark/conf/spark-env.sh
${NEW_KYLIN_HOME}/spark/conf/ >> $upgrade_log || fail
- fi
- fi
- info "...................................................[DONE]"
-
- # sed -nE 's/^([#\t ]*)(kylin\..*|kap\..*)/\2/p' kylin.properties | awk '{kv[substr($0,0,index($0,"=")-1)]=substr($0,index($0,"=")+1)} END{print kv["kylin.metadata.url"]}'
- logging "Checking Kylin Conf"
-python <<PY
-from __future__ import print_function
-import os
-import sys
-try:
- import commands as cmd
-except ImportError:
- import subprocess as cmd
-
-def printer(msg, *outs):
- for o in outs: print(msg, file=o)
-
-def getProp(prop_file):
- if not os.path.exists(prop_file):
- return dict()
-
- output = cmd.getoutput("sed -nE 's/^([#\\\\t ]*)(kylin\\..*=.*|kap\\..*=.*)/\\\\2/p' %s" % prop_file)
- prop = dict()
- for x in output.split('\n'):
- if x.strip() == '':
- continue
- prop[x[0: x.index('=')]] = x[x.index('=') + 1:]
- return prop
-
-with open('${upgrade_log}', 'a+') as upgrade_log:
- origin_prop = getProp('${NEW_KYLIN_HOME}/conf/kylin.properties')
- prod_prop = dict(getProp('${NEW_KYLIN_HOME}/conf/kylin.properties'), **getProp('${NEW_KYLIN_HOME}/conf/kylin.properties.override'))
- diffs = set(prod_prop.items()) - set(origin_prop.items())
-
- def logging(msg):
- printer(msg, sys.stdout, upgrade_log)
-
- for diff in diffs:
- logging(diff)
-PY
- info "...................................................[DONE]"
-
- logging "Install"
- if prompt "'${NEW_KYLIN_HOME}' -> '${OLD_KYLIN_HOME}'"; then
- install_dir=$(dirname $OLD_KYLIN_HOME)
- home_name=$(basename $OLD_KYLIN_HOME)
-
- # backup
- now=`date '+%Y%m%d%H%M'`
- backup_file=${home_name}_${now}.tar.gz
- cd $install_dir && tar -zcvf ${backup_file} ${home_name} >> $upgrade_log || fail
-
- # install
- rm -rfv ${OLD_KYLIN_HOME} >> $upgrade_log || fail
- mv -vf ${NEW_KYLIN_HOME} ${OLD_KYLIN_HOME} >> $upgrade_log || fail
- info "...................................................[DONE]"
- recordKylinUpgradeResult "${START_TIME}" "true" "${OLD_KYLIN_HOME}"
- info "Upgrade finished!"
- # needed by km
- info "Backup location:${install_dir}/${backup_file}"
- else
- warn "...................................................[SKIP]"
- recordKylinUpgradeResult "${START_TIME}" "true" "${NEW_KYLIN_HOME}"
- info "Upgrade aborted because you chose to stop"
- fi
-
-}
-
-function recordKylinUpgradeResult() {
- logLevel=`[ "$2" == "true" ] && echo INFO || echo ERROR`
- echo `date '+%Y-%m-%d %H:%M:%S '`"${logLevel} : [Operation: upgrade result] user:`whoami`, upgrade time:$1, success:$2" >> $3/logs/security.log
-}
-
-NEW_KYLIN_HOME=$(cd `dirname -- $0` && cd ../ && pwd -P)
-silent=1
-while [[ $# != 0 ]]; do
- if [[ $1 == "--silent" ]]; then
- silent=0
- else
- OLD_KYLIN_HOME=$(cd $1 && pwd)
- fi
- shift
-done
-
-if [[ -z $OLD_KYLIN_HOME ]] || [[ ! -d $OLD_KYLIN_HOME ]]; then
- help
-fi
-
-if [[ $OLD_KYLIN_HOME == $NEW_KYLIN_HOME ]]; then
- error "Please specify the old version of the Kyligence Enterprise
installation directory."
- help
-fi
-
-mkdir -p ${NEW_KYLIN_HOME}/logs
-upgrade_log=${NEW_KYLIN_HOME}/logs/upgrade-$(date '+%Y_%m_%d_%H_%M_%S').log
-
-set -o errexit
-set -o pipefail
-START_TIME=$(date "+%Y-%m-%d %H:%M:%S")
-upgrade
-
diff --git a/build/sbin/bootstrap.sh b/build/sbin/bootstrap.sh
index ad809c6c6d..3271198015 100755
--- a/build/sbin/bootstrap.sh
+++ b/build/sbin/bootstrap.sh
@@ -92,7 +92,7 @@ function checkIfStopUserSameAsStartUser() {
currentUser=`whoami`
if [ ${startUser} != ${currentUser} ]; then
- echo `setColor 33 "Warning: You started Kyligence Enterprise as user
[${startUser}], please stop the instance as the same user."`
+ echo `setColor 33 "Warning: You started Kylin as user [${startUser}],
please stop the instance as the same user."`
fi
}
@@ -221,7 +221,7 @@ function clearRedundantProcess {
then
pidKeep=$pid
else
- echo "Redundant Kyligence Enterprise process $pid to
running process $pidKeep, stop it."
+ echo "Redundant Kylin process $pid to running process
$pidKeep, stop it."
bash ${KYLIN_HOME}/sbin/kill-process-tree.sh $pid
((pidRedundant+=1))
fi
@@ -235,7 +235,7 @@ function clearRedundantProcess {
fi
if [ "$pidRedundant" -ne 0 ]
then
- quit "Kyligence Enterprise is redundant, start canceled."
+ quit "Kylin is redundant, start canceled."
fi
fi
}
@@ -341,7 +341,7 @@ function startKE(){
sh ${KYLIN_HOME}/bin/guardian.sh start
- echo "Kyligence Enterprise is starting. It may take a while. For status,
please visit http://`hostname`:$port/kylin/index.html."
+ echo "Kylin is starting. It may take a while. For status, please visit
http://`hostname`:$port/kylin/index.html."
echo "You may also check status via: PID:`cat ${KYLIN_HOME}/pid`, or Log:
${KYLIN_HOME}/logs/kylin.log."
recordKylinStartOrStop "start success" "${START_TIME}"
}
@@ -394,26 +394,26 @@ if [[ "$1" == org.apache.kylin.* ]]; then
runTool "$@"
# start command
elif [ "$1" == "start" ]; then
- echo "Starting Kyligence Enterprise..."
+ echo "Starting Kylin..."
startKE
# stop command
elif [ "$1" == "stop" ]; then
- echo `date '+%Y-%m-%d %H:%M:%S '`"Stopping Kyligence Enterprise..."
+ echo `date '+%Y-%m-%d %H:%M:%S '`"Stopping Kylin..."
stopKE
if [[ $? == 0 ]]; then
exit 0
else
- quit "Kyligence Enterprise is not running"
+ quit "Kylin is not running"
fi
# restart command
elif [ "$1" == "restart" ]; then
- echo "Restarting Kyligence Enterprise..."
- echo "--> Stopping Kyligence Enterprise first if it's running..."
+ echo "Restarting Kylin..."
+ echo "--> Stopping Kylin first if it's running..."
stopKE
if [[ $? != 0 ]]; then
- echo " Kyligence Enterprise is not running, now start it"
+ echo " Kylin is not running, now start it"
fi
- echo "--> Starting Kyligence Enterprise..."
+ echo "--> Starting Kylin..."
startKE
else
quit "Usage: 'kylin.sh [-v] start' or 'kylin.sh [-v] stop' or 'kylin.sh
[-v] restart'"
diff --git a/build/sbin/check-1400-java.sh b/build/sbin/check-1400-java.sh
index d8aa31ec06..5f0f431c4d 100755
--- a/build/sbin/check-1400-java.sh
+++ b/build/sbin/check-1400-java.sh
@@ -26,5 +26,5 @@ echo "Checking Java version..."
$JAVA -version 2>&1 || quit "ERROR: Detect java version failed. Please set JAVA_HOME."
if [[ `isValidJavaVersion` == "false" ]]; then
- quit "ERROR: Java 1.8 or above is required for Kyligence Enterprise"
+ quit "ERROR: Java 1.8 or above is required for Kylin"
fi
diff --git a/build/sbin/check-1401-kylin-config.sh b/build/sbin/check-1401-kylin-config.sh
index dbda062b52..af5bc24dac 100755
--- a/build/sbin/check-1401-kylin-config.sh
+++ b/build/sbin/check-1401-kylin-config.sh
@@ -36,7 +36,7 @@ if [[ -f ${KYLIN_HOME}/conf/kylin-tools-log4j.xml ]]; then
fi
mkdir -p ${KYLIN_HOME}/logs
-error_config=`java -Dlog4j.configurationFile=${kylin_tools_log4j} -cp "${KYLIN_HOME}/lib/ext/*:${KYLIN_HOME}/server/jars/*:${SPARK_HOME}/jars/*" io.kyligence.kap.tool.KylinConfigCheckCLI 2>>${KYLIN_HOME}/logs/shell.stderr`
+error_config=`java -Dlog4j.configurationFile=${kylin_tools_log4j} -cp "${KYLIN_HOME}/lib/ext/*:${KYLIN_HOME}/server/jars/*:${SPARK_HOME}/jars/*" org.apache.kylin.tool.KylinConfigCheckCLI 2>>${KYLIN_HOME}/logs/shell.stderr`
if [[ -n $error_config ]]; then
diff --git a/build/sbin/check-1500-ports.sh b/build/sbin/check-1500-ports.sh
index 16959c591a..13b7e40d5d 100755
--- a/build/sbin/check-1500-ports.sh
+++ b/build/sbin/check-1500-ports.sh
@@ -29,4 +29,4 @@ if [[ -z ${kylin_port} ]]; then
fi
kylin_port_in_use=`netstat -tlpn | grep "\b${kylin_port}\b"`
-[[ -z ${kylin_port_in_use} ]] || quit "ERROR: Port ${kylin_port} is in use, another Kyligence Enterprise server is running?"
+[[ -z ${kylin_port_in_use} ]] || quit "ERROR: Port ${kylin_port} is in use, another Kylin server is running?"
diff --git a/build/sbin/check-1700-spark-kystorage.sh b/build/sbin/check-1700-spark-kystorage.sh
index 11564c09f0..9840961d7f 100755
--- a/build/sbin/check-1700-spark-kystorage.sh
+++ b/build/sbin/check-1700-spark-kystorage.sh
@@ -61,7 +61,7 @@ key_executor_instance="kylin.storage.columnar.spark-conf.spark.executor.instance
mkdir -p ${KYLIN_HOME}/logs
saveFileName=${KYLIN_HOME}/logs/cluster.info
-${KYLIN_HOME}/sbin/bootstrap.sh io.kyligence.kap.tool.setup.KapGetClusterInfo ${saveFileName}
+${KYLIN_HOME}/sbin/bootstrap.sh org.apache.kylin.tool.setup.KapGetClusterInfo ${saveFileName}
if [ $? != 0 ]; then
echo "${CHECKENV_REPORT_PFX}WARN: Failed to get cluster' info, skip the
spark config suggestion."
diff --git a/build/sbin/find-working-dir.sh b/build/sbin/find-working-dir.sh
index 2a6fe1a7db..071b2ecdf2 100755
--- a/build/sbin/find-working-dir.sh
+++ b/build/sbin/find-working-dir.sh
@@ -35,5 +35,5 @@ fi
final_working_dir=${WORKING_DIR}
-export KAP_HADOOP_PARAM=${hadoop_conf_param}
-export KAP_WORKING_DIR=${final_working_dir}
\ No newline at end of file
+export KYLIN_HADOOP_PARAM=${hadoop_conf_param}
+export KYLIN_WORKING_DIR=${final_working_dir}
\ No newline at end of file
diff --git a/build/sbin/setenv.sh b/build/sbin/setenv.sh
index 8f12813ea3..b41c8e985c 100755
--- a/build/sbin/setenv.sh
+++ b/build/sbin/setenv.sh
@@ -26,7 +26,7 @@ if [[ -d "/data/external-catalog" ]];then
fi
if [[ `isValidJavaVersion` == "false" ]]; then
- quit "ERROR: Java 1.8 or above is required for Kyligence Enterprise"
+ quit "ERROR: Java 1.8 or above is required for Kylin"
fi
if [[ -f "${KYLIN_HOME}/conf/setenv.sh" ]]; then
diff --git a/build/sbin/spark-test.sh b/build/sbin/spark-test.sh
index b3b246cb77..e74f59987a 100755
--- a/build/sbin/spark-test.sh
+++ b/build/sbin/spark-test.sh
@@ -39,8 +39,8 @@ then
export LOG4J_DIR=${KYLIN_HOME}/build/conf
export SPARK_DIR=${KYLIN_HOME}/build/spark/
export KYLIN_SPARK_TEST_JAR_PATH=`ls $KYLIN_HOME/src/assembly/target/kap-assembly-*.jar`
- export KAP_HDFS_WORKING_DIR=`$KYLIN_HOME/build/bin/get-properties.sh kylin.env.hdfs-working-dir`
- export KAP_METADATA_URL=`$KYLIN_HOME/build/bin/get-properties.sh kylin.metadata.url`
+ export KYLIN_HDFS_WORKING_DIR=`$KYLIN_HOME/build/bin/get-properties.sh kylin.env.hdfs-working-dir`
+ export KYLIN_METADATA_URL=`$KYLIN_HOME/build/bin/get-properties.sh kylin.metadata.url`
export SPARK_ENV_PROPS=`$KYLIN_HOME/build/bin/get-properties.sh kylin.storage.columnar.spark-env.`
export SPARK_CONF_PROPS=`$KYLIN_HOME/build/bin/get-properties.sh kylin.storage.columnar.spark-conf.`
export SPARK_ENGINE_CONF_PROPS=`$KYLIN_HOME/build/bin/get-properties.sh kylin.engine.spark-conf.`
@@ -51,8 +51,8 @@ else
export LOG4J_DIR=${KYLIN_HOME}/conf
export SPARK_DIR=${KYLIN_HOME}/spark/
export KYLIN_SPARK_TEST_JAR_PATH=`ls $KYLIN_HOME/lib/newten-job*.jar`
- export KAP_HDFS_WORKING_DIR=`$KYLIN_HOME/bin/get-properties.sh kylin.env.hdfs-working-dir`
- export KAP_METADATA_URL=`$KYLIN_HOME/bin/get-properties.sh kylin.metadata.url`
+ export KYLIN_HDFS_WORKING_DIR=`$KYLIN_HOME/bin/get-properties.sh kylin.env.hdfs-working-dir`
+ export KYLIN_METADATA_URL=`$KYLIN_HOME/bin/get-properties.sh kylin.metadata.url`
export SPARK_ENV_PROPS=`$KYLIN_HOME/bin/get-properties.sh kylin.storage.columnar.spark-env.`
export SPARK_CONF_PROPS=`$KYLIN_HOME/bin/get-properties.sh kylin.storage.columnar.spark-conf.`
export SPARK_ENGINE_CONF_PROPS=`$KYLIN_HOME/bin/get-properties.sh kylin.engine.spark-conf.`
@@ -64,7 +64,7 @@ else
fi
source ${KYLIN_HOME}/sbin/prepare-hadoop-conf-dir.sh
-export KAP_SPARK_IDENTIFIER=$RANDOM
+export KYLIN_SPARK_IDENTIFIER=$RANDOM
#export KAP_HDFS_APPENDER_JAR=`basename ${KYLIN_SPARK_JAR_PATH}`
# get local ip for htrace-zipkin use
@@ -215,7 +215,7 @@ then
[[ ! -f ${full_input_file} ]] || rm -f ${full_input_file}
echo "Hello Spark Client" >> ${full_input_file};
- hadoop ${KAP_HADOOP_PARAM} fs -put -f ${full_input_file} ${KAP_WORKING_DIR}
+ hadoop ${KYLIN_HADOOP_PARAM} fs -put -f ${full_input_file} ${KAP_WORKING_DIR}
spark_submit='$SPARK_HOME/bin/spark-submit '
spark_submit_conf=' --class org.apache.kylin.tool.setup.KapSparkTaskTestCLI --name Test $KYLIN_SPARK_TEST_JAR_PATH ${KAP_WORKING_DIR}/${input_file} '
@@ -223,10 +223,10 @@ then
verbose "The submit command is: $submitCommand"
eval $submitCommand
if [ $? == 0 ];then
- hadoop ${KAP_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
+ hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
rm -rf ${full_input_file}
else
- hadoop ${KAP_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
+ hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
rm -rf ${full_input_file}
quit "ERROR: Test of submitting spark job failed,error when testing
spark with spark configurations in Kyligence Enterprise!"
fi
diff --git a/src/core-common/src/test/java/org/apache/kylin/junit/MetadataExtension.java b/src/core-common/src/test/java/org/apache/kylin/junit/MetadataExtension.java
index a57a5d54b1..05c0d3aec9 100644
--- a/src/core-common/src/test/java/org/apache/kylin/junit/MetadataExtension.java
+++ b/src/core-common/src/test/java/org/apache/kylin/junit/MetadataExtension.java
@@ -102,7 +102,7 @@ public class MetadataExtension implements BeforeEachCallback, BeforeAllCallback,
val kylinHomePath = new File(getTestConfig().getMetadataUrl().toString()).getParentFile().getAbsolutePath();
System.setProperty("KYLIN_HOME", kylinHomePath);
val jobJar = org.apache.kylin.common.util.FileUtils.findFile(
- new File(kylinHomePath, "../../../assembly/target/").getAbsolutePath(), "kap-assembly(.?)\\.jar");
+ new File(kylinHomePath, "../../../assembly/target/").getAbsolutePath(), "kylin-assembly(.?)\\.jar");
getTestConfig().setProperty("kylin.engine.spark.job-jar", jobJar == null ? "" : jobJar.getAbsolutePath());
getTestConfig().setProperty("kylin.query.security.acl-tcr-enabled", "false");
return tempMetadataDirectory;
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/security/KapPasswordResetCLI.java b/src/tool/src/main/java/org/apache/kylin/tool/security/KylinPasswordResetCLI.java
similarity index 98%
rename from src/tool/src/main/java/org/apache/kylin/tool/security/KapPasswordResetCLI.java
rename to src/tool/src/main/java/org/apache/kylin/tool/security/KylinPasswordResetCLI.java
index 61540723f2..308079ddc2 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/security/KapPasswordResetCLI.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/security/KylinPasswordResetCLI.java
@@ -38,8 +38,8 @@ import org.springframework.security.crypto.password.PasswordEncoder;
import io.kyligence.kap.guava20.shaded.common.io.ByteSource;
import lombok.val;
-public class KapPasswordResetCLI {
- protected static final Logger logger = LoggerFactory.getLogger(KapPasswordResetCLI.class);
+public class KylinPasswordResetCLI {
+ protected static final Logger logger = LoggerFactory.getLogger(KylinPasswordResetCLI.class);
public static void main(String[] args) {
int exit;
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/security/KapPasswordResetCLITest.java b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
similarity index 94%
rename from src/tool/src/test/java/org/apache/kylin/tool/security/KapPasswordResetCLITest.java
rename to src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
index 83a0d0c9db..6aaf5a5e65 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/security/KapPasswordResetCLITest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
@@ -44,7 +44,7 @@ import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import lombok.val;
-public class KapPasswordResetCLITest extends LogOutputTestCase {
+public class KylinPasswordResetCLITest extends LogOutputTestCase {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@@ -53,7 +53,7 @@ public class KapPasswordResetCLITest extends LogOutputTestCase {
public void setup() {
createTestMetadata();
getTestConfig().setMetadataUrl(
- "testKapPasswordResetCLITest@jdbc,driverClassName=org.h2.Driver,url=jdbc:h2:mem:db_default;DB_CLOSE_DELAY=-1,username=sa,password=");
+ "testKylinPasswordResetCLITest@jdbc,driverClassName=org.h2.Driver,url=jdbc:h2:mem:db_default;DB_CLOSE_DELAY=-1,username=sa,password=");
}
@After
@@ -84,7 +84,7 @@ public class KapPasswordResetCLITest extends LogOutputTestCase {
ByteArrayOutputStream output = new ByteArrayOutputStream();
System.setOut(new PrintStream(output, false, Charset.defaultCharset().name()));
- KapPasswordResetCLI.reset();
+ KylinPasswordResetCLI.reset();
ResourceStore.clearCache(config);
config.clearManagers();
@@ -118,7 +118,7 @@ public class KapPasswordResetCLITest extends LogOutputTestCase {
val mode = config.getServerMode();
try {
config.setProperty("kylin.server.mode", "query");
- Assert.assertFalse(KapPasswordResetCLI.reset());
+ Assert.assertFalse(KylinPasswordResetCLI.reset());
Assert.assertTrue(containsLog("Only job/all node can update metadata."));
} finally {
config.setProperty("kylin.server.mode", mode);