This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new d8e6d4ae8ab [chore](ci) adjust pipelines (#52507)
d8e6d4ae8ab is described below
commit d8e6d4ae8abcef8678261d3cbd8b79cf292e4960
Author: Dongyang Li <[email protected]>
AuthorDate: Fri Jul 4 15:12:37 2025 +0800
[chore](ci) adjust pipelines (#52507)
### What problem does this PR solve?
Issue Number: close #xxx
Related PR: #xxx
Problem Summary:
### Release note
None
### Check List (For Author)
- Test <!-- At least one of them must be included. -->
- [ ] Regression test
- [ ] Unit Test
- [ ] Manual test (add detailed scripts or steps below)
- [ ] No need to test or manual test. Explain why:
- [ ] This is a refactor/code format and no logic has been changed.
- [ ] Previous test can cover this change.
- [ ] No code files have been changed.
- [ ] Other reason <!-- Add your reason? -->
- Behavior changed:
- [ ] No.
- [ ] Yes. <!-- Explain the behavior change -->
- Does this need documentation?
- [ ] No.
- [ ] Yes. <!-- Add document PR link here. eg:
https://github.com/apache/doris-website/pull/1214 -->
### Check List (For Reviewer who merge this PR)
- [ ] Confirm the release note
- [ ] Confirm test cases
- [ ] Confirm document
- [ ] Add branch pick label <!-- Add branch pick label that this PR
should merge into -->
---
regression-test/pipeline/cloud_p0/clean.sh | 2 +-
regression-test/pipeline/cloud_p0/prepare.sh | 23 ++++----
regression-test/pipeline/cloud_p0/run.sh | 9 ++-
regression-test/pipeline/common/doris-utils.sh | 76 +++++++++++++++++++++++---
regression-test/pipeline/vault_p0/clean.sh | 2 +-
regression-test/pipeline/vault_p0/prepare.sh | 2 +-
regression-test/pipeline/vault_p0/run.sh | 2 +-
7 files changed, 89 insertions(+), 27 deletions(-)
diff --git a/regression-test/pipeline/cloud_p0/clean.sh
b/regression-test/pipeline/cloud_p0/clean.sh
index 9707378e27b..2b67ae93495 100644
--- a/regression-test/pipeline/cloud_p0/clean.sh
+++ b/regression-test/pipeline/cloud_p0/clean.sh
@@ -26,5 +26,5 @@ if ${skip_pipeline:=false}; then echo "INFO: skip build
pipline" && exit 0; else
echo "#### Run tpcds test on Doris ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
-stop_doris
+export -f stop_doris_grace && timeout -v 20m bash -cx stop_doris_grace
clean_fdb "cloud_instance_0"
diff --git a/regression-test/pipeline/cloud_p0/prepare.sh
b/regression-test/pipeline/cloud_p0/prepare.sh
index 44154c2ee26..bd08a0b2f18 100644
--- a/regression-test/pipeline/cloud_p0/prepare.sh
+++ b/regression-test/pipeline/cloud_p0/prepare.sh
@@ -35,7 +35,7 @@ if ${DEBUG:-false}; then
fi
# shellcheck source=/dev/null
-# stop_doris, clean_fdb, install_fdb, install_java
+# stop_doris, clean_fdb, install_fdb, install_java, clear_coredump
source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
# shellcheck source=/dev/null
# check_oss_file_exist, download_oss_file
@@ -100,17 +100,18 @@ echo "#### 3. try to kill old doris process"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
stop_doris
+clear_coredump
echo "#### 4. prepare fundationdb"
install_fdb
clean_fdb "cloud_instance_0"
echo "#### 5. check if binary package ready"
-merge_pr_to_target_branch_compiled_commit() {
+merge_pr_to_master_commit() {
local pr_num_from_trigger="$1"
local target_branch="$2"
- local target_branch_compiled_commit="$3"
- echo "INFO: merge pull request into ${target_branch}
${target_branch_compiled_commit}"
+ local master_commit="$3"
+ echo "INFO: merge pull request into ${target_branch} ${master_commit}"
if [[ -z "${teamcity_build_checkoutDir}" ]]; then
echo "ERROR: env teamcity_build_checkoutDir not set" && return 1
fi
@@ -119,10 +120,10 @@ merge_pr_to_target_branch_compiled_commit() {
git fetch origin "${target_branch}"
git checkout "${target_branch}"
git reset --hard origin/"${target_branch}"
- git checkout "${target_branch_compiled_commit}"
+ git checkout "${master_commit}"
returnValue=$?
if [[ ${returnValue} -ne 0 ]]; then
- echo "ERROR: checkout ${target_branch}
${target_branch_compiled_commit} failed. please rebase to the newest version."
+ echo "ERROR: checkout ${target_branch} ${master_commit} failed. please
rebase to the newest version."
return 1
fi
git rev-parse HEAD
@@ -131,7 +132,7 @@ merge_pr_to_target_branch_compiled_commit() {
echo "git fetch origin refs/pull/${pr_num_from_trigger}/head"
git fetch origin "refs/pull/${pr_num_from_trigger}/head"
git merge --no-edit --allow-unrelated-histories FETCH_HEAD
- echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into
${target_branch} ${target_branch_compiled_commit}"
+ echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into
${target_branch} ${master_commit}"
# CONFLICTS=$(git ls-files -u | wc -l)
if [[ $(git ls-files -u | wc -l) -gt 0 ]]; then
echo "ERROR: merge refs/pull/${pr_num_from_trigger}/head into failed.
Aborting"
@@ -144,11 +145,11 @@ if ! check_oss_file_exist
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.
if download_oss_file
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then
rm -rf "${teamcity_build_checkoutDir}"/output
tar -I pigz -xf "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"
- target_branch_compiled_commit_file="master.commit"
- if [[ -e output/${target_branch_compiled_commit_file} ]]; then
+ master_commit_file="master.commit"
+ if [[ -e output/${master_commit_file} ]]; then
# checkout to master commit and merge this pr, to ensure binary and
case are same version
- target_branch_compiled_commit=$(cat
output/"${target_branch_compiled_commit_file}")
- if merge_pr_to_target_branch_compiled_commit "${pr_num_from_trigger}"
"${target_branch}" "${target_branch_compiled_commit}"; then
+ master_commit=$(cat output/"${master_commit_file}")
+ if merge_pr_to_master_commit "${pr_num_from_trigger}"
"${target_branch}" "${master_commit}"; then
echo "INFO: merged done"
if [[ "${teamcity_buildType_id:-}" =~
^Doris_DorisCloudRegression_CloudP1 ]]; then
echo "INFO: 用cloud_p1/conf覆盖cloud_p0/conf"
diff --git a/regression-test/pipeline/cloud_p0/run.sh
b/regression-test/pipeline/cloud_p0/run.sh
index 43847f86980..55f0c419013 100644
--- a/regression-test/pipeline/cloud_p0/run.sh
+++ b/regression-test/pipeline/cloud_p0/run.sh
@@ -13,7 +13,7 @@ fi
EOF
############################# run.sh content
########################################
# shellcheck source=/dev/null
-# check_tpcds_table_rows, restart_doris, set_session_variable,
check_tpcds_result
+# _monitor_regression_log, print_running_pipeline_tasks
source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
# shellcheck source=/dev/null
# create_an_issue_comment
@@ -47,6 +47,9 @@ export DORIS_HOME
exit_flag=0
need_collect_log=false
+# monitoring the log files in "${DORIS_HOME}"/regression-test/log/ for keyword
'Reach limit of connections'
+_monitor_regression_log &
+
# shellcheck disable=SC2317
run() {
set -e
@@ -72,7 +75,6 @@ run() {
export JAVA_HOME
if "${teamcity_build_checkoutDir}"/run-regression-test.sh \
--teamcity \
- --clean \
--run \
--times "${repeat_times_from_trigger:-1}" \
-parallel 18 \
@@ -105,6 +107,7 @@ export -f run
timeout_minutes=$((${repeat_times_from_trigger:-1} *
${BUILD_TIMEOUT_MINUTES:-180}))m
timeout "${timeout_minutes}" bash -cx run
exit_flag="$?"
+if print_running_pipeline_tasks; then :; fi
# shellcheck source=/dev/null
source "$(cd "${teamcity_build_checkoutDir}" && bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'get')"
@@ -116,7 +119,7 @@ if [[ ${exit_flag} != "0" ]] || ${need_collect_log}; then
print_doris_fe_log
print_doris_be_log
fi
- stop_doris
+ export -f stop_doris_grace && timeout -v 20m bash -cx stop_doris_grace
if log_file_name=$(archive_doris_logs
"${pr_num_from_trigger}_${commit_id_from_trigger}_$(date
+%Y%m%d%H%M%S)_doris_logs.tar.gz"); then
if log_info="$(upload_doris_log_to_oss "${log_file_name}")"; then
reporting_messages_error "${log_info##*logs.tar.gz to }"
diff --git a/regression-test/pipeline/common/doris-utils.sh
b/regression-test/pipeline/common/doris-utils.sh
index 1ae9d7c1c89..d1b6e71835a 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -94,7 +94,7 @@ function install_java() {
[[ -z "$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*')" ]];
then
sudo apt update && sudo apt install openjdk-8-jdk -y >/dev/null
fi
- # doris master and branch-3.0 use java-17
+ # doris master branch use java-17
if ! java -version >/dev/null ||
[[ -z "$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-17-*')" ]];
then
sudo apt update && sudo apt install openjdk-17-jdk -y >/dev/null
@@ -137,10 +137,10 @@ function start_doris_fe() {
if [[ -n "${fe_version}" ]] && [[ "${fe_version}" != "NULL" ]]; then
echo "INFO: doris fe started, fe version: ${fe_version}" && return 0
else
- echo "${i}/60, Wait for Frontend ready, sleep 2 seconds ..." &&
sleep 2
+ echo "${i}/60, Wait for Frontend ready, sleep 5 seconds ..." &&
sleep 5
fi
done
- if [[ ${i} -ge 60 ]]; then echo "ERROR: Start Doris Frontend Failed after
2 mins wait..." && return 1; fi
+ if [[ ${i} -ge 60 ]]; then echo "ERROR: Start Doris Frontend Failed after
5 mins wait..." && return 1; fi
}
function start_doris_be() {
@@ -194,10 +194,10 @@ function check_doris_ready() {
[[ ${be_ready_count} -eq 1 ]]; then
echo -e "INFO: Doris cluster ready, be version: \n$(${cl} -e 'show
backends\G' | grep 'Version')" && break
else
- echo 'Wait for backends ready, sleep 2 seconds ...' && sleep 2
+ echo 'Wait for backends ready, sleep 5 seconds ...' && sleep 5
fi
done
- if [[ ${i} -ge 60 ]]; then echo "ERROR: Doris cluster not ready after 2
mins wait..." && return 1; fi
+ if [[ ${i} -ge 60 ]]; then echo "ERROR: Doris cluster not ready after 5
mins wait..." && return 1; fi
# wait 10s for doris totally started, otherwize may encounter the error
below,
# ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to
find enough backend, please check the replication num,replication tag and
storage medium.
@@ -206,14 +206,26 @@ function check_doris_ready() {
function stop_doris() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
- if [[ -f "${DORIS_HOME}"/ms/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/ms/bin/stop.sh; fi
- if [[ -f "${DORIS_HOME}"/recycler/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/recycler/bin/stop.sh; fi
if "${DORIS_HOME}"/be/bin/stop_be.sh && "${DORIS_HOME}"/fe/bin/stop_fe.sh;
then
echo "INFO: normally stoped doris"
else
pgrep -fi doris | xargs kill -9 &>/dev/null
echo "WARNING: force stoped doris"
fi
+ if [[ -f "${DORIS_HOME}"/ms/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/ms/bin/stop.sh; fi
+ if [[ -f "${DORIS_HOME}"/recycler/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/recycler/bin/stop.sh; fi
+}
+
+function stop_doris_grace() {
+ if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+ if "${DORIS_HOME}"/be/bin/stop_be.sh --grace &&
"${DORIS_HOME}"/fe/bin/stop_fe.sh --grace; then
+ echo "INFO: normally stoped doris --grace"
+ else
+ pgrep -fi doris | xargs kill -9 &>/dev/null
+ echo "WARNING: force stoped doris"
+ fi
+ if [[ -f "${DORIS_HOME}"/ms/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/ms/bin/stop.sh --grace; fi
+ if [[ -f "${DORIS_HOME}"/recycler/bin/stop.sh ]]; then bash
"${DORIS_HOME}"/recycler/bin/stop.sh --grace; fi
}
function clean_fdb() {
@@ -293,10 +305,10 @@ function restart_doris() {
[[ ${be_ready_count} -eq 1 ]]; then
echo -e "INFO: ${be_ready_count} Backends ready, version:
\n$(${cl} -e 'show backends\G' | grep 'Version')" && break
else
- echo 'Wait for Backends ready, sleep 2 seconds ...' && sleep 2
+ echo 'Wait for Backends ready, sleep 5 seconds ...' && sleep 5
fi
done
- if [[ ${i} -ge 60 ]]; then echo "ERROR: Backend not ready after 2 mins
wait..." && return 1; fi
+ if [[ ${i} -ge 60 ]]; then echo "ERROR: Backend not ready after 5 mins
wait..." && return 1; fi
# wait 10s for doris totally started, otherwize may encounter the error
below,
# ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to
find enough backend, please check the replication num,replication tag and
storage medium.
@@ -521,6 +533,47 @@ function set_doris_session_variables_from_file() {
fi
}
+_monitor_regression_log() {
+ if ! command -v inotifywait >/dev/null; then
+ apt install inotify-tools -y
+ fi
+
+ # Path to the log directory
+ local LOG_DIR="${DORIS_HOME}"/regression-test/log
+
+ # keyword to search for in the log files
+ local KEYWORD="Reach limit of connections"
+
+ local query_port
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf
query_port)
+
+ echo "INFO: start monitoring the log files in ${LOG_DIR} for the keyword
'${KEYWORD}'"
+
+ local start_row=1
+ local filepath=""
+ set +x
+ # Monitor the log directory for new files and changes, only one file
+ # shellcheck disable=SC2034
+ inotifywait -m -e modify "${LOG_DIR}" | while read -r directory events
filename; do
+ filepath="${directory}${filename}"
+ if [[ ! -f "${filepath}" ]]; then continue; fi
+ total_rows=$(wc -l "${filepath}" | awk '{print $1}')
+ if [[ -n ${total_rows} ]] && [[ ${start_row} -ge ${total_rows} ]]; then
+ start_row=${total_rows}
+ fi
+ # shellcheck disable=SC2250
+ if sed -n "${start_row},\$p" "${filepath}" | grep -a -q "${KEYWORD}";
then
+ matched=$(grep -a -n "${KEYWORD}" "${filepath}")
+ start_row=$(echo "${matched}" | tail -n1 | cut -d: -f1)
+ echo "WARNING: find '${matched}' in ${filepath}, run 'show
processlist;' to check the connections" | tee -a
"${DORIS_HOME}"/fe/log/monitor_regression_log.out
+ mysql -h127.0.0.1 -P"${query_port}" -uroot -e'show processlist;' |
tee -a "${DORIS_HOME}"/fe/log/monitor_regression_log.out
+ fi
+ start_row=$((start_row + 1))
+ # echo "start_row ${start_row}" | tee -a
"${DORIS_HOME}"/fe/log/monitor_regression_log.out
+ done
+
+}
+
archive_doris_logs() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
local archive_name="$1"
@@ -592,6 +645,11 @@ wait_coredump_file_ready() {
done
}
+clear_coredump() {
+ echo -e "INFO: clear coredump files \n$(ls /var/lib/apport/coredump/)"
+ rm -rf /var/lib/apport/coredump/*
+}
+
archive_doris_coredump() {
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
archive_name="$1"
diff --git a/regression-test/pipeline/vault_p0/clean.sh
b/regression-test/pipeline/vault_p0/clean.sh
index 309c9b8e533..1500bd48bf4 100644
--- a/regression-test/pipeline/vault_p0/clean.sh
+++ b/regression-test/pipeline/vault_p0/clean.sh
@@ -26,7 +26,7 @@ if ${skip_pipeline:=false}; then echo "INFO: skip build
pipline" && exit 0; else
echo "#### stop doris and clean fdb ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
-stop_doris
+export -f stop_doris_grace && timeout -v 20m bash -cx stop_doris_grace
clean_fdb "cloud_instance_0"
echo "#### docker-compose down ####"
diff --git a/regression-test/pipeline/vault_p0/prepare.sh
b/regression-test/pipeline/vault_p0/prepare.sh
index 00f6878ee59..3f348052337 100644
--- a/regression-test/pipeline/vault_p0/prepare.sh
+++ b/regression-test/pipeline/vault_p0/prepare.sh
@@ -71,7 +71,7 @@ if [[ "${target_branch}" == "master" || "${target_branch}" ==
"branch-3.0" ]]; t
echo "INFO: PR target branch ${target_branch}"
install_java
else
- echo "WARNING: PR target branch ${target_branch} is NOT in (master,
branch-3.0), skip pipeline."
+ echo "WARNING: PR target branch ${target_branch} is NOT in
(master,branch-3.0), skip pipeline."
bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'set' "export skip_pipeline=true"
exit 0
fi
diff --git a/regression-test/pipeline/vault_p0/run.sh
b/regression-test/pipeline/vault_p0/run.sh
index 8322e458a7a..7cf48010f95 100644
--- a/regression-test/pipeline/vault_p0/run.sh
+++ b/regression-test/pipeline/vault_p0/run.sh
@@ -156,7 +156,7 @@ if [[ ${exit_flag} != "0" ]] || ${need_collect_log}; then
print_doris_fe_log
print_doris_be_log
fi
- stop_doris
+ export -f stop_doris_grace && timeout -v 20m bash -cx stop_doris_grace
if log_file_name=$(archive_doris_logs
"${pr_num_from_trigger}_${commit_id_from_trigger}_$(date
+%Y%m%d%H%M%S)_doris_logs.tar.gz"); then
if log_info="$(upload_doris_log_to_oss "${log_file_name}")"; then
reporting_messages_error "${log_info##*logs.tar.gz to }"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]