This is an automated email from the ASF dual-hosted git repository.

zhangstar333 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 001af83dbe8 [ci](cloud) add cloud p1 pipeline (#31239)
001af83dbe8 is described below

commit 001af83dbe8dc3d601126ecce2e782d8bc77a03f
Author: Dongyang Li <hello_step...@qq.com>
AuthorDate: Thu Feb 22 14:54:05 2024 +0800

    [ci](cloud) add cloud p1 pipeline (#31239)
    
    * [ci](cloud) add cloud p1 pipeline
    Co-authored-by: stephen <hello-step...@qq.com>
---
 .github/workflows/comment-to-trigger-teamcity.yml  | 58 +++++++++++++++++++---
 .../pipeline/cloud_p1/conf/be_custom.conf          | 31 ++++++++++++
 .../pipeline/cloud_p1/conf/fe_custom.conf          | 35 +++++++++++++
 .../cloud_p1/conf/regression-conf-custom.groovy    |  1 +
 .../pipeline/cloud_p1/conf/session_variables.sql   |  6 +++
 regression-test/pipeline/common/doris-utils.sh     | 11 ++--
 regression-test/pipeline/common/github-utils.sh    |  4 ++
 regression-test/pipeline/common/teamcity-utils.sh  |  2 +
 8 files changed, 136 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/comment-to-trigger-teamcity.yml b/.github/workflows/comment-to-trigger-teamcity.yml
index b561483aaa4..cda32ca7c91 100644
--- a/.github/workflows/comment-to-trigger-teamcity.yml
+++ b/.github/workflows/comment-to-trigger-teamcity.yml
@@ -47,6 +47,7 @@ jobs:
             "${COMMENT_BODY}" == *'run external'* ||
             "${COMMENT_BODY}" == *'run pipelinex_p0'* ||
             "${COMMENT_BODY}" == *'run cloud_p0'* ||
+            "${COMMENT_BODY}" == *'run cloud_p1'* ||
             "${COMMENT_BODY}" == *'run arm'* ||
             "${COMMENT_BODY}" == *'run performance'* ]]; then
             echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -64,7 +65,7 @@ jobs:
         echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
         echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
 
-        reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|arm|performance)( [1-9]*[0-9]+)*"
+        reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|cloud_p1|arm|performance)( [1-9]*[0-9]+)*"
         COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
         COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
         echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a "$GITHUB_OUTPUT"
@@ -122,6 +123,11 @@ jobs:
           else
             echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT"
           fi
+          if file_changed_cloud_p1; then
+            echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
+          else
+            echo "changed_cloud_p1=false" | tee -a "$GITHUB_OUTPUT"
+          fi
         else
           echo "INFO: failed to _get_pr_changed_files, default trigger all"
           echo "changed_fe_ut=true" | tee -a "$GITHUB_OUTPUT"
@@ -134,6 +140,7 @@ jobs:
           echo "changed_p1=true" | tee -a "$GITHUB_OUTPUT"
           echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT"
           echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
+          echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
         fi
 
     # - name: "Setup tmate session"
@@ -258,12 +265,49 @@ jobs:
           echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger cloud_p0" && exit
         fi
         set -x
-        trigger_or_skip_build \
-          "${{ steps.changes.outputs.changed_cloud_p0 }}" \
-          "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
-          "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
-          "cloud_p0" \
-          "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+        if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ]]; then
+          echo "PR target branch in (master), need run cloud_p0"
+          trigger_or_skip_build \
+            "${{ steps.changes.outputs.changed_cloud_p0 }}" \
+            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+            "cloud_p0" \
+            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+        else
+          echo "PR target branch not in (master), skip run cloud_p0"
+          trigger_or_skip_build \
+            "false" \
+            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+            "cloud_p0" \
+            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+        fi
+
+    - name: "Trigger or Skip cloud_p1"
+      if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["cloud_p1", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
+      run: |
+        source ./regression-test/pipeline/common/teamcity-utils.sh
+        if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall" ]]; then
+          echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger cloud_p1" && exit
+        fi
+        set -x
+        if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ]]; then
+          echo "PR target branch in (master), need run cloud_p1"
+          trigger_or_skip_build \
+            "${{ steps.changes.outputs.changed_cloud_p1 }}" \
+            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+            "cloud_p1" \
+            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+        else
+          echo "PR target branch not in (master), skip run cloud_p1"
+          trigger_or_skip_build \
+            "false" \
+            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+            "cloud_p1" \
+            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+        fi
 
     - name: "Trigger or Skip arm"
       if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["arm", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
diff --git a/regression-test/pipeline/cloud_p1/conf/be_custom.conf b/regression-test/pipeline/cloud_p1/conf/be_custom.conf
new file mode 100644
index 00000000000..2259c031ce9
--- /dev/null
+++ b/regression-test/pipeline/cloud_p1/conf/be_custom.conf
@@ -0,0 +1,31 @@
+streaming_load_rpc_max_alive_time_sec = 72000
+quick_cooldown = true
+disable_stream_load_2pc=false
+enable_vectorized_alter_table = true
+enable_new_scan_node = true
+push_worker_count_high_priority = 2
+streaming_load_max_mb = 107374182400
+clear_file_cache=true
+enable_file_cache=true
+mem_limit=50%
+#disable_storage_page_cache = true
+enable_file_cache_query_limit=true
+file_cache_max_file_segment_size=1048576
+s3_write_buffer_whole_size=52428800
+enable_vertical_compaction=true
+fuzzy_vertical_compaction=true
+vacuum_stale_rowsets_interval_seconds=60
+tablet_rowset_stale_sweep_time_sec=300
+user_files_secure_path=/
+enable_file_cache_as_load_buffer=true
+enable_merge_on_write_correctness_check=true
+enable_debug_points=true
+prioritize_query_perf_in_compaction = true
+cumulative_compaction_min_deltas = 5
+#p0 parameter
+meta_service_endpoint = 127.0.0.1:5000
+cloud_unique_id = cloud_unique_id_compute_node0
+meta_service_use_load_balancer = false
+enable_file_cache = true
+file_cache_path = [{"path":"/data/doris_cloud/file_cache","total_size":104857600,"query_limit":104857600}]
+tmp_file_dirs = [{"path":"/data/doris_cloud/tmp","max_cache_bytes":104857600,"max_upload_bytes":104857600}]
\ No newline at end of file
diff --git a/regression-test/pipeline/cloud_p1/conf/fe_custom.conf b/regression-test/pipeline/cloud_p1/conf/fe_custom.conf
new file mode 100644
index 00000000000..5a96e7f075a
--- /dev/null
+++ b/regression-test/pipeline/cloud_p1/conf/fe_custom.conf
@@ -0,0 +1,35 @@
+stream_load_default_timeout_second = 72000
+replication_num_forced_in_cloud_mode = true
+ignore_unsupported_properties_in_cloud_mode = true
+enable_array_type = true
+tablet_stat_update_interval_second = 10
+catalog_trash_expire_second = 600
+cloud_delete_loaded_internal_stage_files = true
+merge_on_write_forced_to_false = true
+enable_ssl = true
+light_schema_change_force_to_true = true
+enable_mtmv = true
+remote_fragment_exec_timeout_ms=60000
+dynamic_partition_check_interval_seconds=10
+use_fuzzy_session_variable=true
+
+enable_cloud_snapshot_version = true
+enable_auto_collect_statistics = false
+
+forbid_function_stmt = false
+forbid_insecurity_stmt = false
+
+enable_debug_points = true
+
+disable_datev1=false
+
+disable_decimalv2=false
+max_query_profile_num=1000
+
+statistics_sql_mem_limit_in_bytes=21474836480
+cpu_resource_limit_per_analyze_task=-1
+
+priority_networks=127.0.0.1/24
+cloud_http_port=18030
+meta_service_endpoint=127.0.0.1:5000
+cloud_unique_id=cloud_unique_id_sql_server00
diff --git a/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
new file mode 100644
index 00000000000..69e59d1493c
--- /dev/null
+++ b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
@@ -0,0 +1 @@
+testGroups = "p1"
diff --git a/regression-test/pipeline/cloud_p1/conf/session_variables.sql b/regression-test/pipeline/cloud_p1/conf/session_variables.sql
new file mode 100644
index 00000000000..8885476ee06
--- /dev/null
+++ b/regression-test/pipeline/cloud_p1/conf/session_variables.sql
@@ -0,0 +1,6 @@
+-- set these session variables before running the cloud p1 regression
+set global insert_visible_timeout_ms=60000;
+set global enable_auto_analyze=false;
+set global enable_audit_plugin=true;
+set global enable_memtable_on_sink_node=false;
+set global enable_two_phase_read_opt = false;
diff --git a/regression-test/pipeline/common/doris-utils.sh b/regression-test/pipeline/common/doris-utils.sh
index 85cff73cb11..c86ee707fd8 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -456,7 +456,8 @@ archive_doris_logs() {
         archive_content="${archive_content} session_variables"
     fi
     if [[ -d "${DORIS_HOME}"/ms ]]; then
-        cp -rf /var/log/foundationdb "${DORIS_HOME}"/foundationdb/log
+        mkdir -p "${DORIS_HOME}"/foundationdb/log
+        cp -rf /var/log/foundationdb/* "${DORIS_HOME}"/foundationdb/log/
         archive_content="${archive_content} ms/conf ms/log foundationdb/log"
     fi
     if [[ -d "${DORIS_HOME}"/recycler ]]; then
@@ -565,11 +566,11 @@ function create_warehouse() {
         \"user_id\":\"user-id\",
         \"obj_info\": {
             \"provider\": \"COS\",
-            \"region\": \"ap-beijing\",
-            \"bucket\": \"doris-build-1308700295\",
+            \"region\": \"ap-hongkong\",
+            \"bucket\": \"doris-build-hk-1308700295\",
             \"prefix\": \"ci\",
-            \"endpoint\": \"cos.ap-beijing.myqcloud.com\",
-            \"external_endpoint\": \"cos.ap-beijing.myqcloud.com\",
+            \"endpoint\": \"cos.ap-hongkong.myqcloud.com\",
+            \"external_endpoint\": \"cos.ap-hongkong.myqcloud.com\",
             \"ak\": \"${COS_ak}\",
             \"sk\": \"${COS_sk}\"
         }
diff --git a/regression-test/pipeline/common/github-utils.sh b/regression-test/pipeline/common/github-utils.sh
index 74ff0879bec..9a3c40d7052 100644
--- a/regression-test/pipeline/common/github-utils.sh
+++ b/regression-test/pipeline/common/github-utils.sh
@@ -316,6 +316,10 @@ file_changed_cloud_p0() {
     echo "return no need" && return 1
 }
 
+file_changed_cloud_p1() {
+    file_changed_cloud_p0
+}
+
 file_changed_regression_p0() {
     local all_files
     all_files=$(cat all_files)
diff --git a/regression-test/pipeline/common/teamcity-utils.sh b/regression-test/pipeline/common/teamcity-utils.sh
index 3041297562a..7cbf8554ecb 100644
--- a/regression-test/pipeline/common/teamcity-utils.sh
+++ b/regression-test/pipeline/common/teamcity-utils.sh
@@ -36,6 +36,7 @@ comment_to_pipeline=(
     ['arm']='Doris_ArmPipeline_P0Regression'
     ['performance']='Doris_DorisPerformance_Performance'
     ['cloud_p0']='Doris_DorisRegression_CloudP0'
+    ['cloud_p1']='Doris_DorisCloudRegression_CloudP1'
 )
 
 # name of the pipeline to be triggered by the GitHub comment
@@ -56,6 +57,7 @@ conment_to_context=(
     ['arm']='P0 Regression (ARM pipeline)'
     ['performance']='performance (Doris Performance)'
     ['cloud_p0']='cloud_p0 (Doris Cloud Regression)'
+    ['cloud_p1']='cloud_p1 (Doris Cloud Regression)'
 )
 
 get_commit_id_of_build() {
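
For reference, here is a minimal, hedged sketch of how the updated trigger regex in comment-to-trigger-teamcity.yml parses a PR comment such as "run cloud_p1 2". The regex and the awk/sed extraction are copied verbatim from the diff above; the standalone script wrapper and the example comment are assumptions added purely for illustration.

    #!/usr/bin/env bash
    # Hypothetical local dry-run of the comment-trigger parsing shown in the diff above.
    COMMENT_BODY="run cloud_p1 2"   # example PR comment (assumed)
    reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|cloud_p1|arm|performance)( [1-9]*[0-9]+)*"
    # Same extraction as the workflow: field 2 is the pipeline name, field 3 the optional repeat count.
    COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
    COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
    echo "type=${COMMENT_TRIGGER_TYPE} repeat=${COMMENT_REPEAT_TIMES}"   # prints: type=cloud_p1 repeat=2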


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
