This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new ea8cfb3dd2d branch-3.0: [regression-test](fix) fix some fail filecache 
cases #47785 (#47822)
ea8cfb3dd2d is described below

commit ea8cfb3dd2dda707c401a864790c40ad80933bee
Author: github-actions[bot] 
<41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Wed Feb 12 17:53:24 2025 +0800

    branch-3.0: [regression-test](fix) fix some fail filecache cases #47785 
(#47822)
    
    Cherry-picked from #47785
    
    Co-authored-by: chunping <zhangchunp...@selectdb.com>
---
 .../plugins/cloud_filecache_plugin.groovy          | 116 +++++++++++++++++++++
 .../warm_up/hotspot/test_warmup_show_stmt.groovy   |   8 +-
 .../warm_up/hotspot/test_warmup_show_stmt_2.groovy |  10 +-
 .../warm_up/hotspot/test_warmup_show_stmt_3.groovy |   4 +-
 .../suites/cloud_p0/cache/ttl/alter_ttl_1.groovy   |  17 +++
 .../suites/cloud_p0/cache/ttl/alter_ttl_2.groovy   |   4 +-
 6 files changed, 146 insertions(+), 13 deletions(-)

diff --git a/regression-test/plugins/cloud_filecache_plugin.groovy 
b/regression-test/plugins/cloud_filecache_plugin.groovy
new file mode 100644
index 00000000000..b41e83f4cfc
--- /dev/null
+++ b/regression-test/plugins/cloud_filecache_plugin.groovy
@@ -0,0 +1,116 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+import groovy.json.JsonOutput
+import org.apache.doris.regression.suite.Suite
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+// Plugin replacement for `SHOW CACHE HOTSPOT`: reads the backing table
+// __internal_schema.cloud_cache_hotspot directly.  Dispatch mirrors the path:
+//   show_cache_hotspot()             ~ show cache hotspot "/"
+//   show_cache_hotspot(group)        ~ show cache hotspot "/<group>"
+//   show_cache_hotspot(group, table) ~ show cache hotspot "/<group>/<table>"
+Suite.metaClass.show_cache_hotspot = { String computeGroup = null, String table = null ->
+    // "/<group>/<table>": hotspot partitions of one table in one compute group
+    def select_table_hotspot = """
+        select
+            partition_id as PartitionId,
+            partition_name as PartitionName
+        from __internal_schema.cloud_cache_hotspot
+        where
+            cluster_name = "${computeGroup}"
+            and table_name = "${table}"
+        group by
+            cluster_id,
+            cluster_name,
+            table_id,
+            table_name,
+            partition_id,
+            partition_name;
+    """
+    // "/<group>": hottest table of the given compute group
+    def select_compute_group_hotspot = """
+        with t1 as (
+            select
+                cluster_id,
+                cluster_name,
+                table_id,
+                table_name,
+                insert_day,
+                sum(query_per_day) as query_per_day_total,
+                sum(query_per_week) as query_per_week_total
+            from __internal_schema.cloud_cache_hotspot
+            where cluster_name = "${computeGroup}"
+            group by cluster_id, cluster_name, table_id, table_name, insert_day
+            )
+            select
+            cluster_id as ComputeGroupId,
+            cluster_name as ComputeGroupName,
+            table_id as TableId,
+            table_name as TableName
+            from (
+            select
+                row_number() over (
+                partition by cluster_id
+                order by insert_day desc, query_per_day_total desc, query_per_week_total desc
+                ) as dr2,
+                *
+            from t1
+            ) t2
+            where dr2 = 1;
+    """
+    // "/": hottest table of every compute group
+    def select_all_hotspot = """
+        with t1 as (
+            select
+                cluster_id,
+                cluster_name,
+                table_id,
+                table_name,
+                insert_day,
+                sum(query_per_day) as query_per_day_total,
+                sum(query_per_week) as query_per_week_total
+            from __internal_schema.cloud_cache_hotspot
+            group by cluster_id, cluster_name, table_id, table_name, insert_day
+            )
+            select
+            cluster_id as ComputeGroupId,
+            cluster_name as ComputeGroupName,
+            table_id as TableId,
+            table_name as TableName
+            from (
+            select
+                row_number() over (
+                partition by cluster_id
+                order by insert_day desc, query_per_day_total desc, query_per_week_total desc
+                ) as dr2,
+                *
+            from t1
+            ) t2
+            where dr2 = 1;
+    """
+    // Pick the query matching the supplied arguments (note `==`, not `=`);
+    // res stays null when a table is given without a compute group.
+    def res = null
+    if (computeGroup != null && table != null) {
+        res = sql_return_maparray select_table_hotspot
+    } else if (computeGroup != null) {
+        res = sql_return_maparray select_compute_group_hotspot
+    } else if (table == null) {
+        res = sql_return_maparray select_all_hotspot
+    }
+    return res
+}
diff --git 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt.groovy
 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt.groovy
index be430e408c3..dc929c16c06 100644
--- 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt.groovy
+++ 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt.groovy
@@ -103,7 +103,7 @@ suite("test_warmup_show_stmt") {
         }
     }
     sleep(40000)
-    // result = sql """ show cache hotspot "/" """
+    // result = show_cache_hotspot()
     // if (result[0][0].equals("regression_cluster_id0")) {
     //     assertEquals(result[0][3], 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
     //     assertEquals(result[1][3], 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier")
@@ -112,15 +112,15 @@ suite("test_warmup_show_stmt") {
     //     assertEquals(result[0][3], 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier")
     // }
 
-    // try {
-    //     sql """ show cache hotspot "/error_cluster """ 
+    // try { 
+    //     show_cache_hotspot("error_cluster")
     //     assertTrue(false)
     // } catch (Exception e) {
     //     assertTrue(true)
     // }
 
     // try {
-    //     sql """ show cache hotspot 
"/regression_cluster_name1/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.error_table
 """ 
+    //     show_cache_hotspot("regression_cluster_name1", 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.error_table")
     //     assertTrue(false)
     // } catch (Exception e) {
     //     assertTrue(true)
diff --git 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_2.groovy
 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_2.groovy
index 221edfef550..3b944768970 100644
--- 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_2.groovy
+++ 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_2.groovy
@@ -80,7 +80,7 @@ suite("test_warmup_show_stmt_2") {
         return s.getFileName() + ":" + s.getLineNumber()
     }
 
-    def result = sql_return_maparray """ show cache hotspot "/" """
+    def result = show_cache_hotspot()
     log.info(result.toString())
     org.junit.Assert.assertTrue("result.size() " + result.size() + " > 0", 
result.size() > 0)
     def hotTableName = 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer"
@@ -96,21 +96,21 @@ suite("test_warmup_show_stmt_2") {
     }
     org.junit.Assert.assertTrue(getLineNumber() + "cannot find expected cache 
hotspot ${hotTableName}", found)
 
-    result = sql_return_maparray """ show cache hotspot 
"/regression_cluster_name0" """
+    result = show_cache_hotspot("regression_cluster_name0")
     log.info(result.toString())
     org.junit.Assert.assertTrue(getLineNumber() + "result.size() " + 
result.size() + " > 0", result.size() > 0)
     assertEquals(result[0].get("PartitionName"), "p3")
     assertEquals(result[0].get("TableName"), 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
-    // result = sql_return_maparray """ show cache hotspot 
"/regression_cluster_name1" """
+    // result = show_cache_hotspot("regression_cluster_name1")
     // assertEquals(result.size(), 0);
     // not queried table should not be the hotspot
-    result = sql_return_maparray """ show cache hotspot 
"/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier"
 """
+    result = show_cache_hotspot("regression_cluster_name0", 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier" )
     log.info(result.toString())
     assertEquals(result.size(), 0);
 
     sql new File("""${context.file.parent}/../ddl/${table}_delete.sql""").text
     sleep(40000)
-    result = sql_return_maparray """ show cache hotspot "/" """
+    result = show_cache_hotspot()
     log.info(result.toString())
     org.junit.Assert.assertTrue("result.size() " + result.size() + " > 0", 
result.size() > 0)
     found = false
diff --git 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_3.groovy
 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_3.groovy
index 6f4f3cfe176..9ad8c637594 100644
--- 
a/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_3.groovy
+++ 
b/regression-test/suites/cloud_p0/cache/multi_cluster/warm_up/hotspot/test_warmup_show_stmt_3.groovy
@@ -78,11 +78,11 @@ suite("test_warmup_show_stmt_3") {
     }
     for (int i = 0; i < 3; i++) {
         sleep(40000)
-        result = sql """ show cache hotspot 
"/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer"
 """
+        result = show_cache_hotspot("regression_cluster_name0", 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
         assertTrue(result.size() > 0);
     }
     thread.join()
     sleep(40000)
-    result = sql """ show cache hotspot 
"/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer"
 """
+    result = show_cache_hotspot("regression_cluster_name0", 
"regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
     assertTrue(result.size() > 0);
 }
diff --git a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_1.groovy 
b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_1.groovy
index 299608f4091..7cbaca8f1df 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_1.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_1.groovy
@@ -122,6 +122,7 @@ suite("alter_ttl_1") {
     }
     sql """ ALTER TABLE customer_ttl SET ("file_cache_ttl_seconds"="140") """
     sleep(80000)
+    // after 110s the data from the first load has been converted from TTL cache to normal cache
     getMetricsMethod.call() {
         respCode, body ->
             assertEquals("${respCode}".toString(), "200")
@@ -131,6 +132,15 @@ suite("alter_ttl_1") {
             for (String line in strs) {
                 if (flag1) break;
                 if (line.contains("ttl_cache_size")) {
+                    if (line.startsWith("#")) {
+                        continue
+                    }
+                    def i = line.indexOf(' ')
+                    assertEquals(line.substring(i).toLong(), 0)
+
+                }
+                
+                if (line.contains("normal_queue_cache_size")) {
                     if (line.startsWith("#")) {
                         continue
                     }
@@ -158,6 +168,13 @@ suite("alter_ttl_1") {
                     }
                     def i = line.indexOf(' ')
                     assertEquals(line.substring(i).toLong(), 0)
+                }
+                if (line.contains("normal_queue_cache_size")) {
+                    if (line.startsWith("#")) {
+                        continue
+                    }
+                    def i = line.indexOf(' ')
+                    assertEquals(line.substring(i).toLong(), ttl_cache_size)
                     flag1 = true
                 }
             }
diff --git a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_2.groovy 
b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_2.groovy
index 660e822075d..b0069cd704a 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_2.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_2.groovy
@@ -161,8 +161,8 @@ suite("alter_ttl_2") {
             }
             assertTrue(flag1)
     }
-    // wait for ttl timeout
-    sleep(40000)
+    // the first load's data TTL is 300s, so wait another 200s for it to expire
+    sleep(200000)
     getMetricsMethod.call() {
         respCode, body ->
             assertEquals("${respCode}".toString(), "200")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org

Reply via email to