This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new b946521a569 [enhancement](regression-test) add single stream multi table case (#25360)
b946521a569 is described below

commit b946521a569b281d9c567c964d4d5e83ec043c3f
Author: HHoflittlefish777 <77738092+hhoflittlefish...@users.noreply.github.com>
AuthorDate: Sat Oct 14 10:59:50 2023 +0800

    [enhancement](regression-test) add single stream multi table case (#25360)
---
 .../kafka/scripts/multi_table_csv.csv              |  1 +
 .../kafka/scripts/multi_table_json.json            |  1 +
 docker/thirdparties/run-thirdparties-docker.sh     |  4 +-
 .../load_p0/routine_load/test_routine_load.out     |  8 +-
 .../ddl/dup_tbl_basic_multi_table_create.sql       | 46 +++++++++++
 .../ddl/dup_tbl_basic_multi_table_drop.sql         |  1 +
 .../load_p0/routine_load/test_routine_load.groovy  | 96 +++++++++++++++++++++-
 7 files changed, 153 insertions(+), 4 deletions(-)
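
The mechanism under test: in single-stream multi-table routine load, every Kafka message names its destination table ahead of the payload, so one job can dispatch rows to several tables. A minimal sketch of such a job, modeled on the statement this suite creates (job, broker, and topic names here are placeholders):

    CREATE ROUTINE LOAD example_multi_table_job
    COLUMNS TERMINATED BY "|"
    PROPERTIES
    (
        "format" = "csv",
        "max_batch_interval" = "5",
        "max_batch_rows" = "300000",
        "max_batch_size" = "209715200"
    )
    FROM KAFKA
    (
        "kafka_broker_list" = "broker_host:9092",
        "kafka_topic" = "multi_table_csv",
        "property.kafka_default_offsets" = "OFFSET_BEGINNING"
    );

Unlike a single-table job there is no ON <table> clause; the target table is resolved per message.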

diff --git a/docker/thirdparties/docker-compose/kafka/scripts/multi_table_csv.csv b/docker/thirdparties/docker-compose/kafka/scripts/multi_table_csv.csv
new file mode 100644
index 00000000000..e2ad18c3878
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kafka/scripts/multi_table_csv.csv
@@ -0,0 +1 @@
+routine_load_dup_tbl_basic_multi_table|49|2023-08-08|FALSE|\N|16275|-2144851675|-2303421957908954634|-46526938720058765|-13141.142578|-686632233.230200|229942298.0|-152553823.0|2022-09-01 00:16:01|2023-03-25|2022-09-07 14:59:03|s||yvuILR2iNxfe8RRml|{"student": true, "name": "Alice", "grade": 9, "subjects": ["math", "science", "history"]}
diff --git a/docker/thirdparties/docker-compose/kafka/scripts/multi_table_json.json b/docker/thirdparties/docker-compose/kafka/scripts/multi_table_json.json
new file mode 100644
index 00000000000..b8d9f9e37b3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/kafka/scripts/multi_table_json.json
@@ -0,0 +1 @@
+routine_load_dup_tbl_basic_multi_table|{"k00": "8", "k01": "2023-08-14", "k02": "1", "k03": "109", "k04": "-31573", "k05": "-1362465190", "k06": "3990845741226497177", "k07": "2732763251146840270", "k08": "-25698.553", "k09": "1312831962.5678179", "k10": "\\N", "k11": "\\N", "k12": "2023-03-07 14:13:19", "k13": "2022-10-18", "k14": "2023-07-16 05:03:13", "k15": "D", "k16": "", "k17": "PBn1wa6X8WneZYLMac11zzyhGl7tPXB5XgjmOV8L6uav9ja5oY433ktb2yhyQQIqBveZPkme", "k18": "\\N"}
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index 041aa866f25..de55f9c3396 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -256,7 +256,7 @@ if [[ "${RUN_KAFKA}" -eq 1 ]]; then
         local ip_host="$2"
         local backup_dir=/home/work/pipline/backup_center
 
-        declare -a topics=("basic_data" "basic_array_data" "basic_data_with_errors" "basic_array_data_with_errors" "basic_data_timezone" "basic_array_data_timezone")
+        declare -a topics=("basic_data" "basic_array_data" "basic_data_with_errors" "basic_array_data_with_errors" "basic_data_timezone" "basic_array_data_timezone" "multi_table_csv")
 
         for topic in "${topics[@]}"; do
             while IFS= read -r line; do
@@ -267,7 +267,7 @@ if [[ "${RUN_KAFKA}" -eq 1 ]]; then
             done < "${ROOT}/docker-compose/kafka/scripts/${topic}.csv"
         done
 
-        declare -a json_topics=("basic_data_json" "basic_array_data_json" "basic_array_data_json_by_line" "basic_data_json_by_line")
+        declare -a json_topics=("basic_data_json" "basic_array_data_json" "basic_array_data_json_by_line" "basic_data_json_by_line" "multi_table_json")
         
         for json_topic in "${json_topics[@]}"; do
             echo ${json_topics}
diff --git a/regression-test/data/load_p0/routine_load/test_routine_load.out b/regression-test/data/load_p0/routine_load/test_routine_load.out
index d8bd4c715ed..161af660b47 100644
--- a/regression-test/data/load_p0/routine_load/test_routine_load.out
+++ b/regression-test/data/load_p0/routine_load/test_routine_load.out
@@ -980,4 +980,10 @@
 55     [1, 1, 1, 1, 1, 1]      [65, 65, 65, 65, 65, 65]        [18805, 18805, 18805, 18805, 18805, 18805]      [229725878, 229725878, 229725878, 229725878, 229725878, 229725878]      [2742856458318615325, 2742856458318615325, 2742856458318615325, 2742856458318615325, 2742856458318615325, 2742856458318615325]  [5907702768956232371, 5907702768956232371, 5907702768956232371, 5907702768956232371, 5907702768956232371, 5907702768956232371]  [12354.624, 12354.624, 12354.624, 12354.624, 12354.624] [1697579881.9474771, 1697 [...]
 61     [1, 1, 1, 1, 1, 1]      [121, 121, 121, 121, 121, 121]  [31806, 31806, 31806, 31806, 31806, 31806]      [-1410915562, -1410915562, -1410915562, -1410915562, -1410915562, -1410915562]  [-250403393155768717, -250403393155768717, -250403393155768717, -250403393155768717, -250403393155768717, -250403393155768717]  [4301573778529723431, 4301573778529723431, 4301573778529723431, 4301573778529723431, 4301573778529723431, 4301573778529723431]  [10719.892, 10719.892, 10719.892, 10719.892, 10719.892] [107378 [...]
 76     [1, 1, 1, 1, 1, 1]      [-63, -63, -63, -63, -63, -63]  [25799, 25799, 25799, 25799, 25799, 25799]      [-1387912656, -1387912656, -1387912656, -1387912656, -1387912656, -1387912656]  [8967926767558546181, 8967926767558546181, 8967926767558546181, 8967926767558546181, 8967926767558546181, 8967926767558546181]  [-3537865898119184476, -3537865898119184476, -3537865898119184476, -3537865898119184476, -3537865898119184476, -3537865898119184476]    [5311.188, 5311.188, 5311.188, 5311.188, 5311.188]      [17362 [...]
-84     [0, 0, 0, 0, 0, 0]      [-10, -10, -10, -10, -10, -10]  [9493, 9493, 9493, 9493, 9493, 9493]    [-547874696, -547874696, -547874696, -547874696, -547874696, -547874696]        [-115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756]  [4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085]  [13718.372, 13718.372, 13718.372, 13718.372, 13718.372] [-978213266.02697,  [...]
\ No newline at end of file
+84     [0, 0, 0, 0, 0, 0]      [-10, -10, -10, -10, -10, -10]  [9493, 9493, 9493, 9493, 9493, 9493]    [-547874696, -547874696, -547874696, -547874696, -547874696, -547874696]        [-115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756, -115057683458952756]  [4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085, 4473017779279230085]  [13718.372, 13718.372, 13718.372, 13718.372, 13718.372] [-978213266.02697,  [...]
+
+-- !sql_multi_table_one_data --
+49     2023-08-08      false   \N      16275   -2144851675     -2303421957908954634    -46526938720058765      -13141.143      -6.866322332302E8       99999999.9      -99999999.9     2022-09-01T00:16:01     2023-03-25      2022-09-07T14:59:03     s               yvuILR2iNxfe8RRml       {"student":true,"name":"Alice","grade":9,"subjects":["math","science","history"]}
+
+-- !sql_multi_table_one_data --
+8      2023-08-14      true    109     -31573  -1362465190     3990845741226497177     2732763251146840270     -25698.553      1.312831962567818E9     \N      \N      2023-03-07T14:13:19     2022-10-18      2023-07-16T05:03:13     D               PBn1wa6X8WneZYLMac11zzyhGl7tPXB5XgjmOV8L6uav9ja5oY433ktb2yhyQQIqBveZPkme        \N
\ No newline at end of file
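
The two -- !sql_multi_table_one_data -- blocks above are the expected results for the qt_sql_multi_table_one_data checks added in the Groovy suite below, one per job (CSV and JSON). Each corresponds to queries of roughly this shape, with the test's ${tableName1} variable expanded:

    -- polled until the routine load makes data visible
    select count(*) from routine_load_dup_tbl_basic_multi_table;
    -- then compared against this .out file
    select * from routine_load_dup_tbl_basic_multi_table order by k00,k01;
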
diff --git a/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_create.sql b/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_create.sql
new file mode 100644
index 00000000000..ea0a1fe63d6
--- /dev/null
+++ b/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_create.sql
@@ -0,0 +1,46 @@
+CREATE TABLE routine_load_dup_tbl_basic_multi_table
+(
+    k00 INT             NOT NULL,
+    k01 DATE            NOT NULL,
+    k02 BOOLEAN         NULL,
+    k03 TINYINT         NULL,
+    k04 SMALLINT        NULL,
+    k05 INT             NULL,
+    k06 BIGINT          NULL,
+    k07 LARGEINT        NULL,
+    k08 FLOAT           NULL,
+    k09 DOUBLE          NULL,
+    k10 DECIMAL(9,1)    NULL,
+    k11 DECIMALV3(9,1)  NULL,
+    k12 DATETIME        NULL,
+    k13 DATEV2          NULL,
+    k14 DATETIMEV2      NULL,
+    k15 CHAR            NULL,
+    k16 VARCHAR         NULL,
+    k17 STRING          NULL,
+    k18 JSON            NULL,
+    
+    INDEX idx_inverted_k104 (`k05`) USING INVERTED,
+    INDEX idx_inverted_k110 (`k11`) USING INVERTED,
+    INDEX idx_inverted_k113 (`k13`) USING INVERTED,
+    INDEX idx_inverted_k114 (`k14`) USING INVERTED,
+    INDEX idx_inverted_k117 (`k17`) USING INVERTED PROPERTIES("parser" = "english"),
+    INDEX idx_ngrambf_k115 (`k15`) USING NGRAM_BF PROPERTIES("gram_size"="3", "bf_size"="256"),
+    INDEX idx_ngrambf_k116 (`k16`) USING NGRAM_BF PROPERTIES("gram_size"="3", "bf_size"="256"),
+    INDEX idx_ngrambf_k117 (`k17`) USING NGRAM_BF PROPERTIES("gram_size"="3", "bf_size"="256"),
+
+    INDEX idx_bitmap_k104 (`k02`) USING BITMAP,
+    
+)
+DUPLICATE KEY(k00)
+PARTITION BY RANGE(k01)
+(
+    PARTITION p1 VALUES [('2023-08-01'), ('2023-08-11')),
+    PARTITION p2 VALUES [('2023-08-11'), ('2023-08-21')),
+    PARTITION p3 VALUES [('2023-08-21'), ('2023-09-01'))
+)
+DISTRIBUTED BY HASH(k00) BUCKETS 32
+PROPERTIES (
+    "bloom_filter_columns"="k05",
+    "replication_num" = "1"
+);
\ No newline at end of file
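
Once a job is attached to this table, the suite polls job health before asserting on data. The statements involved, as used in the test (multi_table_csv stands in for the job name; the Groovy code reads the state, statistics, and state-change reason positionally from the result):

    SHOW ROUTINE LOAD FOR multi_table_csv;
    -- issued in the finally block for cleanup
    STOP ROUTINE LOAD FOR multi_table_csv;
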
diff --git a/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_drop.sql b/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_drop.sql
new file mode 100644
index 00000000000..188c29e39ec
--- /dev/null
+++ b/regression-test/suites/load_p0/routine_load/ddl/dup_tbl_basic_multi_table_drop.sql
@@ -0,0 +1 @@
+DROP TABLE IF EXISTS routine_load_dup_tbl_basic_multi_table;
\ No newline at end of file
diff --git a/regression-test/suites/load_p0/routine_load/test_routine_load.groovy b/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
index 11ad2fc2084..4b659df4eff 100644
--- a/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
+++ b/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
@@ -27,6 +27,10 @@ suite("test_routine_load","p0") {
                   "mow_tbl_array",
                  ]
 
+    def multiTables = [
+                  "dup_tbl_basic_multi_table",
+                 ]
+
     def jobs =   [
                   "dup_tbl_basic_job",
                   "uniq_tbl_basic_job",
@@ -118,6 +122,16 @@ suite("test_routine_load","p0") {
                     "k00=unix_timestamp('2007-11-30 
10:30:19'),k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17",
                   ]
 
+    def multiTableJobName = [
+                    "multi_table_csv",
+                    "multi_table_json",
+                  ]
+
+    def formats = [
+                    "csv",
+                    "json",
+                  ]            
+
     def loadedRows = [0,0,0,0,17,17,17]
 
     def filteredRows = [20,20,20,20,3,3,3]
@@ -144,7 +158,7 @@ suite("test_routine_load","p0") {
                     PROPERTIES
                     (
                         "exec_mem_limit" = "5",
-                        "max_batch_interval" = "1",
+                        "max_batch_interval" = "5",
                         "max_batch_rows" = "300000",
                         "max_batch_size" = "209715200"
                     )
@@ -886,4 +900,84 @@ suite("test_routine_load","p0") {
     //         }
     //     }
     // }
+
+    // multi_table
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def j = 0
+        for (String jobName in multiTableJobName) {
+            try {
+                for (String tableName in multiTables) {
+                sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
+                sql new File("""${context.file.parent}/ddl/${tableName}_create.sql""").text
+                }
+
+                sql """
+                    CREATE ROUTINE LOAD ${jobName}
+                    COLUMNS TERMINATED BY "|"
+                    PROPERTIES
+                    (
+                        "max_batch_interval" = "5",
+                        "format" = "${formats[j]}",
+                        "max_batch_rows" = "300000",
+                        "max_batch_size" = "209715200"
+                    )
+                    FROM KAFKA
+                    (
+                        "kafka_broker_list" = "${externalEnvIp}:${kafka_port}",
+                        "kafka_topic" = "${jobName}",
+                        "property.kafka_default_offsets" = "OFFSET_BEGINNING"
+                    );
+                """
+                sql "sync"
+
+                i = 0
+                for (String tableName in multiTables) {
+                    while (true) {
+                        sleep(1000)
+                        def res = sql "show routine load for ${jobName}"
+                        def state = res[0][8].toString()
+                        if (state == "NEED_SCHEDULE") {
+                            continue;
+                        }
+                        assertEquals(res[0][8].toString(), "RUNNING")
+                        break;
+                    }
+
+                    def count = 0
+                    def tableName1 =  "routine_load_" + tableName
+                    while (true) {
+                        def res = sql "select count(*) from ${tableName1}"
+                        def state = sql "show routine load for ${jobName}"
+                        log.info("routine load state: ${state[0][8].toString()}".toString())
+                        log.info("routine load statistic: ${state[0][14].toString()}".toString())
+                        log.info("reason of state changed: ${state[0][17].toString()}".toString())
+                        if (res[0][0] > 0) {
+                            break
+                        }
+                        if (count >= 120) {
+                            log.error("routine load can not visible for long time")
+                            assertEquals(20, res[0][0])
+                            break
+                        }
+                        sleep(5000)
+                        count++
+                    }
+                    
+                    if (i <= 3) {
+                        qt_sql_multi_table_one_data "select * from ${tableName1} order by k00,k01"
+                    } else {
+                        qt_sql_multi_table_one_data "select * from ${tableName1} order by k00"
+                    }
+                    
+                    i++
+                }
+            } finally {
+                sql "stop routine load for ${jobName}"
+                for (String tableName in multiTables) {
+                    sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
+                }
+            }
+            j++
+        }
+    }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
