This is an automated email from the ASF dual-hosted git repository.

hellostephen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 8ee53630d10 [regression-test](fix) remove ununsed cluster key cases (#46591)
8ee53630d10 is described below

commit 8ee53630d1021208868fc4241c9c59c0918be98d
Author: shuke <sh...@selectdb.com>
AuthorDate: Wed Jan 8 14:15:10 2025 +0800

    [regression-test](fix) remove ununsed cluster key cases (#46591)
---
 .../compaction/test_compaction_uniq_keys_ck.out    |  13 --
 .../test_compaction_uniq_keys_with_delete_ck.out   |  15 --
 .../test_vertical_compaction_uniq_keys_ck.out      |  16 --
 .../compaction/test_compaction_uniq_keys_ck.groovy | 136 -----------------
 ...test_compaction_uniq_keys_with_delete_ck.groovy | 164 ---------------------
 .../test_vertical_compaction_uniq_keys_ck.groovy   | 143 ------------------
 6 files changed, 487 deletions(-)

diff --git a/regression-test/data/compaction/test_compaction_uniq_keys_ck.out b/regression-test/data/compaction/test_compaction_uniq_keys_ck.out
deleted file mode 100644
index 0ab6761ce2f..00000000000
--- a/regression-test/data/compaction/test_compaction_uniq_keys_ck.out
+++ /dev/null
@@ -1,13 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !select_default --
-1 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2017-10-01T11:11:11.160 2017-10-01T11:11:11.100111 2020-01-02T00:00 1 31 19
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
--- !select_default2 --
-1 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2017-10-01T11:11:11.160 2017-10-01T11:11:11.100111 2020-01-02T00:00 1 31 19
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
diff --git a/regression-test/data/compaction/test_compaction_uniq_keys_with_delete_ck.out b/regression-test/data/compaction/test_compaction_uniq_keys_with_delete_ck.out
deleted file mode 100644
index ea06a5aa3c7..00000000000
--- a/regression-test/data/compaction/test_compaction_uniq_keys_with_delete_ck.out
+++ /dev/null
@@ -1,15 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !select_default --
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-
--- !select_default1 --
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
--- !select_default2 --
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
--- !select_default3 --
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
diff --git a/regression-test/data/compaction/test_vertical_compaction_uniq_keys_ck.out b/regression-test/data/compaction/test_vertical_compaction_uniq_keys_ck.out
deleted file mode 100644
index 863a064c640..00000000000
--- a/regression-test/data/compaction/test_vertical_compaction_uniq_keys_ck.out
+++ /dev/null
@@ -1,16 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !select_default --
-1 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2017-10-01T11:11:11.160 2017-10-01T11:11:11.100111 2020-01-02T00:00 1 31 19
-
--- !select_default1 --
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-
--- !select_default2 --
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-
--- !select_default3 --
-2 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2017-10-01T11:11:11.140 2017-10-01T11:11:11.120111 2020-01-03T00:00 1 32 20
-3 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
-4 2017-10-01 2017-10-01 2017-10-01T11:11:11.110 2017-10-01T11:11:11.110111 Beijing 10 1 \N \N \N \N 2020-01-05T00:00 1 34 20
diff --git a/regression-test/suites/compaction/test_compaction_uniq_keys_ck.groovy b/regression-test/suites/compaction/test_compaction_uniq_keys_ck.groovy
deleted file mode 100644
index c089572ccba..00000000000
--- a/regression-test/suites/compaction/test_compaction_uniq_keys_ck.groovy
+++ /dev/null
@@ -1,136 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import org.codehaus.groovy.runtime.IOGroovyMethods - -suite("test_compaction_uniq_keys_ck") { - if (isCloudMode()) { - logger.info("cloud does not support mow cluster key") - return - } - def tableName = "compaction_uniq_keys_ck" - - try { - String backend_id; - def backendId_to_backendIP = [:] - def backendId_to_backendHttpPort = [:] - getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); - - backend_id = backendId_to_backendIP.keySet()[0] - def (code, out, err) = show_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id)) - - logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def configList = parseJson(out.trim()) - assert configList instanceof List - - boolean disableAutoCompaction = true - for (Object ele in (List) configList) { - assert ele instanceof List<String> - if (((List<String>) ele)[0] == "disable_auto_compaction") { - disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2]) - } - } - - sql """ DROP TABLE IF EXISTS ${tableName} """ - sql """ - CREATE TABLE IF NOT EXISTS ${tableName} ( - `user_id` LARGEINT NOT NULL COMMENT "用户id", - `date` DATE NOT NULL COMMENT "数据灌入日期时间", - `datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_1` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_2` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间", - `city` VARCHAR(20) COMMENT "用户所在城市", - `age` SMALLINT COMMENT "用户年龄", - `sex` TINYINT COMMENT "用户性别", - `last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `datetime_val1` DATETIMEV2(3) DEFAULT "1970-01-01 00:00:00.111" COMMENT "用户最后一次访问时间", - `datetime_val2` DATETIME(6) DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `cost` BIGINT DEFAULT "0" COMMENT "用户总消费", - `max_dwell_time` INT DEFAULT "0" COMMENT "用户最大停留时间", - `min_dwell_time` INT DEFAULT "99999" COMMENT "用户最小停留时间") - UNIQUE KEY(`user_id`, `date`, `datev2`, `datetimev2_1`, `datetimev2_2`, `city`, `age`, `sex`) - CLUSTER BY(`last_visit_date_not_null`, `age`, `sex`, `city`) - DISTRIBUTED BY HASH(`user_id`) - PROPERTIES ( - "replication_num" = "1", - "enable_unique_key_merge_on_write" = "true" - ); - """ - - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2017-10-01 11:11:11.170000', '2017-10-01 11:11:11.110111', '2020-01-01', 1, 30, 20) - """ - - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 11:11:11.160000', '2017-10-01 11:11:11.100111', '2020-01-02', 1, 31, 19) - """ - - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 
11:11:11.150000', '2017-10-01 11:11:11.130111', '2020-01-02', 1, 31, 21) - """ - - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.140000', '2017-10-01 11:11:11.120111', '2020-01-03', 1, 32, 20) - """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.100000', '2017-10-01 11:11:11.140111', '2020-01-03', 1, 32, 22) - """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-04', '2020-01-04', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.150111', '2020-01-04', 1, 33, 21) - """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - - sql """ INSERT INTO ${tableName} VALUES - (4, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - - qt_select_default """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - //TabletId,ReplicaId,BackendId,SchemaHash,Version,LstSuccessVersion,LstFailedVersion,LstFailedTime,LocalDataSize,RemoteDataSize,RowCount,State,LstConsistencyCheckTime,CheckVersion,VersionCount,QueryHits,PathHash,MetaUrl,CompactionStatus - def tablets = sql_return_maparray """ show tablets from ${tableName}; """ - - // trigger compactions for all tablets in ${tableName} - trigger_and_wait_compaction(tableName, "cumulative") - - def replicaNum = get_table_replica_num(tableName) - logger.info("get table replica num: " + replicaNum) - int rowCount = 0 - for (def tablet in tablets) { - String tablet_id = tablet.TabletId - (code, out, err) = curl("GET", tablet.CompactionStatus) - logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def tabletJson = parseJson(out.trim()) - assert tabletJson.rowsets instanceof List - for (String rowset in (List<String>) tabletJson.rowsets) { - rowCount += Integer.parseInt(rowset.split(" ")[1]) - } - } - assert (rowCount < 8 * replicaNum) - qt_select_default2 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - } finally { - // try_sql("DROP TABLE IF EXISTS ${tableName}") - } -} diff --git a/regression-test/suites/compaction/test_compaction_uniq_keys_with_delete_ck.groovy b/regression-test/suites/compaction/test_compaction_uniq_keys_with_delete_ck.groovy deleted file mode 100644 index e668c4d62f0..00000000000 --- a/regression-test/suites/compaction/test_compaction_uniq_keys_with_delete_ck.groovy +++ /dev/null @@ -1,164 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import org.codehaus.groovy.runtime.IOGroovyMethods - -suite("test_compaction_uniq_keys_with_delete_ck") { - if (isCloudMode()) { - logger.info("cloud does not support mow cluster key") - return - } - def tableName = "test_compaction_uniq_keys_with_delete_ck" - - try { - String backend_id; - def backendId_to_backendIP = [:] - def backendId_to_backendHttpPort = [:] - getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); - - backend_id = backendId_to_backendIP.keySet()[0] - def (code, out, err) = show_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id)) - - logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def configList = parseJson(out.trim()) - assert configList instanceof List - - boolean disableAutoCompaction = true - for (Object ele in (List) configList) { - assert ele instanceof List<String> - if (((List<String>) ele)[0] == "disable_auto_compaction") { - disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2]) - } - } - - sql """ DROP TABLE IF EXISTS ${tableName} """ - sql """ - CREATE TABLE IF NOT EXISTS ${tableName} ( - `user_id` LARGEINT NOT NULL COMMENT "用户id", - `date` DATE NOT NULL COMMENT "数据灌入日期时间", - `datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_1` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_2` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间", - `city` VARCHAR(20) COMMENT "用户所在城市", - `age` SMALLINT COMMENT "用户年龄", - `sex` TINYINT COMMENT "用户性别", - `last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `datetime_val1` DATETIMEV2(3) DEFAULT "1970-01-01 00:00:00.111" COMMENT "用户最后一次访问时间", - `datetime_val2` DATETIME(6) DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `cost` BIGINT DEFAULT "0" COMMENT "用户总消费", - `max_dwell_time` INT DEFAULT "0" COMMENT "用户最大停留时间", - `min_dwell_time` INT DEFAULT "99999" COMMENT "用户最小停留时间") - UNIQUE KEY(`user_id`, `date`, `datev2`, `datetimev2_1`, `datetimev2_2`, `city`, `age`, `sex`) - CLUSTER BY(`sex`, `date`, `cost`) - DISTRIBUTED BY HASH(`user_id`) - PROPERTIES ( - "replication_num" = "1", - "enable_unique_key_merge_on_write" = "true", - "enable_mow_light_delete" = "true" - ); - """ - - // 2 - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2017-10-01 11:11:11.170000', '2017-10-01 11:11:11.110111', '2020-01-01', 1, 30, 20) - """ - - // 3 - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 11:11:11.160000', '2017-10-01 11:11:11.100111', '2020-01-02', 1, 31, 19) - """ - - // 4 - sql """ - DELETE FROM ${tableName} where user_id <= 5 - """ - - // 5 - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', 
'2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 11:11:11.150000', '2017-10-01 11:11:11.130111', '2020-01-02', 1, 31, 21) - """ - - // 6 - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.140000', '2017-10-01 11:11:11.120111', '2020-01-03', 1, 32, 20) - """ - - // 7 - sql """ - DELETE FROM ${tableName} where user_id <= 1 - """ - - qt_select_default """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - // 8 - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.100000', '2017-10-01 11:11:11.140111', '2020-01-03', 1, 32, 22) - """ - - // 9 - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-04', '2020-01-04', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.150111', '2020-01-04', 1, 33, 21) - """ - - // 10 - sql """ - DELETE FROM ${tableName} where user_id <= 2 - """ - - // 11 - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - - qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - // 12 - sql """ INSERT INTO ${tableName} VALUES - (4, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - sql """sync""" - qt_select_default2 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - //TabletId,ReplicaId,BackendId,SchemaHash,Version,LstSuccessVersion,LstFailedVersion,LstFailedTime,LocalDataSize,RemoteDataSize,RowCount,State,LstConsistencyCheckTime,CheckVersion,VersionCount,PathHash,MetaUrl,CompactionStatus - def tablets = sql_return_maparray """ show tablets from ${tableName}; """ - - // trigger compactions for all tablets in ${tableName} - trigger_and_wait_compaction(tableName, "cumulative") - - def replicaNum = get_table_replica_num(tableName) - logger.info("get table replica num: " + replicaNum) - int rowCount = 0 - for (def tablet in tablets) { - String tablet_id = tablet.TabletId - (code, out, err) = curl("GET", tablet.CompactionStatus) - logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def tabletJson = parseJson(out.trim()) - assert tabletJson.rowsets instanceof List - for (String rowset in (List<String>) tabletJson.rowsets) { - rowCount += Integer.parseInt(rowset.split(" ")[1]) - } - } - assert (rowCount < 8 * replicaNum) - qt_select_default3 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - } finally { - // try_sql("DROP TABLE IF EXISTS ${tableName}") - } -} diff --git a/regression-test/suites/compaction/test_vertical_compaction_uniq_keys_ck.groovy b/regression-test/suites/compaction/test_vertical_compaction_uniq_keys_ck.groovy deleted file mode 100644 index a5fb478a8e1..00000000000 --- a/regression-test/suites/compaction/test_vertical_compaction_uniq_keys_ck.groovy +++ /dev/null @@ -1,143 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. 
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import org.codehaus.groovy.runtime.IOGroovyMethods - -suite("test_vertical_compaction_uniq_keys_ck") { - if (isCloudMode()) { - logger.info("cloud does not support mow cluster key") - return - } - def tableName = "test_vertical_compaction_uniq_keys_ck" - - try { - String backend_id; - def backendId_to_backendIP = [:] - def backendId_to_backendHttpPort = [:] - getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); - - backend_id = backendId_to_backendIP.keySet()[0] - def (code, out, err) = show_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id)) - logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def configList = parseJson(out.trim()) - assert configList instanceof List - - boolean disableAutoCompaction = true - for (Object ele in (List) configList) { - assert ele instanceof List<String> - if (((List<String>) ele)[0] == "disable_auto_compaction") { - disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2]) - } - } - - sql """ DROP TABLE IF EXISTS ${tableName} """ - sql """ - CREATE TABLE ${tableName} ( - `user_id` LARGEINT NOT NULL COMMENT "用户id", - `date` DATE NOT NULL COMMENT "数据灌入日期时间", - `datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_1` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间", - `datetimev2_2` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间", - `city` VARCHAR(20) COMMENT "用户所在城市", - `age` SMALLINT COMMENT "用户年龄", - `sex` TINYINT COMMENT "用户性别", - `last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `datetime_val1` DATETIMEV2(3) DEFAULT "1970-01-01 00:00:00.111" COMMENT "用户最后一次访问时间", - `datetime_val2` DATETIME(6) DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次更新时间", - `last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "用户最后一次访问时间", - `cost` BIGINT DEFAULT "0" COMMENT "用户总消费", - `max_dwell_time` INT DEFAULT "0" COMMENT "用户最大停留时间", - `min_dwell_time` INT DEFAULT "99999" COMMENT "用户最小停留时间") - UNIQUE KEY(`user_id`, `date`, `datev2`, `datetimev2_1`, `datetimev2_2`, `city`, `age`, `sex`) - CLUSTER BY(`age`, `sex`, `user_id`) - DISTRIBUTED BY HASH(`user_id`) - PROPERTIES ( "replication_num" = "1", "enable_unique_key_merge_on_write"="true" ); - """ - - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2017-10-01 11:11:11.170000', '2017-10-01 11:11:11.110111', '2020-01-01', 1, 30, 20) - """ - - sql """ INSERT INTO ${tableName} VALUES - (1, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 11:11:11.160000', 
'2017-10-01 11:11:11.100111', '2020-01-02', 1, 31, 19) - """ - - sql """ - DELETE from ${tableName} where user_id <= 0 - """ - qt_select_default """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2017-10-01 11:11:11.150000', '2017-10-01 11:11:11.130111', '2020-01-02', 1, 31, 21) - """ - - sql """ INSERT INTO ${tableName} VALUES - (2, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.140000', '2017-10-01 11:11:11.120111', '2020-01-03', 1, 32, 20) - """ - - sql """ - DELETE from ${tableName} where user_id <= 1 - """ - qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2017-10-01 11:11:11.100000', '2017-10-01 11:11:11.140111', '2020-01-03', 1, 32, 22) - """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, '2020-01-04', '2020-01-04', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.150111', '2020-01-04', 1, 33, 21) - """ - - sql """ INSERT INTO ${tableName} VALUES - (3, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - - sql """ INSERT INTO ${tableName} VALUES - (4, '2017-10-01', '2017-10-01', '2017-10-01 11:11:11.110000', '2017-10-01 11:11:11.110111', 'Beijing', 10, 1, NULL, NULL, NULL, NULL, '2020-01-05', 1, 34, 20) - """ - - qt_select_default2 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - - //TabletId,ReplicaId,BackendId,SchemaHash,Version,LstSuccessVersion,LstFailedVersion,LstFailedTime,LocalDataSize,RemoteDataSize,RowCount,State,LstConsistencyCheckTime,CheckVersion,VersionCount,QueryHits,PathHash,MetaUrl,CompactionStatus - def tablets = sql_return_maparray """ show tablets from ${tableName}; """ - - // trigger compactions for all tablets in ${tableName} - trigger_and_wait_compaction(tableName, "cumulative") - - def replicaNum = get_table_replica_num(tableName) - logger.info("get table replica num: " + replicaNum) - int rowCount = 0 - for (def tablet in tablets) { - String tablet_id = tablet.TabletId - (code, out, err) = curl("GET", tablet.CompactionStatus) - logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err) - assertEquals(code, 0) - def tabletJson = parseJson(out.trim()) - assert tabletJson.rowsets instanceof List - for (String rowset in (List<String>) tabletJson.rowsets) { - rowCount += Integer.parseInt(rowset.split(" ")[1]) - } - } - assert (rowCount < 8 * replicaNum) - qt_select_default3 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ - } finally { - try_sql("DROP TABLE IF EXISTS ${tableName}") - } -} --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org