This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
     new 9bfb5e38a2a  [regression-test](group commit) modify some case to make group commit can run all case (#26856)
9bfb5e38a2a is described below

commit 9bfb5e38a2a6a785313fa49ec070ed880fde22a5
Author: meiyi <myime...@gmail.com>
AuthorDate: Thu Jul 11 15:18:10 2024 +0800

    [regression-test](group commit) modify some case to make group commit can run all case (#26856)

    ## Proposed changes

    Enable `wait_internal_group_commit_finish` to check if all cases can pass:

    fe.conf
    ```
    wait_internal_group_commit_finish=true
    group_commit_interval_ms_default_value=2
    ```
    be.conf
    ```
    wait_internal_group_commit_finish=true
    ```

    some cases are not compatible, add `excludeSuites` in `regression-conf.groovy`:
    ```
    "test_group_commit_data_bytes_property,test_group_commit_interval_ms_property,insert_group_commit_into_unique_sync_mode,insert_group_commit_into_max_filter_ratio,test_group_commit_stream_load,test_wal_mem_back_pressure_fault_injection," +
    "test_table_options,test_show_transaction,test_base_insert_job,test_show_create_table_and_views,test_show_create_table_and_views_nereids,test_get_stream_load_state,test_ctas,test_publish_timeout," +
    "test_array_insert_overflow," +
    ```

    ## Further comments

    If this is a relatively large or complex change, kick off the discussion at [d...@doris.apache.org](mailto:d...@doris.apache.org) by explaining why you chose the solution you did and what alternatives you considered, etc...
---
 .../org/apache/doris/regression/suite/Suite.groovy     |  4 ++++
 .../suites/datatype_p0/datetimev2/test_timezone.groovy |  9 ++++++++-
 .../fault_tolerance_nereids.groovy                     |  3 ++-
 .../test_create_table_generated_column_legacy.groovy   |  3 ++-
 .../test_disable_move_memtable.groovy                  | 17 ++++++++++++++++-
 .../suites/jsonb_p0/test_jsonb_load_and_function.groovy |  8 ++++----
 .../suites/load/insert/test_array_string_insert.groovy |  7 ++++---
 .../suites/load_p0/http_stream/test_http_stream.groovy |  4 +++-
 .../auto_partition/test_auto_partition_behavior.groovy |  4 ++--
 .../test_list_default_multi_col_partition.groovy       |  5 +++--
 .../list_partition/test_list_default_partition.groovy  |  5 +++--
 .../list_partition/test_list_partition_datatype.groovy |  7 ++++---
 .../multi_partition/test_multi_column_partition.groovy |  7 ++++---
 13 files changed, 59 insertions(+), 24 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 5fa8a3fb922..fec337f59ed 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -1361,6 +1361,10 @@ class Suite implements GroovyInterceptable {
         return enableStorageVault;
     }
 
+    boolean isGroupCommitMode() {
+        return getFeConfig("wait_internal_group_commit_finish").equals("true")
+    }
+
     String getFeConfig(String key) {
         return sql_return_maparray("SHOW FRONTEND CONFIG LIKE '${key}'")[0].Value
     }
diff --git a/regression-test/suites/datatype_p0/datetimev2/test_timezone.groovy b/regression-test/suites/datatype_p0/datetimev2/test_timezone.groovy
index 981d8ecd0cf..5c16b37802c 100644
--- a/regression-test/suites/datatype_p0/datetimev2/test_timezone.groovy
+++ b/regression-test/suites/datatype_p0/datetimev2/test_timezone.groovy
@@ -32,6 +32,9 @@ suite("test_timezone") {
 
     sql """ set time_zone = '+02:00' """
     sql """ set enable_nereids_planner = false """
+    if (isGroupCommitMode()) {
+        sql """ set enable_nereids_planner = true """
+    }
     sql """insert into test_timezone values('2022-01-01 01:02:55', '2022-01-01 01:02:55.123')"""
     sql """insert into test_timezone values('2022-02-01 01:02:55Z', '2022-02-01 01:02:55.123Z')"""
     sql """insert into test_timezone values('2022-03-01 01:02:55+08:00', '2022-03-01 01:02:55.123UTC')"""
@@ -39,7 +42,11 @@ suite("test_timezone") {
     sql """insert into test_timezone values('2022-05-01 01:02:55+02:30', '2022-05-01 01:02:55.123-02:30')"""
     sql """insert into test_timezone values('2022-06-01T01:02:55+04:30', '2022-06-01 01:02:55.123-07:30')"""
     sql """insert into test_timezone values('20220701010255+07:00', '20220701010255-05:00')"""
-    sql """insert into test_timezone values('20220801+05:00', '20220801America/Argentina/Buenos_Aires')"""
+    if (isGroupCommitMode()) {
+        sql """insert into test_timezone values('2022-07-31 21:00', '2022-08-01')"""
+    } else {
+        sql """insert into test_timezone values('20220801+05:00', '20220801America/Argentina/Buenos_Aires')"""
+    }
     qt_legacy "select * from test_timezone order by k1"
 
     sql """ truncate table test_timezone """
diff --git a/regression-test/suites/ddl_p0/test_create_table_generated_column/fault_tolerance_nereids.groovy b/regression-test/suites/ddl_p0/test_create_table_generated_column/fault_tolerance_nereids.groovy
index 291916b9a44..e85abae01ef 100644
--- a/regression-test/suites/ddl_p0/test_create_table_generated_column/fault_tolerance_nereids.groovy
+++ b/regression-test/suites/ddl_p0/test_create_table_generated_column/fault_tolerance_nereids.groovy
@@ -175,9 +175,10 @@ suite("test_generated_column_fault_tolerance_nereids") {
         PROPERTIES("replication_num" = "1");
     ;"""
     // qt_common_default_test_insert_null
+    def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode"
     test {
         sql "INSERT INTO test_gen_col_common_ft(a,b) values(1,null);"
-        exception "Insert has filtered data in strict mode."
+        exception exception_str
     }
 
     // qt_common_default_test_insert_gencol
diff --git a/regression-test/suites/ddl_p0/test_create_table_generated_column/test_create_table_generated_column_legacy.groovy b/regression-test/suites/ddl_p0/test_create_table_generated_column/test_create_table_generated_column_legacy.groovy
index 7bf58260d1f..bd6d3900585 100644
--- a/regression-test/suites/ddl_p0/test_create_table_generated_column/test_create_table_generated_column_legacy.groovy
+++ b/regression-test/suites/ddl_p0/test_create_table_generated_column/test_create_table_generated_column_legacy.groovy
@@ -161,7 +161,8 @@ suite("test_create_table_generated_column_legacy") {
     // qt_common_default_test_insert_null
     test {
         sql "INSERT INTO test_gen_col_common_legacy(a,b) values(1,null);"
-        exception "Insert has filtered data in strict mode."
+        def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode"
"too many filtered rows" : "Insert has filtered data in strict mode" + exception exception_str } // qt_common_default_test_insert_gencol diff --git a/regression-test/suites/fault_injection_p0/test_disable_move_memtable.groovy b/regression-test/suites/fault_injection_p0/test_disable_move_memtable.groovy index 653f7e80799..8877ae0f2f4 100644 --- a/regression-test/suites/fault_injection_p0/test_disable_move_memtable.groovy +++ b/regression-test/suites/fault_injection_p0/test_disable_move_memtable.groovy @@ -288,7 +288,22 @@ suite("test_disable_move_memtable", "nonConcurrent") { sql """ set enable_nereids_dml=false """ insert_into_select_with_injection("VTabletWriterV2._init._output_tuple_desc_null", "test", "unknown destination tuple descriptor") insert_into_select_with_injection("VTabletWriterV2._init._output_tuple_desc_null", "test1", "success") - + + if (isGroupCommitMode()) { + def ret = sql "SHOW FRONTEND CONFIG like '%stream_load_default_memtable_on_sink_node%';" + logger.info("${ret}") + try { + sql "ADMIN SET FRONTEND CONFIG ('stream_load_default_memtable_on_sink_node' = 'true')" + sql """ set enable_nereids_planner=true """ + sql """ set enable_nereids_dml=true """ + stream_load_with_injection("VTabletWriterV2._init._output_tuple_desc_null", "baseall", "fail") + stream_load_with_injection("VTabletWriterV2._init._output_tuple_desc_null", "baseall1", "fail") + } finally { + sql "ADMIN SET FRONTEND CONFIG ('stream_load_default_memtable_on_sink_node' = '${ret[0][1]}')" + } + return + } + sql """ set enable_nereids_planner=true """ sql """ set enable_nereids_dml=true """ stream_load_with_injection("VTabletWriterV2._init._output_tuple_desc_null", "baseall", "fail") diff --git a/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy b/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy index 9993684d79c..b113c52899a 100644 --- a/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy +++ b/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy @@ -127,7 +127,7 @@ suite("test_jsonb_load_and_function", "p0") { sql """ set enable_insert_strict = true """ def success = true try { - sql """INSERT INTO ${testTable} VALUES(26, '')""" + sql """INSERT INTO ${testTable} VALUES(27, '')""" } catch(Exception ex) { logger.info("""INSERT INTO ${testTable} invalid json failed: """ + ex) success = false @@ -135,7 +135,7 @@ suite("test_jsonb_load_and_function", "p0") { assertEquals(false, success) success = true try { - sql """INSERT INTO ${testTable} VALUES(26, 'abc')""" + sql """INSERT INTO ${testTable} VALUES(28, 'abc')""" } catch(Exception ex) { logger.info("""INSERT INTO ${testTable} invalid json failed: """ + ex) success = false @@ -147,7 +147,7 @@ suite("test_jsonb_load_and_function", "p0") { sql """ set enable_insert_strict = false """ success = true try { - sql """INSERT INTO ${testTable} VALUES(26, '')""" + sql """INSERT INTO ${testTable} VALUES(29, '')""" } catch(Exception ex) { logger.info("""INSERT INTO ${testTable} invalid json failed: """ + ex) success = false @@ -155,7 +155,7 @@ suite("test_jsonb_load_and_function", "p0") { assertEquals(true, success) success = true try { - sql """INSERT INTO ${testTable} VALUES(26, 'abc')""" + sql """INSERT INTO ${testTable} VALUES(30, 'abc')""" } catch(Exception ex) { logger.info("""INSERT INTO ${testTable} invalid json failed: """ + ex) success = false diff --git a/regression-test/suites/load/insert/test_array_string_insert.groovy b/regression-test/suites/load/insert/test_array_string_insert.groovy 
index 23acc72242c..fd8e3ab1fa8 100644
--- a/regression-test/suites/load/insert/test_array_string_insert.groovy
+++ b/regression-test/suites/load/insert/test_array_string_insert.groovy
@@ -48,21 +48,22 @@ suite("test_array_string_insert", "load") {
     sql "set enable_insert_strict = true"
 
     // ARRAY<char> too long
+    def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode"
     test {
         sql "INSERT INTO ${testTable} VALUES (1, ['12345','123456'], [], NULL)"
-        exception "Insert has filtered data in strict mode"
+        exception exception_str
     }
 
     // NULL for NOT NULL column
     test {
         sql "INSERT INTO ${testTable} VALUES (2, ['12345','123'], NULL, NULL)"
-        exception "Insert has filtered data in strict mode"
+        exception exception_str
     }
 
     // ARRAY<ARRAY<char>> too long
     test {
         sql "INSERT INTO ${testTable} VALUES (3, NULL, ['4'], [['123456'],['222']])"
-        exception "Insert has filtered data in strict mode"
+        exception exception_str
     }
 
     // normal insert
diff --git a/regression-test/suites/load_p0/http_stream/test_http_stream.groovy b/regression-test/suites/load_p0/http_stream/test_http_stream.groovy
index 77114904f18..8a7d5763604 100644
--- a/regression-test/suites/load_p0/http_stream/test_http_stream.groovy
+++ b/regression-test/suites/load_p0/http_stream/test_http_stream.groovy
@@ -631,7 +631,9 @@ suite("test_http_stream", "p0") {
             }
             log.info("http_stream result: ${result}".toString())
             def json = parseJson(result)
-            assertEquals(label, json.Label.toLowerCase())
+            if (!isGroupCommitMode()) {
+                assertEquals(label, json.Label.toLowerCase())
+            }
             assertEquals("success", json.Status.toLowerCase())
             assertEquals(11, json.NumberTotalRows)
             assertEquals(0, json.NumberFilteredRows)
diff --git a/regression-test/suites/partition_p0/auto_partition/test_auto_partition_behavior.groovy b/regression-test/suites/partition_p0/auto_partition/test_auto_partition_behavior.groovy
index 13c96cd48bc..7b150246777 100644
--- a/regression-test/suites/partition_p0/auto_partition/test_auto_partition_behavior.groovy
+++ b/regression-test/suites/partition_p0/auto_partition/test_auto_partition_behavior.groovy
@@ -341,8 +341,8 @@ suite("test_auto_partition_behavior") {
     """
     test{
         sql """insert into `long_value` values ("jwklefjklwehrnkjlwbfjkwhefkjhwjkefhkjwehfkjwehfkjwehfkjbvkwebconqkcqnocdmowqmosqmojwnqknrviuwbnclkmwkj");"""
-
-        exception "Partition name's length is over limit of 50."
+        def exception_str = isGroupCommitMode() ? "s length is over limit of 50." : "Partition name's length is over limit of 50."
+        exception exception_str
     }
 
     // illegal partiton definetion
diff --git a/regression-test/suites/partition_p0/list_partition/test_list_default_multi_col_partition.groovy b/regression-test/suites/partition_p0/list_partition/test_list_default_multi_col_partition.groovy
index 408609639f3..756586bff46 100644
--- a/regression-test/suites/partition_p0/list_partition/test_list_default_multi_col_partition.groovy
+++ b/regression-test/suites/partition_p0/list_partition/test_list_default_multi_col_partition.groovy
@@ -76,10 +76,11 @@ suite("test_list_default_multi_col_partition") {
         DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1")
         """
     // insert value which is not allowed in existing partitions
+    def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode"
"too many filtered rows" : "Insert has filtered data in strict mode" try { test { sql """insert into list_default_multi_col_par values (10,1,1,1,24453.325,1,1)""" - exception """Insert has filtered data in strict mode""" + exception exception_str } } finally{ } @@ -102,7 +103,7 @@ suite("test_list_default_multi_col_partition") { try { test { sql """insert into list_default_multi_col_par values (10,1,1,1,24453.325,1,1)""" - exception """Insert has filtered data in strict mode""" + exception exception_str } } finally{ } diff --git a/regression-test/suites/partition_p0/list_partition/test_list_default_partition.groovy b/regression-test/suites/partition_p0/list_partition/test_list_default_partition.groovy index db92043e42f..fe2507b6d44 100644 --- a/regression-test/suites/partition_p0/list_partition/test_list_default_partition.groovy +++ b/regression-test/suites/partition_p0/list_partition/test_list_default_partition.groovy @@ -76,10 +76,11 @@ suite("test_list_default_partition") { DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1") """ // insert value which is not allowed in existing partitions + def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode" try { test { sql """insert into list_default_par values (10,1,1,1,24453.325,1,1)""" - exception """Insert has filtered data in strict mode""" + exception exception_str } } finally{ } @@ -102,7 +103,7 @@ suite("test_list_default_partition") { try { test { sql """insert into list_default_par values (10,1,1,1,24453.325,1,1)""" - exception """Insert has filtered data in strict mode""" + exception exception_str } } finally{ } diff --git a/regression-test/suites/partition_p0/list_partition/test_list_partition_datatype.groovy b/regression-test/suites/partition_p0/list_partition/test_list_partition_datatype.groovy index d06a0195adb..94d07c40291 100644 --- a/regression-test/suites/partition_p0/list_partition/test_list_partition_datatype.groovy +++ b/regression-test/suites/partition_p0/list_partition/test_list_partition_datatype.groovy @@ -352,9 +352,10 @@ suite("test_list_partition_datatype", "p0") { PROPERTIES ("replication_allocation" = "tag.location.default: 1") """ sql """INSERT INTO test_list_partition_ddl_tbl_1 VALUES("0000-01-01", "0000-01-01"), ("9999-12-31", "9999-12-31")""" + def exception_str = isGroupCommitMode() ? 
"too many filtered rows" : "Insert has filtered data in strict mode" test { sql """INSERT INTO test_list_partition_ddl_tbl_1 VALUES("2000-01-02", "2000-01-03")""" - exception "Insert has filtered data in strict mode" + exception exception_str } qt_sql1 "SELECT * FROM test_list_partition_ddl_tbl_1 order by k1" sql """INSERT INTO test_list_partition_ddl_tbl_1 VALUES("2000-11-02", "2000-11-03")""" @@ -452,7 +453,7 @@ suite("test_list_partition_datatype", "p0") { """ test { sql """insert into test_list_partition_tb2_char values('d', '1')""" - exception "Insert has filtered data in strict mode" + exception exception_str } sql """alter table test_list_partition_tb2_char add partition partition_add_1 values in ("aaa","bbb")""" def ret = sql "show partitions from test_list_partition_tb2_char where PartitionName='partition_add_1'" @@ -460,7 +461,7 @@ suite("test_list_partition_datatype", "p0") { test { sql """ insert into test_list_partition_tb2_char values('aa', '1')""" - exception "Insert has filtered data in strict mode" + exception exception_str } sql "insert into test_list_partition_tb2_char values('a', 'a')" sql "insert into test_list_partition_tb2_char values('aaa', 'a')" diff --git a/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy b/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy index 6caea640452..511ae23ae9b 100644 --- a/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy +++ b/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy @@ -280,10 +280,11 @@ suite("test_multi_partition_key", "p0") { "values(0, NULL, 0, 0, 0, '2000-01-01 00:00:00', '2000-01-01', 'a', 'a', 0.001, -0.001, 0.001)" qt_sql7 "select k1 from test_multi_col_test_partition_null_value partition(partition_a) where k2 is null" sql "ALTER TABLE test_multi_col_test_partition_null_value DROP PARTITION partition_a" + def exception_str = isGroupCommitMode() ? "too many filtered rows" : "Insert has filtered data in strict mode" test { sql "insert into test_multi_col_test_partition_null_value " + "values(0, NULL, 0, 0, 0, '2000-01-01 00:00:00', '2000-01-01', 'a', 'a', 0.001, -0.001, 0.001)" - exception "Insert has filtered data in strict mode" + exception exception_str } qt_sql8 "select k1 from test_multi_col_test_partition_null_value where k2 is null" // partition columns and add key column @@ -413,12 +414,12 @@ suite("test_multi_partition_key", "p0") { """ test { sql "insert into test_multi_col_insert values (-127, -200)" - exception "Insert has filtered data in strict mode" + exception exception_str } sql "insert into test_multi_col_insert values (10, -100)" test { sql "insert into test_multi_col_insert values (10, 50)" - exception "Insert has filtered data in strict mode" + exception exception_str } sql "insert into test_multi_col_insert values (10, 100)" --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org