This is an automated email from the ASF dual-hosted git repository.

eldenmoon pushed a commit to branch variant-sparse
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/variant-sparse by this push:
     new 599d54b0e01 [Fix](predefine) add many cases ported from original cases (#50301)
599d54b0e01 is described below

commit 599d54b0e019e50208948920585ebec48ff25b4a
Author: lihangyu <lihan...@selectdb.com>
AuthorDate: Tue Apr 22 21:38:57 2025 +0800

    [Fix](predefine) add many cases ported from original cases (#50301)

    1. If the path is typed, it is not a sparse column, so we cannot read the
       sparse column even if the sparse column size has reached the limit;
       therefore SparseColumnExtractReader should not be used for typed columns.
    2. Handle Int128 in the field visitor correctly.
    3. ScalarType instances should not be modified in place.
---
 be/src/olap/rowset/segment_v2/column_reader.cpp    |   5 +-
 be/src/vec/common/field_visitors.h                 |   2 +-
 .../java/org/apache/doris/catalog/ScalarType.java  |   8 --
 .../common/util/FetchRemoteTabletSchemaUtil.java   |   9 +-
 .../data/variant_p0/predefine/delete_update.out    | Bin 0 -> 931 bytes
 .../data/variant_p0/predefine/element_function.out | Bin 0 -> 109 bytes
 .../variant_p0/predefine/insert_into_select.out    | Bin 0 -> 3026 bytes
 .../data/variant_p0/predefine/multi_var.out        | Bin 0 -> 953 bytes
 .../predefine/partial_update_parallel1.csv         |   5 +
 .../predefine/partial_update_parallel2.csv         |   5 +
 .../predefine/partial_update_parallel3.csv         |   5 +
 .../predefine/partial_update_parallel4.csv         |   3 +
 .../test_variant_compaction_with_sparse_limit.out  | Bin 0 -> 7035 bytes
 .../variant_p0/predefine/variant_hirachinal.out    | Bin 0 -> 623 bytes
 .../data/variant_p0/predefine/variant_with_mow.out | Bin 0 -> 1664 bytes
 regression-test/suites/variant_p0/agg.groovy       |  12 +-
 .../suites/variant_p0/delete_update.groovy         |   4 +-
 regression-test/suites/variant_p0/multi_var.groovy |   6 +-
 .../{ => predefine}/delete_update.groovy           |  14 +-
 .../variant_p0/predefine/element_function.groovy   |  33 +++++
 .../variant_p0/predefine/insert_into_select.groovy |  58 +++++++++
 .../variant_p0/{ => predefine}/multi_var.groovy    |  15 ++-
 .../predefine/test_predefine_pattern.groovy        |   2 +-
 ...est_variant_compaction_with_sparse_limit.groovy | 142 +++++++++++++++++++++
 .../{ => predefine}/variant_hirachinal.groovy      |  15 +--
 .../variant_p0/predefine/variant_with_mow.groovy   |  74 +++++++++++
 regression-test/suites/variant_p0/tpch/load.groovy |  10 +-
 .../suites/variant_p0/variant_hirachinal.groovy    |  11 --
 28 files changed, 376 insertions(+), 62 deletions(-)

diff --git a/be/src/olap/rowset/segment_v2/column_reader.cpp b/be/src/olap/rowset/segment_v2/column_reader.cpp
index 07e2aff5a2f..5c422684904 100644
--- a/be/src/olap/rowset/segment_v2/column_reader.cpp
+++ b/be/src/olap/rowset/segment_v2/column_reader.cpp
@@ -393,7 +393,10 @@ Status VariantColumnReader::_new_iterator_with_flat_leaves(ColumnIterator** iter
         RETURN_IF_ERROR(_create_sparse_merge_reader(iterator, opts, target_col, inner_iter));
         return Status::OK();
     }
-    if (existed_in_sparse_column || exceeded_sparse_column_limit) {
+    // If the path is typed, it means the path is not a sparse column, so we can't read the sparse column
+    // even if the sparse column size is reached limit
+    if (existed_in_sparse_column ||
+        (exceeded_sparse_column_limit && !relative_path.get_is_typed())) {
         // Sparse column exists or reached sparse size limit, read sparse column
         ColumnIterator* inner_iter;
         RETURN_IF_ERROR(_sparse_column_reader->new_iterator(&inner_iter));
diff --git a/be/src/vec/common/field_visitors.h b/be/src/vec/common/field_visitors.h
index e5468867b07..092708f1a62 100644
--- a/be/src/vec/common/field_visitors.h
+++ b/be/src/vec/common/field_visitors.h
@@ -76,7 +76,7 @@ typename std::decay_t<Visitor>::ResultType apply_visitor(Visitor&& visitor, F&&
         return visitor(field.template get<VariantField>());
     case Field::Types::IPv6:
         return visitor(field.template get<IPv6>());
-    case Field::Types::Int256:
+    case Field::Types::Int128:
         return visitor(field.template get<Int128>());
     default:
         throw doris::Exception(ErrorCode::INTERNAL_ERROR, "Bad type of Field {}",
diff --git a/fe/fe-common/src/main/java/org/apache/doris/catalog/ScalarType.java b/fe/fe-common/src/main/java/org/apache/doris/catalog/ScalarType.java
index cde1933385c..b8b48d390d3 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/catalog/ScalarType.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/catalog/ScalarType.java
@@ -1221,12 +1221,4 @@ public class ScalarType extends Type {
         result = 31 * result + scale;
         return result;
     }
-
-    public void setPrecision(int precision) {
-        this.precision = precision;
-    }
-
-    public void setScale(int scale) {
-        this.scale = scale;
-    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java
index b247c9c5299..3b37d48cd59 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java
@@ -112,12 +112,9 @@ public class FetchRemoteTabletSchemaUtil {
 
     public static Type getTypeFromTypeName(String typeName, int precision, int scale) {
         Type res = typeMap.getOrDefault(typeName, Type.UNSUPPORTED);
-        if (res.isScalarType()) {
-            ScalarType scalarType = (ScalarType) res;
-            if (scalarType.isDecimalV3() || scalarType.isDecimalV2()) {
-                scalarType.setPrecision(precision);
-                scalarType.setScale(scale);
-            }
+        if (res.isScalarType() && (res.isDecimalV3() || res.isDecimalV2())) {
+            // set precision and scale
+            res = ScalarType.createType(res.getPrimitiveType(), 0, precision, scale);
         }
         return res;
     }
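The FE change above is the substance of point 3 in the commit message: getTypeFromTypeName used to call setPrecision/setScale on the ScalarType it pulled out of typeMap, which appears to risk leaking a caller-specific precision/scale into a shared default type instance, so the patch drops the setters and builds a fresh type with ScalarType.createType instead. The sketch below is a minimal, self-contained illustration of that create-instead-of-mutate pattern under those assumptions; DecimalStandIn, TypeCache, and typeFor are hypothetical stand-ins, not the actual Doris catalog classes or methods.

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for a precision/scale-carrying catalog type; this is
// NOT the real org.apache.doris.catalog.ScalarType, only an illustration.
final class DecimalStandIn {
    final int precision;
    final int scale;

    DecimalStandIn(int precision, int scale) {
        this.precision = precision;
        this.scale = scale;
    }

    @Override
    public String toString() {
        return "decimal(" + precision + "," + scale + ")";
    }
}

public class TypeCache {
    // Shared map of default type instances, analogous in spirit to the typeMap
    // used by FetchRemoteTabletSchemaUtil. If a caller mutated the cached
    // object, every later lookup would observe the leaked precision/scale.
    private static final Map<String, DecimalStandIn> TYPE_MAP = new HashMap<>();

    static {
        TYPE_MAP.put("decimalv3", new DecimalStandIn(38, 9));
    }

    // Pattern the patch switches to: consult the shared entry only to decide
    // what to build, then return a brand-new instance carrying the requested
    // precision/scale instead of modifying the cached one.
    static DecimalStandIn typeFor(String name, int precision, int scale) {
        DecimalStandIn cached = TYPE_MAP.get(name);
        if (cached == null) {
            throw new IllegalArgumentException("unsupported type: " + name);
        }
        return new DecimalStandIn(precision, scale);
    }

    public static void main(String[] args) {
        System.out.println(typeFor("decimalv3", 10, 2)); // decimal(10,2)
        System.out.println(TYPE_MAP.get("decimalv3"));   // still decimal(38,9)
    }
}

Keeping the cached entries effectively immutable means a remote tablet schema with an unusual precision or scale can never bleed into unrelated type lookups.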
diff --git a/regression-test/data/variant_p0/predefine/delete_update.out b/regression-test/data/variant_p0/predefine/delete_update.out
new file mode 100644
index 00000000000..3fab0479cac
Binary files /dev/null and b/regression-test/data/variant_p0/predefine/delete_update.out differ
diff --git a/regression-test/data/variant_p0/predefine/element_function.out b/regression-test/data/variant_p0/predefine/element_function.out
new file mode 100644
index 00000000000..095c7b20356
Binary files /dev/null and b/regression-test/data/variant_p0/predefine/element_function.out differ
diff --git a/regression-test/data/variant_p0/predefine/insert_into_select.out b/regression-test/data/variant_p0/predefine/insert_into_select.out
new file mode 100644
index 00000000000..e8d1c13743a
Binary files /dev/null and b/regression-test/data/variant_p0/predefine/insert_into_select.out differ
diff --git a/regression-test/data/variant_p0/predefine/multi_var.out b/regression-test/data/variant_p0/predefine/multi_var.out
new file mode 100644
index 00000000000..18e31a4a5a5
Binary files /dev/null and b/regression-test/data/variant_p0/predefine/multi_var.out differ
diff --git a/regression-test/data/variant_p0/predefine/partial_update_parallel1.csv b/regression-test/data/variant_p0/predefine/partial_update_parallel1.csv
new file mode 100644
index 00000000000..4ba84bb7785
--- /dev/null
+++ b/regression-test/data/variant_p0/predefine/partial_update_parallel1.csv
@@ -0,0 +1,5 @@
+1,"ddddddddddd"
+2,"eeeeee"
+3,"aaaaa"
+4,"bbbbbbbb"
+5,"cccccccccccc"
diff --git a/regression-test/data/variant_p0/predefine/partial_update_parallel2.csv b/regression-test/data/variant_p0/predefine/partial_update_parallel2.csv new file mode 100644 index 00000000000..1560d6d3261 --- /dev/null +++ b/regression-test/data/variant_p0/predefine/partial_update_parallel2.csv @@ -0,0 +1,5 @@ +1,1111,199 +2,2222,299 +3,3333,399 +4,4444,499 +5,5555,599 diff --git a/regression-test/data/variant_p0/predefine/partial_update_parallel3.csv b/regression-test/data/variant_p0/predefine/partial_update_parallel3.csv new file mode 100644 index 00000000000..17abeef1a9c --- /dev/null +++ b/regression-test/data/variant_p0/predefine/partial_update_parallel3.csv @@ -0,0 +1,5 @@ +1,10,{"new_data1" : 1} +2,20,{"new_data2" : 2} +3,30,{"new_data3" : 3} +4,40,{"new_data4" : 4} +5,50,{"new_data5" : 5} diff --git a/regression-test/data/variant_p0/predefine/partial_update_parallel4.csv b/regression-test/data/variant_p0/predefine/partial_update_parallel4.csv new file mode 100644 index 00000000000..0a7cbd412fa --- /dev/null +++ b/regression-test/data/variant_p0/predefine/partial_update_parallel4.csv @@ -0,0 +1,3 @@ +1,1 +3,1 +5,1 diff --git a/regression-test/data/variant_p0/predefine/test_variant_compaction_with_sparse_limit.out b/regression-test/data/variant_p0/predefine/test_variant_compaction_with_sparse_limit.out new file mode 100644 index 00000000000..8e2cbaee3dd Binary files /dev/null and b/regression-test/data/variant_p0/predefine/test_variant_compaction_with_sparse_limit.out differ diff --git a/regression-test/data/variant_p0/predefine/variant_hirachinal.out b/regression-test/data/variant_p0/predefine/variant_hirachinal.out new file mode 100644 index 00000000000..a3b4f28e286 Binary files /dev/null and b/regression-test/data/variant_p0/predefine/variant_hirachinal.out differ diff --git a/regression-test/data/variant_p0/predefine/variant_with_mow.out b/regression-test/data/variant_p0/predefine/variant_with_mow.out new file mode 100644 index 00000000000..008e14f894a Binary files /dev/null and b/regression-test/data/variant_p0/predefine/variant_with_mow.out differ diff --git a/regression-test/suites/variant_p0/agg.groovy b/regression-test/suites/variant_p0/agg.groovy index 5788d053965..209789189dc 100644 --- a/regression-test/suites/variant_p0/agg.groovy +++ b/regression-test/suites/variant_p0/agg.groovy @@ -17,15 +17,21 @@ suite("regression_test_variant_agg"){ sql """DROP TABLE IF EXISTS var_agg""" + + int max_subcolumns_count = Math.floor(Math.random() * 10) + def var = "variant replace" + if (max_subcolumns_count % 2 == 0) { + var = "variant <'d' : int, 'b.f' : int, 'xxxx' : string, 'point' : bigint> replace" + } sql """ CREATE TABLE IF NOT EXISTS var_agg ( k bigint, - v variant replace, + v ${var}, s bigint sum ) AGGREGATE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 4 - properties("replication_num" = "1", "disable_auto_compaction" = "true"); + properties("replication_num" = "1", "disable_auto_compaction" = "true", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); """ sql """insert into var_agg values (1, '[1]', 1),(1, '{"a" : 1}', 1);""" sql """insert into var_agg values (2, '[2]', 2),(1, '{"a" : [[[1]]]}', 2);""" @@ -47,6 +53,7 @@ suite("regression_test_variant_agg"){ qt_sql6 "select cast(v['b'] as string) from var_agg where cast(v['b'] as string) is not null and length(v['b']) >4 order by k, cast(v['b'] as string) " qt_sql7 "select * from var_agg where cast(v['b'] as string) is not null and length(v['b']) >4 order by k, cast(v['b'] as string) " qt_sql8 "select * from var_agg 
order by 1, cast(2 as string), 3" + trigger_and_wait_compaction("var_agg", "cumulative") sql "alter table var_agg drop column s" sql """insert into var_agg select 5, '{"a" : 1234, "xxxx" : "fffff", "point" : 42000}' as json_str union all select 5, '{"a": 1123}' as json_str union all select *, '{"a": 11245, "x" : 42005}' as json_str from numbers("number" = "1024") limit 1024;""" @@ -58,6 +65,7 @@ suite("regression_test_variant_agg"){ union all select 5, '{"a": 1123}' as json_str union all select *, '{"a": 11245, "e" : [123456]}' as json_str from numbers("number" = "1024") limit 1024;""" sql """insert into var_agg select 5, '{"a" : 1234, "xxxx" : "fffff", "point" : 42000}' as json_str union all select 5, '{"a": 1123}' as json_str union all select *, '{"a": 11245, "f" : ["123456"]}' as json_str from numbers("number" = "1024") limit 1024;""" + trigger_and_wait_compaction("var_agg", "cumulative") qt_sql9 "select * from var_agg order by cast(2 as string), 3, 1 limit 10" qt_sql9 "select * from var_agg where k > 1024 order by cast(2 as string), 3, 1 limit 10" } \ No newline at end of file diff --git a/regression-test/suites/variant_p0/delete_update.groovy b/regression-test/suites/variant_p0/delete_update.groovy index a345af8323b..839b796b1f3 100644 --- a/regression-test/suites/variant_p0/delete_update.groovy +++ b/regression-test/suites/variant_p0/delete_update.groovy @@ -24,7 +24,7 @@ suite("regression_test_variant_delete_and_update", "variant_type"){ sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant + v variant<'a' : int, 'b' : array<int>, 'c' : double> ) UNIQUE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 3 @@ -50,7 +50,7 @@ suite("regression_test_variant_delete_and_update", "variant_type"){ sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant, + v variant<'a' : int, 'b' : array<int>, 'c' : double>, vs string ) UNIQUE KEY(`k`) diff --git a/regression-test/suites/variant_p0/multi_var.groovy b/regression-test/suites/variant_p0/multi_var.groovy index 91d5810a670..e3477858dde 100644 --- a/regression-test/suites/variant_p0/multi_var.groovy +++ b/regression-test/suites/variant_p0/multi_var.groovy @@ -37,9 +37,9 @@ suite("regression_test_variant_multi_var", "variant_type"){ sql "alter table ${table_name} add column ss string default null" sql """INSERT INTO ${table_name} select k, v, v, v, v from ${table_name}""" sql """DELETE FROM ${table_name} where k = 1""" - qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} order by k, 1, 2, 3, 4, 5, 6 limit 10""" - qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} where k > 200 order by k, 1, 2, 3, 4, 5, 6 limit 10""" - qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} where k > 300 order by k, 1, 2, 3, 4, 5, 6 limit 10""" + qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from multi_variants order by k, 1, 2, 3, 4, 5, 6 limit 10""" + qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from 
multi_variants where k > 200 order by k, 1, 2, 3, 4, 5, 6 limit 10""" + qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from multi_variants where k > 300 order by k, 1, 2, 3, 4, 5, 6 limit 10""" sql "alter table ${table_name} add column v4 variant default null" for (int i = 0; i < 20; i++) { diff --git a/regression-test/suites/variant_p0/delete_update.groovy b/regression-test/suites/variant_p0/predefine/delete_update.groovy similarity index 93% copy from regression-test/suites/variant_p0/delete_update.groovy copy to regression-test/suites/variant_p0/predefine/delete_update.groovy index a345af8323b..97f1440560a 100644 --- a/regression-test/suites/variant_p0/delete_update.groovy +++ b/regression-test/suites/variant_p0/predefine/delete_update.groovy @@ -17,18 +17,20 @@ import org.codehaus.groovy.runtime.IOGroovyMethods -suite("regression_test_variant_delete_and_update", "variant_type"){ +suite("regression_test_variant_predefine_delete_and_update", "variant_type"){ // MOR def table_name = "var_delete_update" sql "DROP TABLE IF EXISTS ${table_name}" + int max_subcolumns_count = Math.floor(Math.random() * 5) + def var = "variant <'a' : largeint, 'b' : array<int>, 'c' : double, 'd' : text>" sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant + v ${var} ) UNIQUE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 3 - properties("replication_num" = "1", "enable_unique_key_merge_on_write" = "false", "variant_enable_flatten_nested" = "false"); + properties("replication_num" = "1", "enable_unique_key_merge_on_write" = "false", "variant_enable_flatten_nested" = "false", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); """ // test mor table @@ -50,12 +52,12 @@ suite("regression_test_variant_delete_and_update", "variant_type"){ sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant, + v ${var}, vs string ) UNIQUE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 4 - properties("replication_num" = "1", "enable_unique_key_merge_on_write" = "true"); + properties("replication_num" = "1", "enable_unique_key_merge_on_write" = "true", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); """ sql "insert into var_delete_update_mow select k, cast(v as string), cast(v as string) from var_delete_update" sql "delete from ${table_name} where k = 1" @@ -109,7 +111,7 @@ suite("regression_test_variant_delete_and_update", "variant_type"){ `score` int(11) NOT NULL COMMENT "用户得分", `test` int(11) NULL COMMENT "null test", `dft` int(11) DEFAULT "4321", - `var` variant NULL) + `var` variant<'id' : int, 'name' : string, 'score' : int, 'test' : int, 'dft' : int> NULL) UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true", "disable_auto_compaction" = "true", "store_row_column" = "true") """ diff --git a/regression-test/suites/variant_p0/predefine/element_function.groovy b/regression-test/suites/variant_p0/predefine/element_function.groovy new file mode 100644 index 00000000000..e7dbdfc9b13 --- /dev/null +++ b/regression-test/suites/variant_p0/predefine/element_function.groovy @@ -0,0 +1,33 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("regression_test_variant_predefine_element_at", "p0") { + sql """ DROP TABLE IF EXISTS element_fn_test """ + sql """ + CREATE TABLE IF NOT EXISTS element_fn_test( + k bigint, + v variant<'arr1' : array<int>, 'arr2' : array<int>>, + v1 variant<'arr1' : array<int>, 'arr2' : array<int>> not null, + ) + UNIQUE KEY(`k`) + DISTRIBUTED BY HASH(k) BUCKETS 4 + properties("replication_num" = "1"); + """ + + sql """insert into element_fn_test values (1, '{"arr1" : [1, 2, 3]}', '{"arr2" : [4, 5, 6]}')""" + qt_sql """select array_first((x,y) -> (x - y) < 0, cast(v['arr1'] as array<int>), cast(v1['arr2'] as array<int>)) from element_fn_test order by k""" +} \ No newline at end of file diff --git a/regression-test/suites/variant_p0/predefine/insert_into_select.groovy b/regression-test/suites/variant_p0/predefine/insert_into_select.groovy new file mode 100644 index 00000000000..4d92d2822dc --- /dev/null +++ b/regression-test/suites/variant_p0/predefine/insert_into_select.groovy @@ -0,0 +1,58 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("regression_test_variant_predefine_insert_into_select", "variant_type"){ + def table_name = "insert_into_select" + sql "DROP TABLE IF EXISTS ${table_name}_var" + sql "DROP TABLE IF EXISTS ${table_name}_str" + sql """ + CREATE TABLE IF NOT EXISTS ${table_name}_var ( + k bigint, + v variant<'a' : int, 'b' : array<int>, 'c' : double> + ) + DUPLICATE KEY(`k`) + DISTRIBUTED BY HASH(k) BUCKETS 3 + properties("replication_num" = "1"); + """ + sql """ + CREATE TABLE IF NOT EXISTS ${table_name}_str ( + k bigint, + v string + ) + DUPLICATE KEY(`k`) + DISTRIBUTED BY HASH(k) BUCKETS 3 + properties("replication_num" = "1"); + """ + + sql """insert into ${table_name}_var values (1, '{"a" : 1, "b" : [1], "c": 1.0}')""" + sql """insert into ${table_name}_var values (2, '{"a" : 2, "b" : [1], "c": 2.0}')""" + sql """insert into ${table_name}_var values (3, '{"a" : 3, "b" : [3], "c": 3.0}')""" + sql """insert into ${table_name}_var values (4, '{"a" : 4, "b" : [4], "c": 4.0}')""" + sql """insert into ${table_name}_var values (5, '{"a" : 5, "b" : [5], "c": 5.0}')""" + sql """insert into ${table_name}_var values (6, '{"a" : 6, "b" : [6], "c": 6.0, "d" : [{"x" : 6}, {"y" : "6"}]}')""" + sql """insert into ${table_name}_var values (7, '{"a" : 7, "b" : [7], "c": 7.0, "e" : [{"x" : 7}, {"y" : "7"}]}')""" + sql """insert into ${table_name}_var values (8, '{"a" : 8, "b" : [8], "c": 8.0, "f" : [{"x" : 8}, {"y" : "8"}]}')""" + + sql """insert into ${table_name}_str select * from ${table_name}_var""" + sql """insert into ${table_name}_var select * from ${table_name}_str""" + sql """insert into ${table_name}_var select * from ${table_name}_var""" + qt_sql """select v["a"], v["b"], v["c"], v['d'], v['e'], v['f'] from ${table_name}_var order by k""" + qt_sql "select v from ${table_name}_str order by k" + qt_sql """insert into ${table_name}_var select * from ${table_name}_str""" + qt_sql """insert into ${table_name}_var select * from ${table_name}_var""" + qt_sql """select v["a"], v["b"], v["c"], v['d'], v['e'], v['f'] from insert_into_select_var order by k limit 215""" +} \ No newline at end of file diff --git a/regression-test/suites/variant_p0/multi_var.groovy b/regression-test/suites/variant_p0/predefine/multi_var.groovy similarity index 76% copy from regression-test/suites/variant_p0/multi_var.groovy copy to regression-test/suites/variant_p0/predefine/multi_var.groovy index 91d5810a670..a31451207b3 100644 --- a/regression-test/suites/variant_p0/multi_var.groovy +++ b/regression-test/suites/variant_p0/predefine/multi_var.groovy @@ -15,28 +15,32 @@ // specific language governing permissions and limitations // under the License. 
-suite("regression_test_variant_multi_var", "variant_type"){ +suite("regression_test_variant_predefine_multi_var", "variant_type"){ + // int max_subcolumns_count = Math.floor(Math.random() * 7) + int max_subcolumns_count = 3 def table_name = "multi_variants" sql "DROP TABLE IF EXISTS ${table_name}" sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant + v variant<'k1' : int, 'k2' : string, 'k3' : array<int>, 'k4' : double, 'k5' : array<array<int>>>, ) DUPLICATE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 4 - properties("replication_num" = "1"); + properties("replication_num" = "1", "disable_auto_compaction" = "true", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); """ sql """INSERT INTO ${table_name} SELECT *, '{"k1":1, "k2": "hello world", "k3" : [1234], "k4" : 1.10000, "k5" : [[123]]}' FROM numbers("number" = "101")""" sql """INSERT INTO ${table_name} SELECT *, '{"k7":123, "k8": "elden ring", "k9" : 1.1112, "k10" : [1.12], "k11" : ["moon"]}' FROM numbers("number" = "203") where number > 100""" sql """INSERT INTO ${table_name} SELECT *, '{"k7":123, "k8": "elden ring", "k9" : 1.1112, "k10" : [1.12], "k11" : ["moon"]}' FROM numbers("number" = "411") where number > 200""" - sql "alter table ${table_name} add column v2 variant default null" + trigger_and_wait_compaction(table_name, "cumulative") + sql "alter table ${table_name} add column v2 variant<'k1' : int, 'k2' : string, 'k3' : array<int>, 'k4' : double, 'k5' : array<array<int>>> default null" sql """INSERT INTO ${table_name} select k, v, v from ${table_name}""" - sql "alter table ${table_name} add column v3 variant default null" + sql "alter table ${table_name} add column v3 variant<'k1' : int, 'k2' : string, 'k3' : array<int>, 'k4' : double, 'k5' : array<array<int>>> default null" sql """INSERT INTO ${table_name} select k, v, v, v from ${table_name}""" sql "alter table ${table_name} add column ss string default null" sql """INSERT INTO ${table_name} select k, v, v, v, v from ${table_name}""" sql """DELETE FROM ${table_name} where k = 1""" + trigger_and_wait_compaction(table_name, "cumulative") qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} order by k, 1, 2, 3, 4, 5, 6 limit 10""" qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} where k > 200 order by k, 1, 2, 3, 4, 5, 6 limit 10""" qt_sql """select cast(v["k1"] as tinyint), cast(v2["k2"] as text), cast(v3["k3"] as string), cast(v["k7"] as tinyint), cast(v2["k8"] as text), cast(v3["k9"] as double) from ${table_name} where k > 300 order by k, 1, 2, 3, 4, 5, 6 limit 10""" @@ -45,4 +49,5 @@ suite("regression_test_variant_multi_var", "variant_type"){ for (int i = 0; i < 20; i++) { sql """insert into ${table_name} values (1, '{"a" : 1}', '{"a" : 1}', '{"a" : 1}', '{"a" : 1}', '{"a" : 1}')""" } + trigger_and_wait_compaction(table_name, "cumulative") } \ No newline at end of file diff --git a/regression-test/suites/variant_p0/predefine/test_predefine_pattern.groovy b/regression-test/suites/variant_p0/predefine/test_predefine_pattern.groovy index f729e24e133..b9857493a58 100644 --- a/regression-test/suites/variant_p0/predefine/test_predefine_pattern.groovy +++ b/regression-test/suites/variant_p0/predefine/test_predefine_pattern.groovy @@ -79,7 +79,7 @@ suite("test_variant_predefine_base", 
"p0"){ INDEX idx_b_b (var) USING INVERTED PROPERTIES("field_pattern"="a.b[0-9]", "parser"="unicode", "support_phrase" = "true") COMMENT '', INDEX idx_bb_glob (var) USING INVERTED PROPERTIES("field_pattern"="a.b?c", "parser"="unicode", "support_phrase" = "true") COMMENT '', INDEX idx_bx_glob (var) USING INVERTED PROPERTIES("field_pattern"="a.c*", "parser"="unicode", "support_phrase" = "true") COMMENT '' - ) ENGINE=OLAP DUPLICATE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 PROPERTIES ( "replication_allocation" = "tag.location.default: 1", "disable_auto_compaction" = "true")""" + ) ENGINE=OLAP DUPLICATE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 PROPERTIES ( "replication_allocation" = "tag.location.default: 1", "disable_auto_compaction" = "true", "variant_max_subcolumns_count" = "${count}")""" sql """insert into ${tableName} values(1, '{"a" : {"b" : 789, "*" : 789, "b1" : 789, "bxc" : 789, "c2323" : 789}}')""" sql """insert into ${tableName} values(2, '{"a" : {"b" : 111, "*" : 111, "b1" : 111, "bxc" : 111, "c2323" : 111}}')""" diff --git a/regression-test/suites/variant_p0/predefine/test_variant_compaction_with_sparse_limit.groovy b/regression-test/suites/variant_p0/predefine/test_variant_compaction_with_sparse_limit.groovy new file mode 100644 index 00000000000..ac0709a3e91 --- /dev/null +++ b/regression-test/suites/variant_p0/predefine/test_variant_compaction_with_sparse_limit.groovy @@ -0,0 +1,142 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.codehaus.groovy.runtime.IOGroovyMethods +import org.awaitility.Awaitility + +suite("test_compaction_variant_predefine_with_sparse_limit", "nonConcurrent") { + def backendId_to_backendIP = [:] + def backendId_to_backendHttpPort = [:] + getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); + + def set_be_config = { key, value -> + for (String backend_id: backendId_to_backendIP.keySet()) { + def (code, out, err) = update_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), key, value) + logger.info("update config: code=" + code + ", out=" + out + ", err=" + err) + } + } + try { + String backend_id = backendId_to_backendIP.keySet()[0] + def (code, out, err) = show_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id)) + logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def configList = parseJson(out.trim()) + assert configList instanceof List + + boolean disableAutoCompaction = true + for (Object ele in (List) configList) { + assert ele instanceof List<String> + if (((List<String>) ele)[0] == "disable_auto_compaction") { + disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2]) + } + } + + set_be_config("variant_max_sparse_column_statistics_size", "2") + int max_subcolumns_count = Math.floor(Math.random() * 5) + if (max_subcolumns_count == 1) { + max_subcolumns_count = 0 + } + def create_table = { tableName, buckets="auto", key_type="DUPLICATE" -> + sql "DROP TABLE IF EXISTS ${tableName}" + def var_def = "variant <'sala' : int, 'ddd' : double, 'z' : double>" + if (key_type == "AGGREGATE") { + var_def = "variant <'sala' : int, 'ddd' : double, 'z' : double> replace" + } + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + k bigint, + v ${var_def} + ) + ${key_type} KEY(`k`) + DISTRIBUTED BY HASH(k) BUCKETS ${buckets} + properties("replication_num" = "1", "disable_auto_compaction" = "true", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); + """ + } + def key_types = ["DUPLICATE", "UNIQUE", "AGGREGATE"] + // def key_types = ["AGGREGATE"] + for (int i = 0; i < key_types.size(); i++) { + def tableName = "simple_variant_${key_types[i]}" + // 1. 
simple cases + create_table.call(tableName, "1", key_types[i]) + def insert1 = { + sql """insert into ${tableName} values (1, '{"x" : [1]}'),(13, '{"a" : 1}');""" + sql """insert into ${tableName} values (2, '{"a" : "1"}'),(14, '{"a" : [[[1]]]}');""" + sql """insert into ${tableName} values (3, '{"x" : [3]}'),(15, '{"a" : 1}')""" + sql """insert into ${tableName} values (4, '{"y": 1}'),(16, '{"a" : "1223"}');""" + sql """insert into ${tableName} values (5, '{"z" : 2.0}'),(17, '{"a" : [1]}');""" + sql """insert into ${tableName} values (6, '{"x" : 111}'),(18, '{"a" : ["1", 2, 1.1]}');""" + sql """insert into ${tableName} values (7, '{"m" : 1}'),(19, '{"a" : 1, "b" : {"c" : 1}}');""" + sql """insert into ${tableName} values (8, '{"l" : 2}'),(20, '{"a" : 1, "b" : {"c" : [{"a" : 1}]}}');""" + sql """insert into ${tableName} values (9, '{"g" : 1.11}'),(21, '{"a" : 1, "b" : {"c" : [{"a" : 1}]}}');""" + sql """insert into ${tableName} values (10, '{"z" : 1.1111}'),(22, '{"a" : 1, "b" : {"c" : [{"a" : 1}]}}');""" + sql """insert into ${tableName} values (11, '{"sala" : 0}'),(1999, '{"a" : 1, "b" : {"c" : 1}}'),(19921, '{"a" : 1, "b" : 10}');""" + sql """insert into ${tableName} values (12, '{"dddd" : 0.1}'),(1022, '{"a" : 1, "b" : 10}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + } + insert1.call(); + insert1.call(); + qt_sql_1 "SELECT * FROM ${tableName} ORDER BY k, cast(v as string); " + qt_sql_2 "select k, cast(v['a'] as array<int>) from ${tableName} where size(cast(v['a'] as array<int>)) > 0 order by k" + qt_sql_3 "select k, v['a'], cast(v['b'] as string) from ${tableName} where length(cast(v['b'] as string)) > 4 order by k" + qt_sql_5 "select cast(v['b'] as string), cast(v['b']['c'] as string) from ${tableName} where cast(v['b'] as string) != 'null' and cast(v['b'] as string) != '{}' order by k desc, 1, 2 limit 10;" + + + //TabletId,ReplicaId,BackendId,SchemaHash,Version,LstSuccessVersion,LstFailedVersion,LstFailedTime,LocalDataSize,RemoteDataSize,RowCount,State,LstConsistencyCheckTime,CheckVersion,VersionCount,QueryHits,PathHash,MetaUrl,CompactionStatus + def tablets = sql_return_maparray """ show tablets from ${tableName}; """ + + // trigger compactions for all tablets in ${tableName} + trigger_and_wait_compaction(tableName, "cumulative") + + int rowCount = 0 + for (def tablet in tablets) { + String tablet_id = tablet.TabletId + (code, out, err) = curl("GET", tablet.CompactionStatus) + logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def tabletJson = parseJson(out.trim()) + assert tabletJson.rowsets instanceof List + for (String rowset in (List<String>) tabletJson.rowsets) { + rowCount += Integer.parseInt(rowset.split(" ")[1]) + } + } + // assert (rowCount < 8) + qt_sql_11 "SELECT * FROM ${tableName} ORDER BY k, cast(v as string); " + qt_sql_22 "select k, cast(v['a'] as array<int>) from ${tableName} where size(cast(v['a'] as array<int>)) > 0 order by k" + qt_sql_33 "select k, v['a'], cast(v['b'] as string) from ${tableName} where length(cast(v['b'] as string)) > 4 order by k" + qt_sql_55 "select cast(v['b'] as string), cast(v['b']['c'] as string) from ${tableName} where cast(v['b'] as string) != 'null' and cast(v['b'] as string) != '{}' order by k desc limit 10;" + } + for (int i = 0; i < key_types.size(); i++) { + def tableName = "simple_variant_${key_types[i]}" + def insert2 = { + sql """insert into ${tableName} values (1, '{"sala" : 0.1, "ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1022, '{"ddd" : 1, "z" : 10, "a" : 1, "b" : 
{"c" : 1}}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + sql """insert into ${tableName} values (2, '{"sala" : 0.1, "ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1022, '{"ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + sql """insert into ${tableName} values (3, '{"sala" : 0.1, "ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1022, '{"ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + sql """insert into ${tableName} values (4, '{"sala" : 0.1, "ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1022, '{"ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + sql """insert into ${tableName} values (5, '{"sala" : 0.1, "ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1022, '{"ddd" : 1, "z" : 10, "a" : 1, "b" : {"c" : 1}}'),(1029, '{"a" : 1, "b" : {"c" : 1}}');""" + } + insert2.call(); + insert2.call(); + trigger_and_wait_compaction(tableName, "cumulative") + sql "set topn_opt_limit_threshold = 1" + qt_sql "select * from ${tableName} order by k limit 5;" + sql "set topn_opt_limit_threshold = 10" + qt_sql "select * from ${tableName} order by k limit 5;" + } + } finally { + // set back to default + set_be_config("variant_max_sparse_column_statistics_size", "10000") + } +} diff --git a/regression-test/suites/variant_p0/variant_hirachinal.groovy b/regression-test/suites/variant_p0/predefine/variant_hirachinal.groovy similarity index 77% copy from regression-test/suites/variant_p0/variant_hirachinal.groovy copy to regression-test/suites/variant_p0/predefine/variant_hirachinal.groovy index a942150e57f..86cdbfc4966 100644 --- a/regression-test/suites/variant_p0/variant_hirachinal.groovy +++ b/regression-test/suites/variant_p0/predefine/variant_hirachinal.groovy @@ -15,25 +15,14 @@ // specific language governing permissions and limitations // under the License. -suite("regression_test_variant_hirachinal", "variant_type"){ - def set_be_config = { key, value -> - String backend_id; - def backendId_to_backendIP = [:] - def backendId_to_backendHttpPort = [:] - getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); - - backend_id = backendId_to_backendIP.keySet()[0] - def (code, out, err) = update_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), key, value) - logger.info("update config: code=" + code + ", out=" + out + ", err=" + err) - } - +suite("regression_test_variant_predefine_hirachinal", "variant_type"){ def table_name = "var_rs" sql "DROP TABLE IF EXISTS ${table_name}" sql """ CREATE TABLE IF NOT EXISTS ${table_name} ( k bigint, - v variant + v variant<'a' : largeint, 'c.d' : text> ) DUPLICATE KEY(`k`) DISTRIBUTED BY HASH(k) BUCKETS 1 diff --git a/regression-test/suites/variant_p0/predefine/variant_with_mow.groovy b/regression-test/suites/variant_p0/predefine/variant_with_mow.groovy new file mode 100644 index 00000000000..bcb0d387ebf --- /dev/null +++ b/regression-test/suites/variant_p0/predefine/variant_with_mow.groovy @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("variant_predefine_with_mow") { + sql "DROP TABLE IF EXISTS var_mow" + sql """ + CREATE TABLE `var_mow` ( + `PORTALID` int NOT NULL, + `OBJECTTYPEID` varchar(65533) NOT NULL, + `OBJECTIDHASH` tinyint NOT NULL, + `OBJECTID` bigint NOT NULL, + `DELETED` boolean NULL DEFAULT "FALSE", + `INGESTIONTIMESTAMP` bigint NOT NULL, + `PROCESSEDTIMESTAMP` bigint NOT NULL, + `VERSION` bigint NULL DEFAULT "0", + `OVERFLOWPROPERTIES` variant<'a' : int, 'b' : string, 'c' : largeint> NULL, + INDEX objects_properties_idx (`OVERFLOWPROPERTIES`) USING INVERTED COMMENT 'This is an inverted index on all properties of the object' + ) ENGINE=OLAP + UNIQUE KEY(`PORTALID`, `OBJECTTYPEID`, `OBJECTIDHASH`, `OBJECTID`) + DISTRIBUTED BY HASH(`PORTALID`, `OBJECTTYPEID`, `OBJECTIDHASH`) BUCKETS 1 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "enable_unique_key_merge_on_write" = "true", + "function_column.sequence_col" = "VERSION", + "variant_max_subcolumns_count" = "100", + "disable_auto_compaction" = "true" + ); + """ + + sql """ insert into var_mow values(944935233, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 30, "b": 40, "c": 50, "d": 60, "e": 70, "f": 80, "g": 90, "h": 100, "i": 110, "j": 120}'); """ + sql """ insert into var_mow values(944935234, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 31, "b": 41, "c": 51, "d": 61, "e": 71, "f": 81, "g": 91, "h": 101, "i": 111, "j": 121}'); """ + sql """ insert into var_mow values(944935235, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 32, "b": 42, "c": 52, "d": 62, "e": 72, "f": 82, "g": 92, "h": 102, "i": 112, "j": 122}'); """ + sql """ insert into var_mow values(944935236, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 33, "b": 43, "c": 53, "d": 63, "e": 73, "f": 83, "g": 93, "h": 103, "i": 113, "j": 123}'); """ + sql """ insert into var_mow values(944935237, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 34, "b": 44, "c": 54, "d": 64, "e": 74, "f": 84, "g": 94, "h": 104, "i": 114, "j": 124}'); """ + sql """ insert into var_mow values(944935238, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 35, "b": 45, "c": 55, "d": 65, "e": 75, "f": 85, "g": 95, "h": 105, "i": 115, "j": 125}'); """ + sql """ insert into var_mow values(944935239, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 36, "b": 46, "c": 56, "d": 66, "e": 76, "f": 86, "g": 96, "h": 106, "i": 116, "j": 126}'); """ + sql """ insert into var_mow values(944935240, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 37, "b": 47, "c": 57, "d": 67, "e": 77, "f": 87, "g": 97, "h": 107, "i": 117, "j": 127}'); """ + sql """ insert into var_mow values(944935241, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 38, "b": 48, "c": 58, "d": 68, "e": 78, "f": 88, "g": 98, "h": 108, "i": 118, "j": 128}'); """ + sql """ insert into var_mow values(944935242, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 39, "b": 49, "c": 59, "d": 69, "e": 79, "f": 
89, "g": 99, "h": 109, "i": 119, "j": 129}'); """ + sql """ insert into var_mow values(944935243, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 40, "b": 50, "c": 60, "d": 70, "e": 80, "f": 90, "g": 100, "h": 110, "i": 120, "j": 130}'); """ + sql """ insert into var_mow values(944935244, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 41, "b": 51, "c": 61, "d": 71, "e": 81, "f": 91, "g": 101, "h": 111, "i": 121, "j": 131}'); """ + sql """ insert into var_mow values(944935245, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 42, "b": 52, "c": 62, "d": 72, "e": 82, "f": 92, "g": 102, "h": 112, "i": 122, "j": 132}'); """ + sql """ insert into var_mow values(944935246, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 43, "b": 53, "c": 63, "d": 73, "e": 83, "f": 93, "g": 103, "h": 113, "i": 123, "j": 133}'); """ + sql """ insert into var_mow values(944935247, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 44, "b": 54, "c": 64, "d": 74, "e": 84, "f": 94, "g": 104, "h": 114, "i": 124, "j": 134}'); """ + sql """ insert into var_mow values(944935248, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 45, "b": 55, "c": 65, "d": 75, "e": 85, "f": 95, "g": 105, "h": 115, "i": 125, "j": 135}'); """ + sql """ insert into var_mow values(944935249, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 46, "b": 56, "c": 66, "d": 76, "e": 86, "f": 96, "g": 106, "h": 116, "i": 126, "j": 136}'); """ + sql """ insert into var_mow values(944935250, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 47, "b": 57, "c": 67, "d": 77, "e": 87, "f": 97, "g": 107, "h": 117, "i": 127, "j": 137}'); """ + sql """ insert into var_mow values(944935251, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 48, "b": 58, "c": 68, "d": 78, "e": 88, "f": 98, "g": 108, "h": 118, "i": 128, "j": 138}'); """ + sql """ insert into var_mow values(944935252, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 49, "b": 59, "c": 69, "d": 79, "e": 89, "f": 99, "g": 109, "h": 119, "i": 129, "j": 139}'); """ + sql """ insert into var_mow values(944935253, '2', 1, 1, 'TRUE', 1741682404960657985, 1741682404960657985, 0, '{"a": 50, "b": 60, "c": 70, "d": 80, "e": 90, "f": 100, "g": 110, "h": 120, "i": 130, "j": 140}'); """ + + trigger_and_wait_compaction("var_mow", "cumulative") + + qt_sql """ select objectId from var_mow objects_alias where objects_alias.portalid = 944935233 and objects_alias.objectTypeId = '2' limit 100 """ + // topn two phase enabled + qt_sql """select * from var_mow order by portalid limit 5""" + // topn two phase disabled + qt_sql """select * from var_mow order by portalid + OBJECTIDHASH limit 5""" + qt_sql """select variant_type(OVERFLOWPROPERTIES) from var_mow limit 1""" +} \ No newline at end of file diff --git a/regression-test/suites/variant_p0/tpch/load.groovy b/regression-test/suites/variant_p0/tpch/load.groovy index 873277e319f..44ef25e72b1 100644 --- a/regression-test/suites/variant_p0/tpch/load.groovy +++ b/regression-test/suites/variant_p0/tpch/load.groovy @@ -37,15 +37,19 @@ suite("load") { tables.forEach { tableName -> sql "DROP TABLE IF EXISTS ${tableName}" + int max_subcolumns_count = Math.floor(Math.random() * 7) + def var_def = "variant" + if (max_subcolumns_count % 2) { + var_def = "variant<'O_CLERK' : string, 'C_COMMENT' : string, 'L_RETURNFLAG' : string, 'S_COMMENT' : string, 'S_ACCTBAL' : double>" 
+ } sql """ CREATE TABLE IF NOT EXISTS ${tableName} ( k bigint, - var variant - + var ${var_def} ) DUPLICATE KEY(`k`) DISTRIBUTED BY RANDOM BUCKETS 5 - properties("replication_num" = "1", "disable_auto_compaction" = "false"); + properties("replication_num" = "1", "disable_auto_compaction" = "false", "variant_max_subcolumns_count" = "${max_subcolumns_count}"); """ streamLoad { // a default db 'regression_test' is specified in diff --git a/regression-test/suites/variant_p0/variant_hirachinal.groovy b/regression-test/suites/variant_p0/variant_hirachinal.groovy index a942150e57f..b3eaaccd2b6 100644 --- a/regression-test/suites/variant_p0/variant_hirachinal.groovy +++ b/regression-test/suites/variant_p0/variant_hirachinal.groovy @@ -16,17 +16,6 @@ // under the License. suite("regression_test_variant_hirachinal", "variant_type"){ - def set_be_config = { key, value -> - String backend_id; - def backendId_to_backendIP = [:] - def backendId_to_backendHttpPort = [:] - getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort); - - backend_id = backendId_to_backendIP.keySet()[0] - def (code, out, err) = update_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), key, value) - logger.info("update config: code=" + code + ", out=" + out + ", err=" + err) - } - def table_name = "var_rs" sql "DROP TABLE IF EXISTS ${table_name}" --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org