This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.0 by this push:
     new 7cc8ab26c69 Revert "[refactor](Nereids) refactor infer predicate rule to avoid lost cast (#25637)"
7cc8ab26c69 is described below

commit 7cc8ab26c69a39180bf2e6934e54a14171cbfe97
Author: Kang <kxiao.ti...@gmail.com>
AuthorDate: Thu Oct 26 10:37:50 2023 +0800

    Revert "[refactor](Nereids) refactor infer predicate rule to avoid lost 
cast (#25637)"
    
    This reverts commit 5ca2fc6b1ede1141b557a17a3db166e5b93edd3a, since it was mixed in by mistake
---
 be/src/olap/rowset/segment_v2/segment_iterator.cpp |  40 +++++++--------------
 be/src/olap/rowset/segment_v2/segment_iterator.h   |   1 -
 be/src/vec/columns/column.h                        |   2 --
 be/src/vec/columns/column_map.cpp                  |  21 -----------
 be/src/vec/columns/column_map.h                    |   4 +--
 be/src/vec/columns/column_nullable.h               |   2 --
 be/src/vec/columns/column_struct.cpp               |  15 --------
 be/src/vec/columns/column_struct.h                 |   4 ---
 .../test_nestedtypes_csv_insert_into_with_s3.out   |   2 +-
 .../test_nestedtypes_json_insert_into_with_s3.out  |   6 ++--
 regression-test/data/export/test_struct_export.out | Bin 442 -> 469 bytes
 .../data/insert_p0/test_struct_insert.out          | Bin 417 -> 444 bytes
 .../data/load_p0/stream_load/map_char_test.csv     |   4 ---
 .../data/load_p0/stream_load/test_stream_load.out  |   6 ----
 .../aggregate/aggregate_group_by_metric_type.out   | Bin 154 -> 170 bytes
 .../load_p0/stream_load/test_stream_load.groovy    |  40 +--------------------
 16 files changed, 19 insertions(+), 128 deletions(-)

diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp
index ce04b05d4b4..bd2b9df914c 100644
--- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp
+++ b/be/src/olap/rowset/segment_v2/segment_iterator.cpp
@@ -1498,38 +1498,24 @@ bool SegmentIterator::_can_evaluated_by_vectorized(ColumnPredicate* predicate) {
     }
 }
 
-bool SegmentIterator::_has_char_type(const Field& column_desc) {
-    switch (column_desc.type()) {
-    case FieldType::OLAP_FIELD_TYPE_CHAR:
-        return true;
-    case FieldType::OLAP_FIELD_TYPE_ARRAY:
-        return _has_char_type(*column_desc.get_sub_field(0));
-    case FieldType::OLAP_FIELD_TYPE_MAP:
-        return _has_char_type(*column_desc.get_sub_field(0)) ||
-               _has_char_type(*column_desc.get_sub_field(1));
-    case FieldType::OLAP_FIELD_TYPE_STRUCT:
-        for (int idx = 0; idx < column_desc.get_sub_field_count(); ++idx) {
-            if (_has_char_type(*column_desc.get_sub_field(idx))) {
-                return true;
-            }
-        }
-        return false;
-    default:
-        return false;
-    }
-};
-
 void SegmentIterator::_vec_init_char_column_id() {
     for (size_t i = 0; i < _schema->num_column_ids(); i++) {
         auto cid = _schema->column_id(i);
-        const Field* column_desc = _schema->column(cid);
+        auto column_desc = _schema->column(cid);
 
-        if (_has_char_type(*column_desc)) {
-            _char_type_idx.emplace_back(i);
-            if (i != 0) {
-                _char_type_idx_no_0.emplace_back(i);
+        do {
+            if (column_desc->type() == FieldType::OLAP_FIELD_TYPE_CHAR) {
+                _char_type_idx.emplace_back(i);
+                if (i != 0) {
+                    _char_type_idx_no_0.emplace_back(i);
+                }
+                break;
+            } else if (column_desc->type() != FieldType::OLAP_FIELD_TYPE_ARRAY) {
+                break;
             }
-        }
+            // for Array<Char> or Array<Array<Char>>
+            column_desc = column_desc->get_sub_field(0);
+        } while (column_desc != nullptr);
     }
 }
 
diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.h b/be/src/olap/rowset/segment_v2/segment_iterator.h
index 33d3a3f5f9c..d53ad9d62b0 100644
--- a/be/src/olap/rowset/segment_v2/segment_iterator.h
+++ b/be/src/olap/rowset/segment_v2/segment_iterator.h
@@ -201,7 +201,6 @@ private:
     // CHAR type in storage layer padding the 0 in length. But query engine need ignore the padding 0.
     // so segment iterator need to shrink char column before output it. only use in vec query engine.
     void _vec_init_char_column_id();
-    bool _has_char_type(const Field& column_desc);
 
     uint32_t segment_id() const { return _segment->id(); }
     uint32_t num_rows() const { return _segment->num_rows(); }
diff --git a/be/src/vec/columns/column.h b/be/src/vec/columns/column.h
index 139a23f7935..df4c66fa14c 100644
--- a/be/src/vec/columns/column.h
+++ b/be/src/vec/columns/column.h
@@ -670,8 +670,6 @@ public:
 
     virtual bool is_column_map() const { return false; }
 
-    virtual bool is_column_struct() const { return false; }
-
     /// If the only value column can contain is NULL.
     virtual bool only_null() const { return false; }
 
diff --git a/be/src/vec/columns/column_map.cpp b/be/src/vec/columns/column_map.cpp
index 9a550097af9..f10890b0093 100644
--- a/be/src/vec/columns/column_map.cpp
+++ b/be/src/vec/columns/column_map.cpp
@@ -442,27 +442,6 @@ void ColumnMap::replicate(const uint32_t* indexs, size_t target_size, IColumn& c
             ->replicate(indexs, target_size, res.values_column->assume_mutable_ref());
 }
 
-MutableColumnPtr ColumnMap::get_shrinked_column() {
-    MutableColumns new_columns(2);
-
-    if (keys_column->is_column_string() || keys_column->is_column_array() ||
-        keys_column->is_column_map() || keys_column->is_column_struct()) {
-        new_columns[0] = keys_column->get_shrinked_column();
-    } else {
-        new_columns[0] = keys_column->get_ptr();
-    }
-
-    if (values_column->is_column_string() || values_column->is_column_array() ||
-        values_column->is_column_map() || values_column->is_column_struct()) {
-        new_columns[1] = values_column->get_shrinked_column();
-    } else {
-        new_columns[1] = values_column->get_ptr();
-    }
-
-    return ColumnMap::create(new_columns[0]->assume_mutable(), new_columns[1]->assume_mutable(),
-                             offsets_column->assume_mutable());
-}
-
 void ColumnMap::reserve(size_t n) {
     get_offsets().reserve(n);
     keys_column->reserve(n);
diff --git a/be/src/vec/columns/column_map.h b/be/src/vec/columns/column_map.h
index f8bc8a4812d..0c09f0a9dae 100644
--- a/be/src/vec/columns/column_map.h
+++ b/be/src/vec/columns/column_map.h
@@ -36,8 +36,6 @@
 #include "vec/columns/column.h"
 #include "vec/columns/column_array.h"
 #include "vec/columns/column_impl.h"
-#include "vec/columns/column_nullable.h"
-#include "vec/columns/column_struct.h"
 #include "vec/columns/column_vector.h"
 #include "vec/common/assert_cast.h"
 #include "vec/common/cow.h"
@@ -112,7 +110,7 @@ public:
     const char* deserialize_and_insert_from_arena(const char* pos) override;
 
     void update_hash_with_value(size_t n, SipHash& hash) const override;
-    MutableColumnPtr get_shrinked_column() override;
+
     ColumnPtr filter(const Filter& filt, ssize_t result_size_hint) const override;
     size_t filter(const Filter& filter) override;
     ColumnPtr permute(const Permutation& perm, size_t limit) const override;
diff --git a/be/src/vec/columns/column_nullable.h b/be/src/vec/columns/column_nullable.h
index 4350bead62d..465e7dc2d96 100644
--- a/be/src/vec/columns/column_nullable.h
+++ b/be/src/vec/columns/column_nullable.h
@@ -262,8 +262,6 @@ public:
     bool is_column_decimal() const override { return get_nested_column().is_column_decimal(); }
     bool is_column_string() const override { return get_nested_column().is_column_string(); }
     bool is_column_array() const override { return get_nested_column().is_column_array(); }
-    bool is_column_map() const override { return get_nested_column().is_column_map(); }
-    bool is_column_struct() const override { return get_nested_column().is_column_struct(); }
     bool is_fixed_and_contiguous() const override { return false; }
     bool values_have_fixed_size() const override { return nested_column->values_have_fixed_size(); }
 
diff --git a/be/src/vec/columns/column_struct.cpp b/be/src/vec/columns/column_struct.cpp
index a2d1eae5709..88391370398 100644
--- a/be/src/vec/columns/column_struct.cpp
+++ b/be/src/vec/columns/column_struct.cpp
@@ -293,21 +293,6 @@ void ColumnStruct::replicate(const uint32_t* indexs, size_t target_size, IColumn
     }
 }
 
-MutableColumnPtr ColumnStruct::get_shrinked_column() {
-    const size_t tuple_size = columns.size();
-    MutableColumns new_columns(tuple_size);
-
-    for (size_t i = 0; i < tuple_size; ++i) {
-        if (columns[i]->is_column_string() || columns[i]->is_column_array() ||
-            columns[i]->is_column_map() || columns[i]->is_column_struct()) {
-            new_columns[i] = columns[i]->get_shrinked_column();
-        } else {
-            new_columns[i] = columns[i]->get_ptr();
-        }
-    }
-    return ColumnStruct::create(std::move(new_columns));
-}
-
 MutableColumns ColumnStruct::scatter(ColumnIndex num_columns, const Selector& selector) const {
     const size_t tuple_size = columns.size();
     std::vector<MutableColumns> scattered_tuple_elements(tuple_size);
diff --git a/be/src/vec/columns/column_struct.h b/be/src/vec/columns/column_struct.h
index db9633f6032..9d219c3c989 100644
--- a/be/src/vec/columns/column_struct.h
+++ b/be/src/vec/columns/column_struct.h
@@ -82,7 +82,6 @@ public:
     }
 
     std::string get_name() const override;
-    bool is_column_struct() const override { return true; }
     const char* get_family_name() const override { return "Struct"; }
     bool can_be_inside_nullable() const override { return true; }
     MutableColumnPtr clone_empty() const override;
@@ -161,9 +160,6 @@ public:
         LOG(FATAL) << "compare_at not implemented";
     }
     void get_extremes(Field& min, Field& max) const override;
-
-    MutableColumnPtr get_shrinked_column() override;
-
     void reserve(size_t n) override;
     void resize(size_t n) override;
     size_t byte_size() const override;
diff --git a/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_csv_insert_into_with_s3.out b/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_csv_insert_into_with_s3.out
index 0774e566854..3aeb3276518 100644
--- a/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_csv_insert_into_with_s3.out
+++ b/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_csv_insert_into_with_s3.out
@@ -15,7 +15,7 @@
 0      {"true":true,"false":false}     
{"69":-94,"101":-115,"-64":66,"-71":101,"0":38,"-49":-24,"88":45,"-98":20,"5":88,"120":25,"-59":-66,"-91":92,"-113":61,"-39":109,"-22":-103,"37":-56,"25":32,"-50":-52,"-8":60,"-76":-5,"93":48,"57":-119,"-127":34,"106":125,"-12":15,"84":-16,"61":68,"-72":33,"-44":0,"60":-107,"117":12,"102":-11,"-11":-12,"28":60,"-26":93,"21":122,"92":-52,"65":105,"-119":-1,"-4":10,"-23":93,"-55":-67,"-19":5,"41":46,"2":-3,"-83":-74,"-94":-54,"-47":109,"-42":53,"44":19,"-5":56
 [...]
 
 -- !sql_arr_csv_as_json_doris --
-0      {1:1, 0:0}      {69:-94, 101:-115, -64:66, -71:101, 0:38, -49:-24, 
88:45, -98:20, 5:88, 120:25, -59:-66, -91:92, -113:61, -39:109, -22:-103, 
37:-56, 25:32, -50:-52, -8:60, -76:-5, 93:48, 57:-119, -127:34, 106:125, 
-12:15, 84:-16, 61:68, -72:33, -44:0, 60:-107, 117:12, 102:-11, -11:-12, 28:60, 
-26:93, 21:122, 92:-52, 65:105, -119:-1, -4:10, -23:93, -55:-67, -19:5, 41:46, 
2:-3, -83:-74, -94:-54, -47:109, -42:53, 44:19, -5:56, 27:14, 54:124, -84:-69, 
86:-124, -20:9, 76:-39, 39:25, 98:7, -52:-1 [...]
+0      {1:1, 0:0}      {69:-94, 101:-115, -64:66, -71:101, 0:38, -49:-24, 
88:45, -98:20, 5:88, 120:25, -59:-66, -91:92, -113:61, -39:109, -22:-103, 
37:-56, 25:32, -50:-52, -8:60, -76:-5, 93:48, 57:-119, -127:34, 106:125, 
-12:15, 84:-16, 61:68, -72:33, -44:0, 60:-107, 117:12, 102:-11, -11:-12, 28:60, 
-26:93, 21:122, 92:-52, 65:105, -119:-1, -4:10, -23:93, -55:-67, -19:5, 41:46, 
2:-3, -83:-74, -94:-54, -47:109, -42:53, 44:19, -5:56, 27:14, 54:124, -84:-69, 
86:-124, -20:9, 76:-39, 39:25, 98:7, -52:-1 [...]
 
 -- !sql_arr_csv_as_json_s3 --
 
[{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"true":true,"false":true},{"tru
 [...]
diff --git a/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_json_insert_into_with_s3.out b/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_json_insert_into_with_s3.out
index 7595d7693c7..21fb51ac176 100644
--- a/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_json_insert_into_with_s3.out
+++ b/regression-test/data/datatype_p0/nested_types/query/test_nestedtypes_json_insert_into_with_s3.out
@@ -15,19 +15,19 @@
 0      {"false":true,"true":true}      
{"-64":-114,"5":87,"-128":-34,"-59":100,"-54":-124,"-91":2,"-7":14,"-96":-88,"25":6,"-108":91,"46":89,"93":6,"-3":-21,"78":118,"29":103,"-127":5,"106":75,"121":-63,"84":-107,"-104":-53,"61":-14,"-44":98,"6":-50,"60":-120,"117":86,"38":50,"70":45,"33":39,"65":-43,"-119":-115,"-4":-68,"-36":80,"9":36,"53":95,"-55":-70,"77":115,"-19":-63,"13":60,"-51":23,"41":109,"73":-82,"32":-47,"34":122,"-42":-16,"44":17,"118":-103,"-101":14,"39":31,"-52":-98,"-106":-85,"48":
 [...]
 
 -- !sql_arr_json_without_quote_doris --
-0      {0:1, 1:1}      {-64:-114, 5:87, -128:-34, -59:100, -54:-124, -91:2, 
-7:14, -96:-88, 25:6, -108:91, 46:89, 93:6, -3:-21, 78:118, 29:103, -127:5, 
106:75, 121:-63, 84:-107, -104:-53, 61:-14, -44:98, 6:-50, 60:-120, 117:86, 
38:50, 70:45, 33:39, 65:-43, -119:-115, -4:-68, -36:80, 9:36, 53:95, -55:-70, 
77:115, -19:-63, 13:60, -51:23, 41:109, 73:-82, 32:-47, 34:122, -42:-16, 44:17, 
118:-103, -101:14, 39:31, -52:-98, -106:-85, 48:126, 63:92, -46:1, -1:-89, 
16:-38, -14:97, -21:-69, 11:-6, 72:57, -3 [...]
+0      {0:1, 1:1}      {-64:-114, 5:87, -128:-34, -59:100, -54:-124, -91:2, 
-7:14, -96:-88, 25:6, -108:91, 46:89, 93:6, -3:-21, 78:118, 29:103, -127:5, 
106:75, 121:-63, 84:-107, -104:-53, 61:-14, -44:98, 6:-50, 60:-120, 117:86, 
38:50, 70:45, 33:39, 65:-43, -119:-115, -4:-68, -36:80, 9:36, 53:95, -55:-70, 
77:115, -19:-63, 13:60, -51:23, 41:109, 73:-82, 32:-47, 34:122, -42:-16, 44:17, 
118:-103, -101:14, 39:31, -52:-98, -106:-85, 48:126, 63:92, -46:1, -1:-89, 
16:-38, -14:97, -21:-69, 11:-6, 72:57, -3 [...]
 
 -- !sql_arr_json_without_quote_s3 --
 0      
[{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true,"true":false},{"false":true
 [...]
 
 -- !sql_arr_json_without_quote_doris --
-0      [{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0 [...]
+0      [{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 
1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0}, 
{0:1, 1:0}, {0:1, 1:0}, {0:1, 1:0 [...]
 
 -- !sql_arr_json_without_quote_s3 --
 0      {"true":[true,false,false],"false":[false,true,false]}  
{"101":[-38,-115,35],"-59":[-113,-128,110],"10":[-78,-127,16],"56":[108,-40,124],"-22":[-63,-88,-24],"37":[-7,81,-20],"14":[-114,95,-3],"110":[71,-72,-84],"125":[1,110,56],"-40":[-13,19,80],"20":[-41,-115,107],"46":[82,-104,-30],"57":[116,68,88],"-104":[-64,119,31],"-72":[-65,-44,27],"1":[-127,-73,81],"74":[81,-82,-32],"-99":[-70,37,55],"60":[89,112,124],"-58":[-69,50,-56],"102":[27,-17,-9],"-68":[-112,20,91],"-43":[75,21,76],"33":
 [...]
 
 -- !sql_arr_json_without_quote_doris --
-0      {1:[1, 0, 0], 0:[0, 1, 0]}      {101:[-38, -115, 35], -59:[-113, -128, 
110], 10:[-78, -127, 16], 56:[108, -40, 124], -22:[-63, -88, -24], 37:[-7, 81, 
-20], 14:[-114, 95, -3], 110:[71, -72, -84], 125:[1, 110, 56], -40:[-13, 19, 
80], 20:[-41, -115, 107], 46:[82, -104, -30], 57:[116, 68, 88], -104:[-64, 119, 
31], -72:[-65, -44, 27], 1:[-127, -73, 81], 74:[81, -82, -32], -99:[-70, 37, 
55], 60:[89, 112, 124], -58:[-69, 50, -56], 102:[27, -17, -9], -68:[-112, 20, 
91], -43:[75, 21, 76], 33:[103, 6 [...]
+0      {1:[1, 0, 0], 0:[0, 1, 0]}      {101:[-38, -115, 35], -59:[-113, -128, 
110], 10:[-78, -127, 16], 56:[108, -40, 124], -22:[-63, -88, -24], 37:[-7, 81, 
-20], 14:[-114, 95, -3], 110:[71, -72, -84], 125:[1, 110, 56], -40:[-13, 19, 
80], 20:[-41, -115, 107], 46:[82, -104, -30], 57:[116, 68, 88], -104:[-64, 119, 
31], -72:[-65, -44, 27], 1:[-127, -73, 81], 74:[81, -82, -32], -99:[-70, 37, 
55], 60:[89, 112, 124], -58:[-69, 50, -56], 102:[27, -17, -9], -68:[-112, 20, 
91], -43:[75, 21, 76], 33:[103, 6 [...]
 
 -- !sql_arr --
 100
diff --git a/regression-test/data/export/test_struct_export.out b/regression-test/data/export/test_struct_export.out
index 21141b417d3..0dc07fa7cb6 100644
Binary files a/regression-test/data/export/test_struct_export.out and b/regression-test/data/export/test_struct_export.out differ
diff --git a/regression-test/data/insert_p0/test_struct_insert.out b/regression-test/data/insert_p0/test_struct_insert.out
index 7bd960b0e3d..2c202a651e7 100644
Binary files a/regression-test/data/insert_p0/test_struct_insert.out and b/regression-test/data/insert_p0/test_struct_insert.out differ
diff --git a/regression-test/data/load_p0/stream_load/map_char_test.csv b/regression-test/data/load_p0/stream_load/map_char_test.csv
deleted file mode 100644
index 4b545d1ea36..00000000000
--- a/regression-test/data/load_p0/stream_load/map_char_test.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-1      {1:"1", 2:"22", 3:"333", 4:"4444", 5:"55555", 6:"666666", 7:"7777777"}
-3      {1:"11", 2:"22", 3:"33", 4:"44", 5:"55", 6:"66", 7:"77"}
-2      {1:"1", 2:"2", 3:"3", 4:"4", 5:"5", 6:"6", 7:"7"}
-4      {1:"111", 2:"22", 3:"333", 4:"444", 5:"55", 6:"66", 7:"777"}
diff --git a/regression-test/data/load_p0/stream_load/test_stream_load.out b/regression-test/data/load_p0/stream_load/test_stream_load.out
index 505b3110cf2..8816fce94b8 100644
--- a/regression-test/data/load_p0/stream_load/test_stream_load.out
+++ b/regression-test/data/load_p0/stream_load/test_stream_load.out
@@ -6,12 +6,6 @@
 -- !sql1 --
 2019   9       9       9       7.700   a       2019-09-09      1970-01-01T08:33:39     k7      9.0     9.0
 
--- !map11 --
-1      {1:"1", 2:"22", 3:"333", 4:"4444", 5:"55555", 6:"666666", 7:"7777777"}
-2      {1:"1", 2:"2", 3:"3", 4:"4", 5:"5", 6:"6", 7:"7"}
-3      {1:"11", 2:"22", 3:"33", 4:"44", 5:"55", 6:"66", 7:"77"}
-4      {1:"111", 2:"22", 3:"333", 4:"444", 5:"55", 6:"66", 7:"777"}
-
 -- !all11 --
 2500
 
diff --git a/regression-test/data/query_p0/aggregate/aggregate_group_by_metric_type.out b/regression-test/data/query_p0/aggregate/aggregate_group_by_metric_type.out
index d37e5e62bb0..32cdb5a79e9 100644
Binary files a/regression-test/data/query_p0/aggregate/aggregate_group_by_metric_type.out and b/regression-test/data/query_p0/aggregate/aggregate_group_by_metric_type.out differ
diff --git a/regression-test/suites/load_p0/stream_load/test_stream_load.groovy b/regression-test/suites/load_p0/stream_load/test_stream_load.groovy
index 0884330f2d3..d9cf9c72938 100644
--- a/regression-test/suites/load_p0/stream_load/test_stream_load.groovy
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load.groovy
@@ -191,8 +191,6 @@ suite("test_stream_load", "p0") {
     def tableName7 = "test_unique_key_with_delete"
     def tableName8 = "test_array"
     def tableName10 = "test_struct"
-    def tableName11 = "test_map"
-
     sql """ DROP TABLE IF EXISTS ${tableName3} """
     sql """ DROP TABLE IF EXISTS ${tableName4} """
     sql """ DROP TABLE IF EXISTS ${tableName5} """
@@ -200,7 +198,6 @@ suite("test_stream_load", "p0") {
     sql """ DROP TABLE IF EXISTS ${tableName7} """
     sql """ DROP TABLE IF EXISTS ${tableName8} """
     sql """ DROP TABLE IF EXISTS ${tableName10} """
-    sql """ DROP TABLE IF EXISTS ${tableName11} """
     sql """
     CREATE TABLE IF NOT EXISTS ${tableName3} (
       `k1` int(11) NULL,
@@ -284,7 +281,7 @@ suite("test_stream_load", "p0") {
       `k4` ARRAY<BIGINT> NULL COMMENT "",
       `k5` ARRAY<CHAR> NULL COMMENT "",
       `k6` ARRAY<VARCHAR(20)> NULL COMMENT "",
-      `k7` ARRAY<DATE> NULL COMMENT "",
+      `k7` ARRAY<DATE> NULL COMMENT "", 
       `k8` ARRAY<DATETIME> NULL COMMENT "",
       `k9` ARRAY<FLOAT> NULL COMMENT "",
       `k10` ARRAY<DOUBLE> NULL COMMENT "",
@@ -319,41 +316,6 @@ suite("test_stream_load", "p0") {
     );
     """
 
-    sql """
-    CREATE TABLE IF NOT EXISTS ${tableName11} (
-      `k1` int(11) NULL,
-      `k2` map<int, char(7)> NULL
-    ) ENGINE=OLAP
-    DUPLICATE KEY(`k1`)
-    DISTRIBUTED BY HASH(`k1`) BUCKETS 3
-    PROPERTIES (
-    "replication_allocation" = "tag.location.default: 1"
-    );
-    """
-
-    // load map with specific-length char with non-specific-length data
-    streamLoad {
-        table "${tableName11}"
-
-        set 'column_separator', '\t'
-
-        file 'map_char_test.csv'
-        time 10000 // limit inflight 10s
-
-        check { result, exception, startTime, endTime ->
-            if (exception != null) {
-                throw exception
-            }
-            log.info("Stream load result: ${result}".toString())
-            def json = parseJson(result)
-            assertEquals("success", json.Status.toLowerCase())
-            assertEquals(4, json.NumberTotalRows)
-            assertEquals(0, json.NumberFilteredRows)
-        }
-    }
-    sql "sync"
-    order_qt_map11 "SELECT * FROM ${tableName11} order by k1" 
-
     // load all columns
     streamLoad {
         table "${tableName3}"


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
