This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 538a885697cb559f14d0e04261984251f1ab2cd6
Author: Ashin Gau <ashin...@users.noreply.github.com>
AuthorDate: Mon Jul 1 23:02:25 2024 +0800

    [test](migrate) move test_hive_text_complex_type from p2 to p0 (#37007)
    
    Follow-up to #36787: move test_hive_text_complex_type (with its Hive
    data and setup scripts) from the p2 suite to the p0 suite.
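    
    Context: p2 suites run against a remote Hive cluster
    (external_remote_hive), while p0 suites run against the dockerized
    hive2/hive3 environment (external_docker_hive), so the move also adds
    the table data and setup scripts under docker-compose. A minimal
    sketch of how per-table run.sh scripts like the ones below are
    typically picked up (this loader loop is an illustrative assumption,
    not part of this diff):
    
        #!/bin/bash
        # Hypothetical loader: each run.sh unpacks its data.tar.gz,
        # uploads the payload to HDFS, and creates the table via hive.
        for script in docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/*/run.sh; do
            bash "${script}"
        done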
---
 .../hive_text_complex_type/create_table.hql        |  27 +++++++++
 .../hive_text_complex_type/data.tar.gz             | Bin 0 -> 560 bytes
 .../multi_catalog/hive_text_complex_type/run.sh    |  12 ++++
 .../hive_text_complex_type2/create_table.hql       |  21 +++++++
 .../hive_text_complex_type2/data.tar.gz            | Bin 0 -> 366 bytes
 .../multi_catalog/hive_text_complex_type2/run.sh   |  12 ++++
 .../hive_text_complex_type3/create_table.hql       |  24 ++++++++
 .../hive_text_complex_type3/data.tar.gz            | Bin 0 -> 977 bytes
 .../multi_catalog/hive_text_complex_type3/run.sh   |  12 ++++
 .../create_table.hql                               |  33 +++++++++++
 .../hive_text_complex_type_delimiter/data.tar.gz   | Bin 0 -> 568 bytes
 .../hive_text_complex_type_delimiter/run.sh        |  12 ++++
 .../create_table.hql                               |  27 +++++++++
 .../hive_text_complex_type_delimiter2/data.tar.gz  | Bin 0 -> 376 bytes
 .../hive_text_complex_type_delimiter2/run.sh       |  12 ++++
 .../create_table.hql                               |  26 +++++++++
 .../hive_text_complex_type_delimiter3/data.tar.gz  | Bin 0 -> 978 bytes
 .../hive_text_complex_type_delimiter3/run.sh       |  12 ++++
 .../parquet_predicate_table/create_table.hql       |  18 ++++++
 .../parquet_predicate_table/data.tar.gz            | Bin 0 -> 828 bytes
 .../multi_catalog/parquet_predicate_table/run.sh   |  12 ++++
 .../hive/test_hive_text_complex_type.out           |  65 +++++++++++++++++++++
 .../hive/test_hive_text_complex_type.groovy        |  15 +++--
 23 files changed, 334 insertions(+), 6 deletions(-)

diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/create_table.hql
new file mode 100644
index 00000000000..3b20db98019
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/create_table.hql
@@ -0,0 +1,27 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type`(
+  `column1` int, 
+  `column2` map<int,boolean>, 
+  `column3` map<int,tinyint>, 
+  `column4` map<string,smallint>, 
+  `column5` map<string,int>, 
+  `column6` map<string,bigint>, 
+  `column7` map<string,float>, 
+  `column8` map<string,double>, 
+  `column9` map<int,string>, 
+  `column10` map<string,timestamp>, 
+  `column11` map<string,date>, 
+  `column12` struct<field1:boolean,field2:tinyint,field3:smallint,field4:int,field5:bigint,field6:float,field7:double,field8:string,field9:timestamp,field10:date>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1690518015');
+
+msck repair table hive_text_complex_type;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/data.tar.gz
new file mode 100644
index 00000000000..dd8a3c6b068
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
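A quick spot-check after the script above runs (a hypothetical verification step, not part of the commit; assumes the hive CLI is available in the container):

    hive -e 'USE multi_catalog; SELECT COUNT(*) FROM hive_text_complex_type;'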
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/create_table.hql
new file mode 100644
index 00000000000..ac75375d950
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type2`(
+  `id` int, 
+  `col1` map<int,map<string,int>>, 
+  `col2` array<array<map<int,boolean>>>, 
+  `col3` struct<field1:int,field2:map<int,string>,field3:struct<sub_field1:boolean,sub_field2:boolean,sub_field3:int>,field4:array<int>>, 
+  `col4` map<int,map<int,array<boolean>>>, 
+  `col5` map<int,struct<sub_field1:boolean,sub_field2:string>>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type2'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1692719086');
+
+msck repair table hive_text_complex_type2;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/data.tar.gz
new file mode 100644
index 00000000000..ab5e0a5f7c8
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type2/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/create_table.hql
new file mode 100644
index 00000000000..8b0ccdaaa1f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/create_table.hql
@@ -0,0 +1,24 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type3`(
+  `id` int, 
+  `column1` map<int,struct<a:int,b:int,c:array<map<string,array<array<array<array<struct<aa:int,bb:string,cc:boolean>>>>>>>>>, 
+  `column2` array<struct<a:int,b:array<map<string,map<int,map<string,array<struct<aaa:struct<aa:int,bb:string,cc:boolean>,bbb:boolean,ccc:string,ddd:date>>>>>>,c:int>>, 
+  `column3` struct<a:int,b:struct<a:array<map<string,array<map<int,map<boolean,array<struct<aa:int,bb:string,cc:boolean>>>>>>>>,c:map<int,string>,d:array<int>>, 
+  `column4` map<int,map<date,map<int,map<double,map<string,map<int,map<string,map<int,map<int,map<int,boolean>>>>>>>>>>, 
+  `column5` array<array<array<array<array<array<array<array<array<array<int>>>>>>>>>>, 
+  `column6` struct<a:map<int,map<int,map<string,string>>>,b:struct<aa:struct<aaa:struct<aaaa:struct<aaaaa:struct<aaaaaa:struct<aaaaaaa:struct<aaaaaaaa:struct<a1:int,a2:string>,bbbbbbbb:map<int,int>>,bbbbbbb:array<string>>>>,bbbb:map<int,double>>>,bb:date>,c:date>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'hive.serialization.extend.nesting.levels'='true') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type3'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1693389680');
+
+msck repair table hive_text_complex_type3;
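Note on the table above: 'hive.serialization.extend.nesting.levels'='true' tells LazySimpleSerDe to extend its delimiter set beyond the default three nesting levels, which the deeply nested map/array/struct columns here require for text serialization.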
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/data.tar.gz
new file mode 100644
index 00000000000..c3004ca0d90
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type3/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/create_table.hql
new file mode 100644
index 00000000000..eade16ce4a4
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/create_table.hql
@@ -0,0 +1,33 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter`(
+  `column1` int, 
+  `column2` map<int,boolean>, 
+  `column3` map<int,tinyint>, 
+  `column4` map<string,smallint>, 
+  `column5` map<string,int>, 
+  `column6` map<string,bigint>, 
+  `column7` map<string,float>, 
+  `column8` map<string,double>, 
+  `column9` map<int,string>, 
+  `column10` map<string,timestamp>, 
+  `column11` map<string,date>, 
+  `column12` struct<field1:boolean,field2:tinyint,field3:smallint,field4:int,field5:bigint,field6:float,field7:double,field8:string,field9:timestamp,field10:date>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'colelction.delim'='|', 
+  'field.delim'=',', 
+  'line.delim'='\n', 
+  'mapkey.delim'=':', 
+  'serialization.format'=',') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1690517298');
+
+msck repair table hive_text_complex_type_delimiter;
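Note on the SERDEPROPERTIES above: 'colelction.delim' is not a typo introduced by this commit; Hive itself historically stores the collection delimiter under this misspelled key (tracked upstream as HIVE-16922), and SHOW CREATE TABLE reproduces it, so the .hql is kept verbatim.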
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/data.tar.gz
new file mode 100644
index 00000000000..0e312584bb0
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/create_table.hql
new file mode 100644
index 00000000000..fcc0d3631b6
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/create_table.hql
@@ -0,0 +1,27 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter2`(
+  `id` int, 
+  `col1` map<int,map<string,int>>, 
+  `col2` array<array<map<int,boolean>>>, 
+  `col3` struct<field1:int,field2:map<int,string>,field3:struct<sub_field1:boolean,sub_field2:boolean,sub_field3:int>,field4:array<int>>, 
+  `col4` map<int,map<int,array<boolean>>>, 
+  `col5` map<int,struct<sub_field1:boolean,sub_field2:string>>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'colelction.delim'=',', 
+  'field.delim'='\t', 
+  'line.delim'='\n', 
+  'mapkey.delim'=':', 
+  'serialization.format'='\t') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter2'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1692719456');
+
+msck repair table hive_text_complex_type_delimiter2;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/data.tar.gz
new file mode 100644
index 00000000000..de23af1c69e
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter2/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/create_table.hql
new file mode 100644
index 00000000000..a7e1cc4804d
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/create_table.hql
@@ -0,0 +1,26 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter3`(
+  `id` int, 
+  `column1` map<int,struct<a:int,b:int,c:array<map<string,array<array<array<array<struct<aa:int,bb:string,cc:boolean>>>>>>>>>, 
+  `column2` array<struct<a:int,b:array<map<string,map<int,map<string,array<struct<aaa:struct<aa:int,bb:string,cc:boolean>,bbb:boolean,ccc:string,ddd:date>>>>>>,c:int>>, 
+  `column3` struct<a:int,b:struct<a:array<map<string,array<map<int,map<boolean,array<struct<aa:int,bb:string,cc:boolean>>>>>>>>,c:map<int,string>,d:array<int>>, 
+  `column4` map<int,map<date,map<int,map<double,map<string,map<int,map<string,map<int,map<int,map<int,boolean>>>>>>>>>>, 
+  `column5` array<array<array<array<array<array<array<array<array<array<int>>>>>>>>>>, 
+  `column6` struct<a:map<int,map<int,map<string,string>>>,b:struct<aa:struct<aaa:struct<aaaa:struct<aaaaa:struct<aaaaaa:struct<aaaaaaa:struct<aaaaaaaa:struct<a1:int,a2:string>,bbbbbbbb:map<int,int>>,bbbbbbb:array<string>>>>,bbbb:map<int,double>>>,bb:date>,c:date>)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'collection.delim'=',', 
+  'hive.serialization.extend.nesting.levels'='true', 
+  'mapkey.delim'=':') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter3'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1693390056');
+
+msck repair table hive_text_complex_type_delimiter3;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/data.tar.gz
new file mode 100644
index 00000000000..a33042cf178
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/hive_text_complex_type_delimiter3/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/create_table.hql
new file mode 100644
index 00000000000..754ce2360a5
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_predicate_table`(
+  `column_primitive_integer` int, 
+  `column1_struct` struct<field0:bigint,field1:bigint>, 
+  `column_primitive_bigint` bigint)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_predicate_table'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1692368377');
+
+msck repair table parquet_predicate_table;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/data.tar.gz
new file mode 100644
index 00000000000..27d818cb4d8
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_predicate_table/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/regression-test/data/external_table_p2/hive/test_hive_text_complex_type.out b/regression-test/data/external_table_p0/hive/test_hive_text_complex_type.out
similarity index 50%
rename from regression-test/data/external_table_p2/hive/test_hive_text_complex_type.out
rename to regression-test/data/external_table_p0/hive/test_hive_text_complex_type.out
index 334aef473a6..8a0fbd65290 100644
--- a/regression-test/data/external_table_p2/hive/test_hive_text_complex_type.out
+++ b/regression-test/data/external_table_p0/hive/test_hive_text_complex_type.out
@@ -64,3 +64,68 @@
 -- !sql16 --
 2
 
+-- !sql1 --
+1      {101:1} {102:10}        {"field1":100}  {"field2":2000000}      {"field3":300000000}    {"field4":3.14} {"field5":3.14159}      {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 20, "field4": 3000000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "Hello", "field9": "2023-07-28 12:34:56.000000", "field10": "2023-07-28"}
+2      {201:1} {202:11}        {"field1":200}  {"field2":9000000}      {"field3":8000000000}   {"field4":9.13321}      {"field5":322.14159}    {203:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 201, "field4": 300011000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "world", "field9": "2023-07-28 12:34:56.000000", "field10": "2023-06-28"}
+3      {201:1} {202:10}        {"field1":120}  {"field2":44440000}     {"field3":700000000}    {"field4":3.100004}     {"field5":3.00014159}   {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 700, "field4": 300011000, "field5": 3333333334, "field6": 3.00014, "field7": 3.3314159, "field8": "hello world", "field9": "2023-07-28 01:34:56.000000", "field10": "2023-07-27"}
+10     {101:1, 102:1, 103:1}   {102:10, 104:1, 105:2}  {"field1":100, "field0":100}    {"field2":3000000}      {"field3":300000000}    {"field4":3.14, "hello world":0.111, "hell0":7.001}     {"field5":3.14159}      {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000", "field000006":"2023-07-08 12:34:57.000000", "field2432456":"2023-07-28 12:34:50.000000"}        {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 20, "field4": 3000000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "Hello",  [...]
+11     {101:1, 102:1, 13:1, 12:1}      {102:10, 14:1, 15:2, 12:10}     {"field1":100, "fie88ld0":100, "fieweld0":100, "fieeeld1":100, "fieeeld0":100, "feeield0":100, "feeield1":100, "firreld0":100, "field0":100}    {"field2":3000000, "abcd":4000000, "1231":3000000}      {"fi7eld3":300000000, "field30":300000000, "fielwwd3":300000000, "fi055":300000000, "field7":300000121323}      {"field4":3.14, "hello world":0.111, "hell0":7.001}     {"field5":3.14159}      {103:"Hello", 0:"hello"}        {"field6":"2023-07-28 12:34:56.000000", [...]
+
+-- !sql2 --
+1      {101:1} {102:10}        {"field1":100}  {"field2":2000000}      {"field3":300000000}    {"field4":3.14} {"field5":3.14159}      {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 20, "field4": 3000000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "Hello", "field9": "2023-07-28 12:34:56.000000", "field10": "2023-07-28"}
+2      {201:1} {202:11}        {"field1":200}  {"field2":9000000}      {"field3":8000000000}   {"field4":9.13321}      {"field5":322.14159}    {203:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 201, "field4": 300011000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "world", "field9": "2023-07-28 12:34:56.000000", "field10": "2023-06-28"}
+3      {201:1} {202:10}        {"field1":120}  {"field2":44440000}     {"field3":700000000}    {"field4":3.100004}     {"field5":3.00014159}   {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000"} {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 700, "field4": 300011000, "field5": 3333333334, "field6": 3.00014, "field7": 3.3314159, "field8": "hello world", "field9": "2023-07-28 01:34:56.000000", "field10": "2023-07-27"}
+10     {101:1, 102:1, 103:1}   {102:10, 104:1, 105:2}  {"field1":100, "field0":100}    {"field2":3000000}      {"field3":300000000}    {"field4":3.14, "hello world":0.111, "hell0":7.001}     {"field5":3.14159}      {103:"Hello"}   {"field6":"2023-07-28 12:34:56.000000", "field000006":"2023-07-08 12:34:57.000000", "field2432456":"2023-07-28 12:34:50.000000"}        {"field7":"2023-07-28"} {"field1": 1, "field2": 1, "field3": 20, "field4": 3000000, "field5": 44444444444, "field6": 3.14, "field7": 3.14159, "field8": "Hello",  [...]
+11     {101:1, 102:1, 13:1, 12:1}      {102:10, 14:1, 15:2, 12:10}     {"field1":100, "fie88ld0":100, "fieweld0":100, "fieeeld1":100, "fieeeld0":100, "feeield0":100, "feeield1":100, "firreld0":100, "field0":100}    {"field2":3000000, "abcd":4000000, "1231":3000000}      {"fi7eld3":300000000, "field30":300000000, "fielwwd3":300000000, "fi055":300000000, "field7":300000121323}      {"field4":3.14, "hello world":0.111, "hell0":7.001}     {"field5":3.14159}      {103:"Hello", 0:"hello"}        {"field6":"2023-07-28 12:34:56.000000", [...]
+
+-- !filter_complex --
+50000  50000   50000
+
+-- !sql3 --
+1      {20:{"key":100, "key10":100, "abcd":1}, 200:{"key100":33300, "abwedewcd":2}, 2000:{"key1000":1000, "abcqqqqd":3}}       [[{1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}]]  {"field1": 50000, "field2": {3:"value3", 4:"value4", 5:"value5", 10:"11"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3, 3333, 4, 4, 5, 5, 6, 0]}  {4:{50:[1, 1, 0, 0, 1, 1, 1, 1], 500:[1, 1, 0, 0], 40:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 1:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_fi [...]
+2      {10:{"key10":100, "abcd":1}, 100:{"key100":100, "abcd":2}, 1000:{"key1000":1000, "abcd":3}}     [[{1:1, 10:1, 100:1}]]  {"field1": 5, "field2": {3:"value3", 4:"value4", 5:"value5"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3]}       {4:{5:[1, 1, 0, 0], 4:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 0:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_field2": "example"}}
+
+-- !sql4 --
+1      {20:{"key":100, "key10":100, "abcd":1}, 200:{"key100":33300, "abwedewcd":2}, 2000:{"key1000":1000, "abcqqqqd":3}}       [[{1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}]]  {"field1": 50000, "field2": {3:"value3", 4:"value4", 5:"value5", 10:"11"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3, 3333, 4, 4, 5, 5, 6, 0]}  {4:{50:[1, 1, 0, 0, 1, 1, 1, 1], 500:[1, 1, 0, 0], 40:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 1:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_fi [...]
+2      {10:{"key10":100, "abcd":1}, 100:{"key100":100, "abcd":2}, 1000:{"key1000":1000, "abcd":3}}     [[{1:1, 10:1, 100:1}]]  {"field1": 5, "field2": {3:"value3", 4:"value4", 5:"value5"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3]}       {4:{5:[1, 1, 0, 0], 4:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 0:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_field2": "example"}}
+
+-- !sql5 --
+1      {1:{"a": 10, "b": 20, "c": [{"key1":[[[[{"aa": 1003, "bb": "text1", "cc": 1}, {"aa": 10133, "bb": "text2", "cc": 0}], [{"aa": 10332, "bb": "text3", "cc": 1}, {"aa": 103, "bb": "text4", "cc": 0}]]]], "key2":[[[[{"aa": 1770, "bb": "text1000", "cc": 1}, {"aa": 1177, "bb": "text2111", "cc": 0}], [{"aa": 1211, "bb": "text311", "cc": 1}, {"aa": 11113, "bb": "text114", "cc": 0}, {"aa": 13111, "bb": "text114", "cc": 0}, {"aa": 11131, "bb": "text114", "cc": 0}, {"aa": 11113, "bb": "text114", "c [...]
+2      {1:{"a": 10, "b": 20, "c": [{"key1":[[[[{"aa": 1003, "bb": "text1", "cc": 1}, {"aa": 10133, "bb": "text2", "cc": 0}], [{"aa": 10332, "bb": "text3", "cc": 1}, {"aa": 103, "bb": "text4", "cc": 0}]]]], "key2":[[[[{"aa": 1770, "bb": "text1000", "cc": 1}, {"aa": 1177, "bb": "text2111", "cc": 0}], [{"aa": 1211, "bb": "text311", "cc": 1}, {"aa": 11113, "bb": "text114", "cc": 0}, {"aa": 13111, "bb": "text114", "cc": 0}, {"aa": 11131, "bb": "text114", "cc": 0}, {"aa": 11113, "bb": "text114", "c [...]
+
+-- !sql6 --
+1      {1:{"a": 10, "b": 20, "c": [{"key1":[[[[{"aa": 1003, "bb": "text1", "cc": 1}, {"aa": 10133, "bb": "text2", "cc": 0}], [{"aa": 10332, "bb": "text3", "cc": 1}, {"aa": 103, "bb": "text4", "cc": 0}]]]], "key2":[[[[{"aa": 1770, "bb": "text1000", "cc": 1}, {"aa": 1177, "bb": "text2111", "cc": 0}], [{"aa": 1211, "bb": "text311", "cc": 1}, {"aa": 11113, "bb": "text114", "cc": 0}, {"aa": 13111, "bb": "text114", "cc": 0}, {"aa": 11131, "bb": "text114", "cc": 0}, {"aa": 11113, "bb": "text114", "c [...]
+2      {1:{"a": 10, "b": 20, "c": [{"key1":[[[[{"aa": 1003, "bb": "text1", "cc": 1}, {"aa": 10133, "bb": "text2", "cc": 0}], [{"aa": 10332, "bb": "text3", "cc": 1}, {"aa": 103, "bb": "text4", "cc": 0}]]]], "key2":[[[[{"aa": 1770, "bb": "text1000", "cc": 1}, {"aa": 1177, "bb": "text2111", "cc": 0}], [{"aa": 1211, "bb": "text311", "cc": 1}, {"aa": 11113, "bb": "text114", "cc": 0}, {"aa": 13111, "bb": "text114", "cc": 0}, {"aa": 11131, "bb": "text114", "cc": 0}, {"aa": 11113, "bb": "text114", "c [...]
+
+-- !sql7 --
+1
+1
+
+-- !sql8 --
+
+-- !sql9 --
+2
+
+-- !sql10 --
+[20, 200, 2000]
+[10, 100, 1000]
+
+-- !sql11 --
+1
+
+-- !sql12 --
+1      {20:{"key":100, "key10":100, "abcd":1}, 200:{"key100":33300, "abwedewcd":2}, 2000:{"key1000":1000, "abcqqqqd":3}}       [[{1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}, {1:1, 20:1, 200:1}]]  {"field1": 50000, "field2": {3:"value3", 4:"value4", 5:"value5", 10:"11"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3, 3333, 4, 4, 5, 5, 6, 0]}  {4:{50:[1, 1, 0, 0, 1, 1, 1, 1], 500:[1, 1, 0, 0], 40:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 1:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_fi [...]
+2      {10:{"key10":100, "abcd":1}, 100:{"key100":100, "abcd":2}, 1000:{"key1000":1000, "abcd":3}}     [[{1:1, 10:1, 100:1}]]  {"field1": 5, "field2": {3:"value3", 4:"value4", 5:"value5"}, "field3": {"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3]}       {4:{5:[1, 1, 0, 0], 4:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 0:{0:[0, 0]}}   {6:{"sub_field1": 0, "sub_field2": "example"}}
+
+-- !sql13 --
+1      {4:{50:[1, 1, 0, 0, 1, 1, 1, 1], 500:[1, 1, 0, 0], 40:[1, 1, 0, 0, 0], 3:[1, 1, 0, 0]}, 1:{0:[0, 0]}}
+
+-- !sql14 --
+{"field1": 5, "field2": {3:"value3", 4:"value4", 5:"value5"}, "field3": 
{"sub_field1": 1, "sub_field2": 0, "sub_field3": 7}, "field4": [1, 2, 3]}
+
+-- !sql15 --
+2
+
+-- !sql16 --
+2
+
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_text_complex_type.groovy b/regression-test/suites/external_table_p0/hive/test_hive_text_complex_type.groovy
similarity index 86%
rename from regression-test/suites/external_table_p2/hive/test_hive_text_complex_type.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_text_complex_type.groovy
index 5bbb1a6e6b5..718f29a1daa 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_text_complex_type.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_text_complex_type.groovy
@@ -15,11 +15,14 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_hive_text_complex_type", 
"p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
+suite("test_hive_text_complex_type", 
"p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (!"true".equalsIgnoreCase(enabled)) {
+        return;
+    }
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+        String hmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
         String catalog_name = "test_hive_text_complex_type"
 
         sql """drop catalog if exists ${catalog_name};"""
@@ -27,7 +30,7 @@ suite("test_hive_text_complex_type", "p2,external,hive,external_
             create catalog if not exists ${catalog_name} properties (
                 'type'='hms',
                 'hadoop.username' = 'hadoop',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hmsPort}'
             );
         """
         logger.info("catalog " + catalog_name + " created")
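With the retag from p2 to p0, the suite now runs against the docker-based hive2/hive3 services. A hypothetical local invocation (runner flags assumed from common Doris regression-test usage; adjust to your checkout):

    bash run-regression-test.sh --run -s test_hive_text_complex_type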

