This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new b3b035e3227 [test](migrate) move 2 cases from p2 to p0 (#36935)
b3b035e3227 is described below

commit b3b035e3227b6766a5abf6bbf8b29b9c9b27a195
Author: zhangdong <493738...@qq.com>
AuthorDate: Tue Jul 2 23:25:35 2024 +0800

    [test](migrate) move 2 cases from p2 to p0 (#36935)
    
    1. Extend the health check interval of the hive docker containers so they have more time to start up
    2. Move the following cases:
    - test_hive_remove_partition
    - test_hive_statistics_from_hms
    - test_hive_partition_column_analyze
    - test_hive_parquet_alter_column
    - test_hive_analyze_db
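    
    For reference, a sketch of the resulting compose healthcheck stanza
    (values taken from the hive-2x/hive-3x templates changed below):
    
        healthcheck:
          test: ["CMD", "sh", "-c", "/mnt/scripts/healthy_check.sh"]
          interval: 20s   # was 10s
          timeout: 60s
          retries: 120
    
    A container is only marked unhealthy after `retries` consecutive failed
    probes, so doubling the interval roughly doubles the startup grace
    window: from about 10s * 120 = 20 min to 20s * 120 = 40 min.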
---
 .../docker-compose/hive/hive-2x.yaml.tpl           |    2 +-
 .../docker-compose/hive/hive-3x.yaml.tpl           |    2 +-
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_bigint/data.tar.gz     |  Bin 0 -> 3714 bytes
 .../parquet_alter_column_to_bigint/run.sh          |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_boolean/data.tar.gz    |  Bin 0 -> 3718 bytes
 .../parquet_alter_column_to_boolean/run.sh         |   12 +
 .../parquet_alter_column_to_char/create_table.hql  |   30 +
 .../parquet_alter_column_to_char/data.tar.gz       |  Bin 0 -> 3717 bytes
 .../parquet_alter_column_to_char/run.sh            |   12 +
 .../parquet_alter_column_to_date/create_table.hql  |   30 +
 .../parquet_alter_column_to_date/data.tar.gz       |  Bin 0 -> 3718 bytes
 .../parquet_alter_column_to_date/run.sh            |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_decimal/data.tar.gz    |  Bin 0 -> 3718 bytes
 .../parquet_alter_column_to_decimal/run.sh         |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_double/data.tar.gz     |  Bin 0 -> 3713 bytes
 .../parquet_alter_column_to_double/run.sh          |   12 +
 .../parquet_alter_column_to_float/create_table.hql |   30 +
 .../parquet_alter_column_to_float/data.tar.gz      |  Bin 0 -> 3716 bytes
 .../parquet_alter_column_to_float/run.sh           |   12 +
 .../parquet_alter_column_to_int/create_table.hql   |   30 +
 .../parquet_alter_column_to_int/data.tar.gz        |  Bin 0 -> 3715 bytes
 .../parquet_alter_column_to_int/run.sh             |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_smallint/data.tar.gz   |  Bin 0 -> 3718 bytes
 .../parquet_alter_column_to_smallint/run.sh        |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_string/data.tar.gz     |  Bin 0 -> 3716 bytes
 .../parquet_alter_column_to_string/run.sh          |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_timestamp/data.tar.gz  |  Bin 0 -> 3716 bytes
 .../parquet_alter_column_to_timestamp/run.sh       |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_tinyint/data.tar.gz    |  Bin 0 -> 3716 bytes
 .../parquet_alter_column_to_tinyint/run.sh         |   12 +
 .../create_table.hql                               |   30 +
 .../parquet_alter_column_to_varchar/data.tar.gz    |  Bin 0 -> 3717 bytes
 .../parquet_alter_column_to_varchar/run.sh         |   12 +
 .../partition_manual_remove/create_table.hql       |   24 +
 .../partition_manual_remove/data.tar.gz            |  Bin 0 -> 1026 bytes
 .../multi_catalog/partition_manual_remove/run.sh   |   13 +
 .../bigint_partition/create_table.hql              |   21 +
 .../partition_type/bigint_partition/data.tar.gz    |  Bin 0 -> 258064 bytes
 .../data/partition_type/bigint_partition/run.sh    |   12 +
 .../partition_type/char_partition/create_table.hql |   21 +
 .../data/partition_type/char_partition/data.tar.gz |  Bin 0 -> 258066 bytes
 .../data/partition_type/char_partition/run.sh      |   12 +
 .../partition_type/date_partition/create_table.hql |   21 +
 .../data/partition_type/date_partition/data.tar.gz |  Bin 0 -> 257868 bytes
 .../data/partition_type/date_partition/run.sh      |   12 +
 .../decimal_partition/create_table.hql             |   21 +
 .../partition_type/decimal_partition/data.tar.gz   |  Bin 0 -> 258769 bytes
 .../data/partition_type/decimal_partition/run.sh   |   12 +
 .../double_partition/create_table.hql              |   21 +
 .../partition_type/double_partition/data.tar.gz    |  Bin 0 -> 258850 bytes
 .../data/partition_type/double_partition/run.sh    |   12 +
 .../float_partition/create_table.hql               |   21 +
 .../partition_type/float_partition/data.tar.gz     |  Bin 0 -> 258843 bytes
 .../data/partition_type/float_partition/run.sh     |   12 +
 .../partition_type/int_partition/create_table.hql  |   21 +
 .../data/partition_type/int_partition/data.tar.gz  |  Bin 0 -> 257936 bytes
 .../data/partition_type/int_partition/run.sh       |   12 +
 .../smallint_partition/create_table.hql            |   21 +
 .../partition_type/smallint_partition/data.tar.gz  |  Bin 0 -> 258000 bytes
 .../data/partition_type/smallint_partition/run.sh  |   12 +
 .../string_partition/create_table.hql              |   21 +
 .../partition_type/string_partition/data.tar.gz    |  Bin 0 -> 258102 bytes
 .../data/partition_type/string_partition/run.sh    |   12 +
 .../tinyint_partition/create_table.hql             |   21 +
 .../partition_type/tinyint_partition/data.tar.gz   |  Bin 0 -> 258082 bytes
 .../data/partition_type/tinyint_partition/run.sh   |   12 +
 .../varchar_partition/create_table.hql             |   21 +
 .../partition_type/varchar_partition/data.tar.gz   |  Bin 0 -> 258064 bytes
 .../data/partition_type/varchar_partition/run.sh   |   12 +
 .../data/statistics/empty_table/create_table.hql   |   16 +
 .../scripts/data/statistics/empty_table/run.sh     |    7 +
 .../scripts/data/statistics/stats/create_table.hql |   35 +
 .../hive/scripts/data/statistics/stats/data.tar.gz |  Bin 0 -> 3923 bytes
 .../hive/scripts/data/statistics/stats/run.sh      |   12 +
 .../hive/test_hive_parquet_alter_column.out        | 1675 ++++++++++++++++++++
 .../hive/test_hive_remove_partition.out            |    4 +
 .../hive/test_hive_analyze_db.groovy               |   17 +-
 .../hive/test_hive_parquet_alter_column.groovy     |   16 +-
 .../hive/test_hive_partition_column_analyze.groovy |   59 +-
 .../hive/test_hive_remove_partition.groovy         |   17 +-
 .../hive/test_hive_statistics_from_hms.groovy      |  230 ---
 89 files changed, 2740 insertions(+), 298 deletions(-)

diff --git a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
index 0aec9ec2365..5eddbb81c53 100644
--- a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
@@ -89,7 +89,7 @@ services:
       - hive-metastore-postgresql
     healthcheck:
       test: ["CMD", "sh", "-c", "/mnt/scripts/healthy_check.sh"]
-      interval: 10s
+      interval: 20s
       timeout: 60s
       retries: 120
     network_mode: "host"
diff --git a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
index 901e5b3f71a..9d838ddb7b4 100644
--- a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
@@ -89,7 +89,7 @@ services:
       - hive-metastore-postgresql
     healthcheck:
       test: ["CMD", "sh", "-c", "/mnt/scripts/healthy_check.sh"]
-      interval: 10s
+      interval: 20s
       timeout: 60s
       retries: 120
     network_mode: "host"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/create_table.hql
new file mode 100644
index 00000000000..fb12678964c
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_bigint`(
+  `col_int` bigint, 
+  `col_smallint` bigint, 
+  `col_tinyint` bigint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_bigint'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217352', 
+  'transient_lastDdlTime'='1697217352');
+
+msck repair table parquet_alter_column_to_bigint;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/data.tar.gz
new file mode 100644
index 00000000000..980db9ea878
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_bigint/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/create_table.hql
new file mode 100644
index 00000000000..5b994bfce12
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_boolean`(
+  `col_int` int, 
+  `col_smallint` smallint, 
+  `col_tinyint` tinyint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_boolean'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217386', 
+  'transient_lastDdlTime'='1697217386');
+
+msck repair table parquet_alter_column_to_boolean;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/data.tar.gz
new file mode 100644
index 00000000000..d23de22dbb1
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_boolean/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/create_table.hql
new file mode 100644
index 00000000000..68e5fe475a4
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_char`(
+  `col_int` char(10), 
+  `col_smallint` char(10), 
+  `col_tinyint` char(10), 
+  `col_bigint` char(10), 
+  `col_float` char(10), 
+  `col_double` char(10), 
+  `col_boolean` boolean, 
+  `col_string` char(10), 
+  `col_char` char(10), 
+  `col_varchar` char(10), 
+  `col_date` char(10), 
+  `col_timestamp` char(10), 
+  `col_decimal` char(10))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_char'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697275142', 
+  'transient_lastDdlTime'='1697275142');
+
+msck repair table parquet_alter_column_to_char;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/data.tar.gz
new file mode 100644
index 00000000000..769c21026a0
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_char/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/create_table.hql
new file mode 100644
index 00000000000..dafb00eeb1a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_date`(
+  `col_int` int, 
+  `col_smallint` smallint, 
+  `col_tinyint` tinyint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_date'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217393', 
+  'transient_lastDdlTime'='1697217393');
+
+msck repair table parquet_alter_column_to_date;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/data.tar.gz
new file mode 100644
index 00000000000..f45f9983f77
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_date/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/create_table.hql
new file mode 100644
index 00000000000..ee58c9d4de5
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_decimal`(
+  `col_int` decimal(5,1), 
+  `col_smallint` decimal(5,1), 
+  `col_tinyint` decimal(5,1), 
+  `col_bigint` decimal(5,1), 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(5,1))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_decimal'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217403', 
+  'transient_lastDdlTime'='1697217403');
+
+msck repair table parquet_alter_column_to_decimal;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/data.tar.gz
new file mode 100644
index 00000000000..c81f23b20e6
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_decimal/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/create_table.hql
new file mode 100644
index 00000000000..4cf53aafa5b
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_double`(
+  `col_int` double, 
+  `col_smallint` double, 
+  `col_tinyint` double, 
+  `col_bigint` double, 
+  `col_float` double, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_double'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697270364', 
+  'transient_lastDdlTime'='1697270364');
+
+msck repair table parquet_alter_column_to_double;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/data.tar.gz
new file mode 100644
index 00000000000..e039c003f46
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_double/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/create_table.hql
new file mode 100644
index 00000000000..fd6d9999063
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_float`(
+  `col_int` float, 
+  `col_smallint` float, 
+  `col_tinyint` float, 
+  `col_bigint` float, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_float'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697270277', 
+  'transient_lastDdlTime'='1697270277');
+
+msck repair table parquet_alter_column_to_float;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/data.tar.gz
new file mode 100644
index 00000000000..951e242d2ff
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_float/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/create_table.hql
new file mode 100644
index 00000000000..027121b193a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_int`(
+  `col_int` int, 
+  `col_smallint` int, 
+  `col_tinyint` int, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_int'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697216968', 
+  'transient_lastDdlTime'='1697216968');
+
+msck repair table parquet_alter_column_to_int;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/data.tar.gz
new file mode 100644
index 00000000000..404baf85c32
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_int/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/create_table.hql
new file mode 100644
index 00000000000..33a9423532c
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_smallint`(
+  `col_int` int, 
+  `col_smallint` smallint, 
+  `col_tinyint` smallint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_smallint'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217290', 
+  'transient_lastDdlTime'='1697217290');
+
+msck repair table parquet_alter_column_to_smallint;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/data.tar.gz
new file mode 100644
index 00000000000..cc542d269d5
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_smallint/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/create_table.hql
new file mode 100644
index 00000000000..158642b9e7f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_string`(
+  `col_int` string, 
+  `col_smallint` string, 
+  `col_tinyint` string, 
+  `col_bigint` string, 
+  `col_float` string, 
+  `col_double` string, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` string, 
+  `col_varchar` string, 
+  `col_date` string, 
+  `col_timestamp` string, 
+  `col_decimal` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_string'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217389', 
+  'transient_lastDdlTime'='1697217389');
+
+msck repair table parquet_alter_column_to_string;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/data.tar.gz
new file mode 100644
index 00000000000..210b521bbb4
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_string/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/create_table.hql
new file mode 100644
index 00000000000..b8d7c1e52db
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_timestamp`(
+  `col_int` int, 
+  `col_smallint` smallint, 
+  `col_tinyint` tinyint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_timestamp'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217395', 
+  'transient_lastDdlTime'='1697217395');
+
+msck repair table parquet_alter_column_to_timestamp;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/data.tar.gz
new file mode 100644
index 00000000000..ea31f33bff6
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_timestamp/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/create_table.hql
new file mode 100644
index 00000000000..c65210160a1
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_tinyint`(
+  `col_int` int, 
+  `col_smallint` smallint, 
+  `col_tinyint` tinyint, 
+  `col_bigint` bigint, 
+  `col_float` float, 
+  `col_double` double, 
+  `col_boolean` boolean, 
+  `col_string` string, 
+  `col_char` char(10), 
+  `col_varchar` varchar(255), 
+  `col_date` date, 
+  `col_timestamp` timestamp, 
+  `col_decimal` decimal(10,2))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_tinyint'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697217350', 
+  'transient_lastDdlTime'='1697217350');
+
+msck repair table parquet_alter_column_to_tinyint;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/data.tar.gz
new file mode 100644
index 00000000000..301012dc523
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_tinyint/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/create_table.hql
new file mode 100644
index 00000000000..3b9f825b9dd
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/create_table.hql
@@ -0,0 +1,30 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `multi_catalog.parquet_alter_column_to_varchar`(
+  `col_int` varchar(20), 
+  `col_smallint` varchar(20), 
+  `col_tinyint` varchar(20), 
+  `col_bigint` varchar(20), 
+  `col_float` varchar(20), 
+  `col_double` varchar(20), 
+  `col_boolean` boolean, 
+  `col_string` varchar(20), 
+  `col_char` varchar(20), 
+  `col_varchar` varchar(20), 
+  `col_date` varchar(20), 
+  `col_timestamp` varchar(20), 
+  `col_decimal` varchar(20))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_alter_column_to_varchar'
+TBLPROPERTIES (
+  'last_modified_by'='hadoop', 
+  'last_modified_time'='1697275145', 
+  'transient_lastDdlTime'='1697275145');
+
+msck repair table parquet_alter_column_to_varchar;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/data.tar.gz
new file mode 100644
index 00000000000..30e52067a34
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_alter_column_to_varchar/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/create_table.hql
new file mode 100644
index 00000000000..b3f354ff0f6
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/create_table.hql
@@ -0,0 +1,24 @@
+create database if not exists multi_catalog;
+
+use multi_catalog;
+
+CREATE EXTERNAL TABLE `partition_manual_remove`(
+    `id` int)
+PARTITIONED BY (
+    `part1` int)
+ROW FORMAT SERDE
+    'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+    'field.delim'='|',
+    'serialization.format'='|')
+STORED AS INPUTFORMAT
+    'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+    'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+    '/user/doris/suites/multi_catalog/partition_manual_remove'
+TBLPROPERTIES (
+    'transient_lastDdlTime'='1684941779');
+
+msck repair table partition_manual_remove;
+
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/data.tar.gz
new file mode 100644
index 00000000000..3453a1357b9
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/run.sh
new file mode 100755
index 00000000000..7c2e7e7aed2
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/partition_manual_remove/run.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}"/create_table.hql
+
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/create_table.hql
new file mode 100644
index 00000000000..88ad103f704
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.bigint_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `bigint_part` bigint)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/bigint_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697099374');
+
+msck repair table bigint_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/data.tar.gz
new file mode 100644
index 00000000000..27a74ea1bba
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/bigint_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/create_table.hql
new file mode 100644
index 00000000000..64f8f082342
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.char_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `char_part` char(20))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/char_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100413');
+
+msck repair table char_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/data.tar.gz
new file mode 100644
index 00000000000..82cb397833b
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/char_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/create_table.hql
new file mode 100644
index 00000000000..a3a5d79186e
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.date_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `date_part` date)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/date_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100297');
+
+msck repair table date_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/data.tar.gz
new file mode 100644
index 00000000000..3f6be9418d1
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/date_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/create_table.hql
new file mode 100644
index 00000000000..a4f12e3c6d4
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.decimal_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `decimal_part` decimal(12,4))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/decimal_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100746');
+
+msck repair table decimal_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/data.tar.gz
new file mode 100644
index 00000000000..391afa9ccd9
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/decimal_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/create_table.hql
new file mode 100644
index 00000000000..26e51d09c4d
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.double_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `double_part` double)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/double_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100288');
+
+msck repair table double_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/data.tar.gz
new file mode 100644
index 00000000000..fefa9c98236
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/double_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/create_table.hql
new file mode 100644
index 00000000000..6a97c4b2375
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.float_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `float_part` float)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/float_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100288');
+
+msck repair table float_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/data.tar.gz
new file mode 100644
index 00000000000..b6a56f8cd12
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/float_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/create_table.hql
new file mode 100644
index 00000000000..d3cc016f386
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.int_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `int_part` int)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/int_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697099371');
+
+msck repair table int_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/data.tar.gz
new file mode 100644
index 00000000000..f254ed6976f
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/int_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/create_table.hql
new file mode 100644
index 00000000000..55509300e30
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.smallint_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `smallint_part` smallint)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/smallint_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697099288');
+
+msck repair table smallint_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/data.tar.gz
new file mode 100644
index 00000000000..01278f9db83
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/smallint_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/create_table.hql
new file mode 100644
index 00000000000..a5a5d0d1046
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.string_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `string_part` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/string_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100288');
+
+msck repair table string_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/data.tar.gz
new file mode 100644
index 00000000000..a717b2b4f5b
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/string_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/create_table.hql
new file mode 100644
index 00000000000..b69f5f5935b
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.tinyint_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `tinyint_part` tinyint)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/tinyint_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697099282');
+
+msck repair table tinyint_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/data.tar.gz
new file mode 100644
index 00000000000..5cab662e5a9
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/tinyint_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/create_table.hql
new file mode 100644
index 00000000000..e105c000d12
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS partition_type;
+USE partition_type;
+
+CREATE TABLE `partition_type.varchar_partition`(
+  `id` int)
+PARTITIONED BY ( 
+  `varchar_part` varchar(50))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='|', 
+  'serialization.format'='|') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/partition_type/varchar_partition'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1697100365');
+
+msck repair table varchar_partition;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/data.tar.gz
new file mode 100644
index 00000000000..2903fb5c91f
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/run.sh
new file mode 100644
index 00000000000..ade2facfd72
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/partition_type/varchar_partition/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/partition_type/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/partition_type/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/create_table.hql
new file mode 100644
index 00000000000..a53763916d1
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/create_table.hql
@@ -0,0 +1,16 @@
+CREATE DATABASE IF NOT EXISTS statistics;
+USE statistics;
+
+CREATE TABLE `statistics.empty_table`(
+  `id` int, 
+  `name` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1702352468');
+
+msck repair table empty_table;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/run.sh
new file mode 100644
index 00000000000..b4abc8b3b61
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/empty_table/run.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/create_table.hql
new file mode 100644
index 00000000000..999344eea82
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/create_table.hql
@@ -0,0 +1,35 @@
+CREATE DATABASE IF NOT EXISTS statistics;
+USE statistics;
+
+CREATE TABLE `statistics.stats`(
+  `lo_orderkey` int, 
+  `lo_linenumber` int, 
+  `lo_custkey` int, 
+  `lo_partkey` int, 
+  `lo_suppkey` int, 
+  `lo_orderdate` int, 
+  `lo_orderpriority` string, 
+  `lo_shippriority` int, 
+  `lo_quantity` int, 
+  `lo_extendedprice` int, 
+  `lo_ordtotalprice` int, 
+  `lo_discount` int, 
+  `lo_revenue` int, 
+  `lo_supplycost` int, 
+  `lo_tax` int, 
+  `lo_commitdate` int, 
+  `lo_shipmode` string)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' 
+WITH SERDEPROPERTIES ( 
+  'field.delim'='\t', 
+  'serialization.format'='\t') 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/statistics/stats'
+TBLPROPERTIES (
+  'transient_lastDdlTime'='1687325090');
+
+msck repair table stats;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/data.tar.gz
new file mode 100644
index 00000000000..4a169f609a8
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/run.sh
new file mode 100644
index 00000000000..0e7361b2cea
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/statistics/stats/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/statistics/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/statistics/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
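
Note: every run.sh added in this commit follows the same three steps: unpack data.tar.gz, push the files to HDFS, run the HQL. A hedged sketch of a driver loop that could execute all cases in one pass; DATA_DIR and the loop itself are illustrative assumptions, not the actual hive docker entrypoint:

    # Illustrative driver only; DATA_DIR is an assumed mount point.
    DATA_DIR=/mnt/scripts/data
    find "${DATA_DIR}" -name run.sh | while read -r case_script; do
        bash "${case_script}" || exit 1   # stop at the first failing case
    done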
diff --git a/regression-test/data/external_table_p2/hive/test_hive_parquet_alter_column.out b/regression-test/data/external_table_p0/hive/test_hive_parquet_alter_column.out
similarity index 50%
rename from regression-test/data/external_table_p2/hive/test_hive_parquet_alter_column.out
rename to regression-test/data/external_table_p0/hive/test_hive_parquet_alter_column.out
index d566c008c0d..1377d4857c8 100644
--- a/regression-test/data/external_table_p2/hive/test_hive_parquet_alter_column.out
+++ b/regression-test/data/external_table_p0/hive/test_hive_parquet_alter_column.out
@@ -1674,3 +1674,1678 @@ B
 
 -- !decimal_decimal --
 
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   INT     Yes     true    \N      
+col_tinyint    INT     Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   SMALLINT        Yes     true    \N      
+col_tinyint    SMALLINT        Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   SMALLINT        Yes     true    \N      
+col_tinyint    TINYINT Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        BIGINT  Yes     true    \N      
+col_smallint   BIGINT  Yes     true    \N      
+col_tinyint    BIGINT  Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        FLOAT   Yes     true    \N      
+col_smallint   FLOAT   Yes     true    \N      
+col_tinyint    FLOAT   Yes     true    \N      
+col_bigint     FLOAT   Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1.0
+-1.0
+-1.0
+
+-- !order --
+-400.0
+-400.0
+-400.0
+
+-- !order --
+-20.0
+-20.0
+-20.0
+
+-- !order --
+-4.0E8
+-4.0E8
+-4.0E8
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        DOUBLE  Yes     true    \N      
+col_smallint   DOUBLE  Yes     true    \N      
+col_tinyint    DOUBLE  Yes     true    \N      
+col_bigint     DOUBLE  Yes     true    \N      
+col_float      DOUBLE  Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1.0   -400.0  -20.0   -4.0E8  40.54439926147461       50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1.0
+-1.0
+-1.0
+
+-- !order --
+-400.0
+-400.0
+-400.0
+
+-- !order --
+-20.0
+-20.0
+-20.0
+
+-- !order --
+-4.0E8
+-4.0E8
+-4.0E8
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   SMALLINT        Yes     true    \N      
+col_tinyint    TINYINT Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        TEXT    Yes     true    \N      
+col_smallint   TEXT    Yes     true    \N      
+col_tinyint    TEXT    Yes     true    \N      
+col_bigint     TEXT    Yes     true    \N      
+col_float      TEXT    Yes     true    \N      
+col_double     TEXT    Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       TEXT    Yes     true    \N      
+col_varchar    TEXT    Yes     true    \N      
+col_date       TEXT    Yes     true    \N      
+col_timestamp  TEXT    Yes     true    \N      
+col_decimal    TEXT    Yes     true    \N      
+
+-- !show --
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-200
+-200
+-200
+
+-- !order --
+-10
+-10
+-10
+
+-- !order --
+-20000000
+-20000000
+-20000000
+
+-- !order --
+10.500000
+10.500000
+10.500000
+
+-- !order --
+20.750000
+20.750000
+20.750000
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        CHAR(10)        Yes     true    \N      
+col_smallint   CHAR(10)        Yes     true    \N      
+col_tinyint    CHAR(10)        Yes     true    \N      
+col_bigint     CHAR(10)        Yes     true    \N      
+col_float      CHAR(10)        Yes     true    \N      
+col_double     CHAR(10)        Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     CHAR(10)        Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    CHAR(10)        Yes     true    \N      
+col_date       CHAR(10)        Yes     true    \N      
+col_timestamp  CHAR(10)        Yes     true    \N      
+col_decimal    CHAR(10)        Yes     true    \N      
+
+-- !show --
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-200
+-200
+-200
+
+-- !order --
+-10
+-10
+-10
+
+-- !order --
+-20000000
+-20000000
+-20000000
+
+-- !order --
+10.500000
+10.500000
+10.500000
+
+-- !order --
+20.750000
+20.750000
+20.750000
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        VARCHAR(20)     Yes     true    \N      
+col_smallint   VARCHAR(20)     Yes     true    \N      
+col_tinyint    VARCHAR(20)     Yes     true    \N      
+col_bigint     VARCHAR(20)     Yes     true    \N      
+col_float      VARCHAR(20)     Yes     true    \N      
+col_double     VARCHAR(20)     Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     VARCHAR(20)     Yes     true    \N      
+col_char       VARCHAR(20)     Yes     true    \N      
+col_varchar    VARCHAR(20)     Yes     true    \N      
+col_date       VARCHAR(20)     Yes     true    \N      
+col_timestamp  VARCHAR(20)     Yes     true    \N      
+col_decimal    VARCHAR(20)     Yes     true    \N      
+
+-- !show --
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+-1     -200    -10     -20000000       20.577700       30.750000       false   First   A       ADC     2023-10-06      2023-10-09 17:15:00     1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-200
+-200
+-200
+
+-- !order --
+-10
+-10
+-10
+
+-- !order --
+-20000000
+-20000000
+-20000000
+
+-- !order --
+10.500000
+10.500000
+10.500000
+
+-- !order --
+20.750000
+20.750000
+20.750000
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+2023-10-06 14:30:00
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   SMALLINT        Yes     true    \N      
+col_tinyint    TINYINT Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        INT     Yes     true    \N      
+col_smallint   SMALLINT        Yes     true    \N      
+col_tinyint    TINYINT Yes     true    \N      
+col_bigint     BIGINT  Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(10, 2)  Yes     true    \N      
+
+-- !show --
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+-1     -400    -20     -400000000      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.45
+
+-- !order --
+-1
+-1
+-1
+
+-- !order --
+-400
+-400
+-400
+
+-- !order --
+-20
+-20
+-20
+
+-- !order --
+-400000000
+-400000000
+-400000000
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.45
+123.45
+123.45
+
+-- !desc --
+col_int        DECIMAL(5, 1)   Yes     true    \N      
+col_smallint   DECIMAL(5, 1)   Yes     true    \N      
+col_tinyint    DECIMAL(5, 1)   Yes     true    \N      
+col_bigint     DECIMAL(5, 1)   Yes     true    \N      
+col_float      FLOAT   Yes     true    \N      
+col_double     DOUBLE  Yes     true    \N      
+col_boolean    BOOLEAN Yes     true    \N      
+col_string     TEXT    Yes     true    \N      
+col_char       CHAR(10)        Yes     true    \N      
+col_varchar    VARCHAR(255)    Yes     true    \N      
+col_date       DATE    Yes     true    \N      
+col_timestamp  DATETIME(6)     Yes     true    \N      
+col_decimal    DECIMAL(5, 1)   Yes     true    \N      
+
+-- !show --
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+-1.0   -400.0  -20.0   29496729.6      40.5444 50.75   false   First   A       ADC     2023-10-06      2023-10-09T17:15        1238.4
+
+-- !order --
+-1.0
+-1.0
+-1.0
+
+-- !order --
+-400.0
+-400.0
+-400.0
+
+-- !order --
+-20.0
+-20.0
+-20.0
+
+-- !order --
+-153960755.2
+-153960755.2
+-153960755.2
+
+-- !order --
+10.5
+10.5
+10.5
+
+-- !order --
+20.75
+20.75
+20.75
+
+-- !order --
+false
+false
+false
+
+-- !order --
+Fifth
+Fifth
+Fifth
+
+-- !order --
+A
+A
+A
+
+-- !order --
+ADC
+ADC
+ADC
+
+-- !order --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !order --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !order --
+123.4
+123.4
+123.4
+
+-- !int_int --
+2
+2
+2
+
+-- !int_smallint --
+100
+100
+100
+
+-- !int_tinyint --
+5
+5
+5
+
+-- !int_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !int_float --
+
+-- !int_double --
+
+-- !int_boolean --
+
+-- !int_string --
+
+-- !int_char --
+B
+B
+B
+
+-- !int_varchar --
+
+-- !int_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !int_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !int_decimal --
+
+-- !smallint_int --
+1
+1
+1
+
+-- !smallint_smallint --
+100
+100
+100
+
+-- !smallint_tinyint --
+5
+5
+5
+
+-- !smallint_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !smallint_float --
+
+-- !smallint_double --
+
+-- !smallint_boolean --
+
+-- !smallint_string --
+
+-- !smallint_char --
+C
+C
+C
+
+-- !smallint_varchar --
+
+-- !smallint_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !smallint_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !smallint_decimal --
+
+-- !tinyint_int --
+3
+3
+3
+
+-- !tinyint_smallint --
+100
+100
+100
+
+-- !tinyint_tinyint --
+5
+5
+5
+
+-- !tinyint_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !tinyint_float --
+
+-- !tinyint_double --
+
+-- !tinyint_boolean --
+
+-- !tinyint_string --
+
+-- !tinyint_char --
+A
+A
+A
+
+-- !tinyint_varchar --
+
+-- !tinyint_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !tinyint_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !tinyint_decimal --
+
+-- !bigint_int --
+3
+3
+3
+
+-- !bigint_smallint --
+100
+100
+100
+
+-- !bigint_tinyint --
+5
+5
+5
+
+-- !bigint_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !bigint_float --
+
+-- !bigint_double --
+
+-- !bigint_boolean --
+
+-- !bigint_string --
+
+-- !bigint_char --
+A
+A
+A
+
+-- !bigint_varchar --
+
+-- !bigint_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !bigint_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !bigint_decimal --
+
+-- !float_int --
+
+-- !float_smallint --
+
+-- !float_tinyint --
+
+-- !float_bigint --
+
+-- !float_float --
+
+-- !float_double --
+
+-- !float_boolean --
+
+-- !float_string --
+
+-- !float_char --
+
+-- !float_varchar --
+
+-- !float_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !float_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !float_decimal --
+
+-- !double_int --
+2.0
+2.0
+2.0
+
+-- !double_smallint --
+
+-- !double_tinyint --
+
+-- !double_bigint --
+
+-- !double_float --
+
+-- !double_double --
+
+-- !double_boolean --
+
+-- !double_string --
+
+-- !double_char --
+A
+A
+A
+
+-- !double_varchar --
+
+-- !double_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !double_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !double_decimal --
+
+-- !boolean_int --
+3
+3
+3
+
+-- !boolean_smallint --
+100
+100
+100
+
+-- !boolean_tinyint --
+5
+5
+5
+
+-- !boolean_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !boolean_float --
+
+-- !boolean_double --
+
+-- !boolean_boolean --
+
+-- !boolean_string --
+
+-- !boolean_char --
+A
+A
+A
+
+-- !boolean_varchar --
+
+-- !boolean_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !boolean_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !boolean_decimal --
+
+-- !string_int --
+
+-- !string_smallint --
+
+-- !string_tinyint --
+
+-- !string_bigint --
+
+-- !string_float --
+
+-- !string_double --
+
+-- !string_boolean --
+
+-- !string_string --
+
+-- !string_char --
+A
+A
+A
+
+-- !string_varchar --
+
+-- !string_date --
+
+-- !string_timestamp --
+
+-- !string_decimal --
+
+-- !char_int --
+
+-- !char_smallint --
+
+-- !char_tinyint --
+
+-- !char_bigint --
+
+-- !char_float --
+
+-- !char_double --
+
+-- !char_boolean --
+
+-- !char_string --
+
+-- !char_char --
+A
+A
+A
+
+-- !char_varchar --
+
+-- !char_date --
+
+-- !char_timestamp --
+
+-- !char_decimal --
+
+-- !varchar_int --
+
+-- !varchar_smallint --
+
+-- !varchar_tinyint --
+
+-- !varchar_bigint --
+
+-- !varchar_float --
+
+-- !varchar_double --
+
+-- !varchar_boolean --
+
+-- !varchar_string --
+
+-- !varchar_char --
+B
+B
+B
+
+-- !varchar_varchar --
+
+-- !varchar_date --
+
+-- !varchar_timestamp --
+
+-- !varchar_decimal --
+
+-- !date_int --
+3
+3
+3
+
+-- !date_smallint --
+100
+100
+100
+
+-- !date_tinyint --
+5
+5
+5
+
+-- !date_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !date_float --
+
+-- !date_double --
+
+-- !date_boolean --
+
+-- !date_string --
+
+-- !date_char --
+A
+A
+A
+
+-- !date_varchar --
+
+-- !date_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !date_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !date_decimal --
+
+-- !timestamp_int --
+3
+3
+3
+
+-- !timestamp_smallint --
+100
+100
+100
+
+-- !timestamp_tinyint --
+5
+5
+5
+
+-- !timestamp_bigint --
+1000000000
+1000000000
+1000000000
+
+-- !timestamp_float --
+
+-- !timestamp_double --
+
+-- !timestamp_boolean --
+
+-- !timestamp_string --
+
+-- !timestamp_char --
+B
+B
+B
+
+-- !timestamp_varchar --
+
+-- !timestamp_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !timestamp_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !timestamp_decimal --
+
+-- !decimal_int --
+
+-- !decimal_smallint --
+
+-- !decimal_tinyint --
+
+-- !decimal_bigint --
+
+-- !decimal_float --
+
+-- !decimal_double --
+
+-- !decimal_boolean --
+
+-- !decimal_string --
+
+-- !decimal_char --
+
+-- !decimal_varchar --
+
+-- !decimal_date --
+2023-10-06
+2023-10-06
+2023-10-06
+
+-- !decimal_timestamp --
+2023-10-06T14:30
+2023-10-06T14:30
+2023-10-06T14:30
+
+-- !decimal_decimal --
+
diff --git a/regression-test/data/external_table_p2/hive/test_hive_remove_partition.out b/regression-test/data/external_table_p0/hive/test_hive_remove_partition.out
similarity index 84%
rename from regression-test/data/external_table_p2/hive/test_hive_remove_partition.out
rename to regression-test/data/external_table_p0/hive/test_hive_remove_partition.out
index 1d5ace95d7e..d6ed3d698ad 100644
--- a/regression-test/data/external_table_p2/hive/test_hive_remove_partition.out
+++ b/regression-test/data/external_table_p0/hive/test_hive_remove_partition.out
@@ -3,3 +3,7 @@
 3      2
 4      2
 
+-- !case1 --
+3      2
+4      2
+
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_analyze_db.groovy b/regression-test/suites/external_table_p0/hive/test_hive_analyze_db.groovy
similarity index 90%
rename from regression-test/suites/external_table_p2/hive/test_hive_analyze_db.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_analyze_db.groovy
index 5853d3e8af9..b47b0cdd3b1 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_analyze_db.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_analyze_db.groovy
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
 
- suite("test_hive_analyze_db", "p2,external,hive,external_remote,external_remote_hive") {
+ suite("test_hive_analyze_db", "p0,external,hive,external_docker,external_docker_hive") {
 
     def verify_column_stats_result = { column, result, count, ndv, nulls, size, avg_size, min, max ->
          def found = false;
@@ -34,11 +34,16 @@
          assertTrue(found)
      }
 
-     String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-     if (enabled != null && enabled.equalsIgnoreCase("true")) {
-         String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-         String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-         String catalog_name = "test_hive_analyze_db"
+     String enabled = context.config.otherConfigs.get("enableHiveTest")
+     if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+         logger.info("Hive test is disabled, skipping.")
+         return;
+     }
+
+     for (String hivePrefix : ["hive2", "hive3"]) {
+         String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+         String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+         String catalog_name = "${hivePrefix}_test_hive_analyze_db"
          sql """drop catalog if exists ${catalog_name};"""
          sql """
             create catalog if not exists ${catalog_name} properties (
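
Note: with the tags switched from "p2,...,external_remote_hive" to "p0,...,external_docker_hive", this suite now runs against the local hive2/hive3 docker containers instead of a remote cluster. A hypothetical local invocation, assuming the usual run-regression-test.sh flags and enableHiveTest=true in regression-conf.groovy:

    # Assumed invocation of the regression driver for the migrated suite.
    sh run-regression-test.sh --run -s test_hive_analyze_db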
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_parquet_alter_column.groovy b/regression-test/suites/external_table_p0/hive/test_hive_parquet_alter_column.groovy
similarity index 98%
rename from regression-test/suites/external_table_p2/hive/test_hive_parquet_alter_column.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_parquet_alter_column.groovy
index f07a13e2398..f0984c62f68 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_parquet_alter_column.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_parquet_alter_column.groovy
@@ -15,13 +15,17 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_hive_parquet_alter_column", "p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
+suite("test_hive_parquet_alter_column", "p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("Hive test is disabled, skipping.")
+        return;
+    }
 
-        String catalog_name = "test_hive_parquet_alter_column"
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_hive_parquet_alter_column"
         sql """drop catalog if exists ${catalog_name};"""
         sql """
             create catalog if not exists ${catalog_name} properties (
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_partition_column_analyze.groovy b/regression-test/suites/external_table_p0/hive/test_hive_partition_column_analyze.groovy
similarity index 78%
rename from regression-test/suites/external_table_p2/hive/test_hive_partition_column_analyze.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_partition_column_analyze.groovy
index d4b1fa3eca4..7704b68e399 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_partition_column_analyze.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_partition_column_analyze.groovy
@@ -15,12 +15,17 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_hive_partition_column_analyze", "p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_hive_partition_column_analyze"
+suite("test_hive_partition_column_analyze", "p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = 
context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + 
"HmsPort")
+        String catalog_name = 
"${hivePrefix}_test_hive_partition_column_analyze"
         sql """drop catalog if exists ${catalog_name};"""
         sql """
             create catalog if not exists ${catalog_name} properties (
@@ -33,34 +38,9 @@ suite("test_hive_partition_column_analyze", "p2,external,hive,external_remote,ex
 
         try {
             sql """set global enable_get_row_count_from_file_list=true"""
-            // Test analyze table without init.
-            sql """analyze table 
${catalog_name}.multi_partition.multi_partition_parquet (event_day) with sync"""
-            sql """analyze table 
${catalog_name}.multi_partition.multi_partition_orc (event_day) with sync"""
 
             sql """switch ${catalog_name};"""
             logger.info("switched to catalog " + catalog_name)
-            sql """use multi_partition;"""
-            def result = sql """show column stats multi_partition_parquet (event_day)"""
-            assertEquals(result.size(), 1)
-            assertEquals(result[0][0], "event_day")
-            assertEquals(result[0][2], "3.83714205E8")
-            assertEquals(result[0][3], "99949.0")
-            assertEquals(result[0][4], "0.0")
-            assertEquals(result[0][5], "3.83714205E9")
-            assertEquals(result[0][6], "10.0")
-            assertEquals(result[0][7], "\'1749-09-24\'")
-            assertEquals(result[0][8], "\'2023-05-26\'")
-
-            result = sql """show column stats multi_partition_orc (event_day)"""
-            assertEquals(result.size(), 1)
-            assertEquals(result[0][0], "event_day")
-            assertEquals(result[0][2], "1.9007155E8")
-            assertEquals(result[0][3], "99949.0")
-            assertEquals(result[0][4], "0.0")
-            assertEquals(result[0][5], "1.9007155E9")
-            assertEquals(result[0][6], "10.0")
-            assertEquals(result[0][7], "\'1749-09-24\'")
-            assertEquals(result[0][8], "\'2023-05-26\'")
 
             sql """analyze table 
${catalog_name}.partition_type.tinyint_partition (tinyint_part) with sync"""
             sql """analyze table 
${catalog_name}.partition_type.smallint_partition (smallint_part) with sync"""
@@ -73,7 +53,6 @@ suite("test_hive_partition_column_analyze", 
"p2,external,hive,external_remote,ex
             sql """analyze table 
${catalog_name}.partition_type.float_partition (float_part) with sync"""
             sql """analyze table 
${catalog_name}.partition_type.double_partition (double_part) with sync"""
             sql """analyze table 
${catalog_name}.partition_type.decimal_partition (decimal_part) with sync"""
-            sql """analyze table ${catalog_name}.partition_type.two_partition 
(part1, part2) with sync"""
 
             sql """use partition_type;"""
 
@@ -209,22 +188,6 @@ suite("test_hive_partition_column_analyze", "p2,external,hive,external_remote,ex
             assertEquals(result[0][6], "8.0")
             assertEquals(result[0][7], "243.2868")
             assertEquals(result[0][8], "32527.1543")
-
-            result = sql """show column stats two_partition (part1)"""
-            assertEquals(result.size(), 1)
-            assertEquals(result[0][0], "part1")
-            assertEquals(result[0][3], "100.0")
-            assertEquals(result[0][4], "0.0")
-            assertEquals(result[0][7], "1")
-            assertEquals(result[0][8], "100")
-
-            result = sql """show column stats two_partition (part2)"""
-            assertEquals(result.size(), 1)
-            assertEquals(result[0][0], "part2")
-            assertEquals(result[0][3], "100.0")
-            assertEquals(result[0][4], "0.0")
-            assertEquals(result[0][7], "\'1\'")
-            assertEquals(result[0][8], "\'99\'")
         } finally {
             sql """set global enable_get_row_count_from_file_list=false"""
         }
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_remove_partition.groovy b/regression-test/suites/external_table_p0/hive/test_hive_remove_partition.groovy
similarity index 76%
rename from regression-test/suites/external_table_p2/hive/test_hive_remove_partition.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_remove_partition.groovy
index 8a614eb78a0..6da8748e741 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_remove_partition.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_remove_partition.groovy
@@ -15,14 +15,19 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_hive_remove_partition", 
"p2,external,hive,external_remote,external_remote_hive") {
+suite("test_hive_remove_partition", 
"p0,external,hive,external_docker,external_docker_hive") {
     def case1 = """select * from partition_manual_remove order by id;"""
 
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "hive_remove_partition"
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("diable Hive test.")
+        return;
+    }
+
+    for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = 
context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + 
"HmsPort")
+        String catalog_name = "${hivePrefix}_test_hive_remove_partition"
         sql """drop catalog if exists ${catalog_name};"""
         sql """
             create catalog if not exists ${catalog_name} properties (
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_statistics_from_hms.groovy b/regression-test/suites/external_table_p2/hive/test_hive_statistics_from_hms.groovy
deleted file mode 100644
index 3a067fa42f9..00000000000
--- a/regression-test/suites/external_table_p2/hive/test_hive_statistics_from_hms.groovy
+++ /dev/null
@@ -1,230 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_hive_statistics_from_hms", 
"p2,external,hive,external_remote,external_remote_hive") {
-    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = 
context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = 
context.config.otherConfigs.get("extHiveHmsPort")
-        String catalog_name = "test_hive_statistics_from_hms"
-        sql """drop catalog if exists ${catalog_name};"""
-        sql """
-            create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hadoop.username' = 'hadoop',
-                'hive.metastore.uris' = 
'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
-            );
-        """
-        logger.info("catalog " + catalog_name + " created")
-
-        sql """switch ${catalog_name};"""
-        logger.info("switched to catalog " + catalog_name)
-        sql """use tpch1_parquet;"""
-        // Load cache
-        sql """show column cached stats lineitem"""
-        Thread.sleep(3000)
-        // Get result
-        def result = sql """show column cached stats lineitem (l_returnflag)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_returnflag")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "2.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "6001215.0")
-        assertTrue(result[0][6] == "1.0")
-
-        result = sql """show column cached stats lineitem (l_receiptdate)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_receiptdate")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "2535.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_tax)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_tax")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "8.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "4.800972E7")
-        assertTrue(result[0][6] == "8.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_shipmode)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_shipmode")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "7.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.5717007E7")
-        assertTrue(result[0][6] == "4.285300060071169")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_suppkey)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_suppkey")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "6.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_shipdate)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_shipdate")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "2535.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_commitdate)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_commitdate")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "2427.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_partkey)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_partkey")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "13152.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_orderkey)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_orderkey")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "1000998.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_quantity)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_quantity")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "31.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "4.800972E7")
-        assertTrue(result[0][6] == "8.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_linestatus)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_linestatus")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "2.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "6001215.0")
-        assertTrue(result[0][6] == "1.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_comment)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_comment")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "3834237.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "1.5899739E8")
-        assertTrue(result[0][6] == "26.494199924515286")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_extendedprice)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_extendedprice")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "1000998.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "4.800972E7")
-        assertTrue(result[0][6] == "8.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_linenumber)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_linenumber")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "261329.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "2.400486E7")
-        assertTrue(result[0][6] == "4.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_discount)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_discount")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "15.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "4.800972E7")
-        assertTrue(result[0][6] == "8.0")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        result = sql """show column cached stats lineitem (l_shipinstruct)"""
-        assertTrue(result.size() == 1)
-        assertTrue(result[0][0] == "l_shipinstruct")
-        assertTrue(result[0][2] == "6001215.0")
-        assertTrue(result[0][3] == "4.0")
-        assertTrue(result[0][4] == "0.0")
-        assertTrue(result[0][5] == "7.2006178E7")
-        assertTrue(result[0][6] == "11.998599950176756")
-        assertTrue(result[0][7] == "N/A")
-        assertTrue(result[0][8] == "N/A")
-
-        for (int i = 0; i < 10; i++) {
-            result = sql """show table stats lineitem"""
-            logger.info("show table stats result: " + result)
-            assertTrue(result.size() == 1)
-            if (result[0][2] == "0") {
-                Thread.sleep(1000)
-                continue;
-            }
-            assertTrue(result[0][2] == "6001215")
-            break;
-        }
-
-        sql """drop catalog ${catalog_name}"""
-    }
-}
-


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
