This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 7a681b76247c9f685ab1ca1f8e57eaa585cc568f
Author: zy-kkk <zhongy...@gmail.com>
AuthorDate: Wed Aug 23 21:17:16 2023 +0800

    [feature](jdbc catalog) support doris jdbc catalog array type (#23056)
---
 docs/en/docs/lakehouse/multi-catalog/jdbc.md       |  18 +-
 docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md    |  18 +-
 .../datasource/jdbc/client/JdbcMySQLClient.java    | 181 +++++++++++++++------
 regression-test/conf/regression-conf.groovy        |   1 +
 .../jdbc/test_doris_jdbc_catalog.out               |  12 ++
 .../jdbc/test_doris_jdbc_catalog.groovy            | 124 +++++++-------
 6 files changed, 241 insertions(+), 113 deletions(-)

diff --git a/docs/en/docs/lakehouse/multi-catalog/jdbc.md b/docs/en/docs/lakehouse/multi-catalog/jdbc.md
index 13afc47ac0..dded188564 100644
--- a/docs/en/docs/lakehouse/multi-catalog/jdbc.md
+++ b/docs/en/docs/lakehouse/multi-catalog/jdbc.md
@@ -346,6 +346,8 @@ As for data mapping from SQLServer to Doris, one Database in Doris corresponds t
 
 Jdbc Catalog also supports connecting to another Doris database:
 
+* MySQL 5.7 Driver
+
 ```sql
 CREATE CATALOG jdbc_doris PROPERTIES (
     "type"="jdbc",
@@ -354,10 +356,21 @@ CREATE CATALOG jdbc_doris PROPERTIES (
     "jdbc_url" = "jdbc:mysql://127.0.0.1:9030?useSSL=false",
     "driver_url" = "mysql-connector-java-5.1.47.jar",
     "driver_class" = "com.mysql.jdbc.Driver"
-);
+)
 ```
 
-**Note:** Currently, Jdbc Catalog only support to use 5.x version of JDBC jar package to connect another Doris database. If you use 8.x version of JDBC jar package, the data type of column may not be matched.
+* MySQL 8 Driver
+
+```sql
+CREATE CATALOG jdbc_doris PROPERTIES (
+    "type"="jdbc",
+    "user"="root",
+    "password"="123456",
+    "jdbc_url" = "jdbc:mysql://127.0.0.1:9030?useSSL=false",
+    "driver_url" = "mysql-connector-java-8.0.25.jar",
+    "driver_class" = "com.mysql.cj.jdbc.Driver"
+)
+```
 
 #### Type Mapping
 
@@ -379,6 +392,7 @@ CREATE CATALOG jdbc_doris PROPERTIES (
 | STRING     | STRING                 |                                                                                       |
 | TEXT       | STRING                 |                                                                                       |
 | HLL        | HLL                    | Query HLL needs to set `return_object_data_as_binary=true`                           |
+| Array      | Array                  | The element type mapping of Array follows the rules above; nested complex types are not supported |
 | Other      | UNSUPPORTED            |                                                                                       |
 
 ### Clickhouse
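
The Array row added to the type mapping above is exercised end to end by the regression test further down in this patch. A minimal sketch of the same flow, assuming the `jdbc_doris` catalog from the example above and an illustrative `demo_db.arr_demo` table that is not part of this patch:

```sql
-- On the remote Doris instance: a table with an array-typed column
-- (database and table names are illustrative, not taken from the patch).
CREATE TABLE demo_db.arr_demo (
    id INT,
    tags ARRAY<VARCHAR(10)>
) DUPLICATE KEY(id)
DISTRIBUTED BY HASH(id) BUCKETS 1
PROPERTIES ("replication_allocation" = "tag.location.default: 1");

INSERT INTO demo_db.arr_demo VALUES (1, array('a', 'b'));

-- Through the JDBC catalog, the column is now mapped to ARRAY<VARCHAR(10)>
-- instead of falling through to UNSUPPORTED.
SELECT id, tags FROM jdbc_doris.demo_db.arr_demo;
```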
diff --git a/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md b/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
index 270a2ac52b..91b742867d 100644
--- a/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
+++ b/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
@@ -347,6 +347,8 @@ CREATE CATALOG jdbc_sqlserve PROPERTIES (
 
 Jdbc Catalog也支持连接另一个Doris数据库:
 
+* MySQL 5.7 Driver
+
 ```sql
 CREATE CATALOG jdbc_doris PROPERTIES (
     "type"="jdbc",
@@ -355,10 +357,21 @@ CREATE CATALOG jdbc_doris PROPERTIES (
     "jdbc_url" = "jdbc:mysql://127.0.0.1:9030?useSSL=false",
     "driver_url" = "mysql-connector-java-5.1.47.jar",
     "driver_class" = "com.mysql.jdbc.Driver"
-);
+)
 ```
 
-**注意:** 目前 Jdbc Catalog 连接一个 Doris 数据库只支持用 5.x 版本的 jdbc jar 包。如果使用 8.x jdbc jar 包,可能会出现列类型无法匹配问题。
+* MySQL 8 Driver
+
+```sql
+CREATE CATALOG jdbc_doris PROPERTIES (
+    "type"="jdbc",
+    "user"="root",
+    "password"="123456",
+    "jdbc_url" = "jdbc:mysql://127.0.0.1:9030?useSSL=false",
+    "driver_url" = "mysql-connector-java-8.0.25.jar",
+    "driver_class" = "com.mysql.cj.jdbc.Driver"
+)
+```
 
 #### 类型映射
 
@@ -380,6 +393,7 @@ CREATE CATALOG jdbc_doris PROPERTIES (
 | STRING     | STRING                 |                                              |
 | TEXT       | STRING                 |                                              |
 | HLL        | HLL                    | 查询HLL需要设置`return_object_data_as_binary=true`   |
+| Array      | Array                  | Array内部类型适配逻辑参考上述类型,不支持嵌套复杂类型        |
 | Other      | UNSUPPORTED            |                                              |
 
 ### Clickhouse
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
index 0f856ad3e9..aed3136161 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
@@ -18,6 +18,7 @@
 package org.apache.doris.datasource.jdbc.client;
 
 import org.apache.doris.analysis.DefaultValueExprDef;
+import org.apache.doris.catalog.ArrayType;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.catalog.ScalarType;
@@ -39,10 +40,27 @@ import java.util.function.Consumer;
 public class JdbcMySQLClient extends JdbcClient {
 
     private static boolean convertDateToNull = false;
+    private static boolean isDoris = false;
 
     protected JdbcMySQLClient(JdbcClientConfig jdbcClientConfig) {
         super(jdbcClientConfig);
         convertDateToNull = isConvertDatetimeToNull(jdbcClientConfig);
+        Connection conn = null;
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            conn = super.getConnection();
+            stmt = conn.createStatement();
+            rs = stmt.executeQuery("SHOW VARIABLES LIKE 'version_comment'");
+            if (rs.next()) {
+                String versionComment = rs.getString("Value");
+                isDoris = versionComment.toLowerCase().contains("doris");
+            }
+        } catch (SQLException e) {
+            throw new JdbcClientException("Failed to determine MySQL Version Comment", e);
+        } finally {
+            close(rs, stmt, conn);
+        }
     }
 
     @Override
@@ -91,37 +109,6 @@ public class JdbcMySQLClient extends JdbcClient {
         return databaseMetaData.getColumns(schemaName, null, tableName, null);
     }
 
-    /**
-     * get all columns like DatabaseMetaData.getColumns in mysql-jdbc-connector
-     */
-    private Map<String, String> getJdbcColumnsTypeInfo(String dbName, String tableName) {
-        Connection conn = getConnection();
-        ResultSet resultSet = null;
-        Map<String, String> fieldtoType = Maps.newHashMap();
-
-        StringBuilder queryBuf = new StringBuilder("SHOW FULL COLUMNS FROM ");
-        queryBuf.append(tableName);
-        queryBuf.append(" FROM ");
-        queryBuf.append(dbName);
-        try (Statement stmt = conn.createStatement()) {
-            resultSet = stmt.executeQuery(queryBuf.toString());
-            while (resultSet.next()) {
-                // get column name
-                String fieldName = resultSet.getString("Field");
-                // get original type name
-                String typeName = resultSet.getString("Type");
-                fieldtoType.put(fieldName, typeName);
-            }
-        } catch (SQLException e) {
-            throw new JdbcClientException("failed to get column list from jdbc for table %s:%s", tableName,
-                Util.getRootCauseMessage(e));
-        } finally {
-            close(resultSet, conn);
-        }
-
-        return fieldtoType;
-    }
-
     /**
      * get all columns of one table
      */
@@ -141,7 +128,6 @@ public class JdbcMySQLClient extends JdbcClient {
             String catalogName = getCatalogName(conn);
             rs = getColumns(databaseMetaData, catalogName, dbName, tableName);
             List<String> primaryKeys = getPrimaryKeys(databaseMetaData, catalogName, dbName, tableName);
-            boolean needGetDorisColumns = true;
             Map<String, String> mapFieldtoType = null;
             while (rs.next()) {
                 JdbcFieldSchema field = new JdbcFieldSchema();
@@ -151,17 +137,10 @@ public class JdbcMySQLClient extends JdbcClient {
                 // in mysql-jdbc-connector-8.0.*, TYPE_NAME of the HLL column in doris will be "UNKNOWN"
                 // in mysql-jdbc-connector-5.1.*, TYPE_NAME of the HLL column in doris will be "HLL"
                 field.setDataTypeName(rs.getString("TYPE_NAME"));
-                if (rs.getString("TYPE_NAME").equalsIgnoreCase("UNKNOWN")) {
-                    if (needGetDorisColumns) {
-                        mapFieldtoType = getJdbcColumnsTypeInfo(dbName, tableName);
-                        needGetDorisColumns = false;
-                    }
-
-                    if (mapFieldtoType != null) {
-                        field.setDataTypeName(mapFieldtoType.get(rs.getString("COLUMN_NAME")));
-                    }
+                if (isDoris) {
+                    mapFieldtoType = getColumnsDataTypeUseQuery(dbName, tableName);
+                    field.setDataTypeName(mapFieldtoType.get(rs.getString("COLUMN_NAME")));
                 }
-
                 field.setKey(primaryKeys.contains(field.getColumnName()));
                 field.setColumnSize(rs.getInt("COLUMN_SIZE"));
                 field.setDecimalDigits(rs.getInt("DECIMAL_DIGITS"));
@@ -227,6 +206,10 @@ public class JdbcMySQLClient extends JdbcClient {
 
     @Override
     protected Type jdbcTypeToDoris(JdbcFieldSchema fieldSchema) {
+        // For Doris type
+        if (isDoris) {
+            return dorisTypeToDoris(fieldSchema.getDataTypeName().toUpperCase());
+        }
         // For mysql type: "INT UNSIGNED":
         // fieldSchema.getDataTypeName().split(" ")[0] == "INT"
         // fieldSchema.getDataTypeName().split(" ")[1] == "UNSIGNED"
@@ -274,15 +257,10 @@ public class JdbcMySQLClient extends JdbcClient {
                 return Type.INT;
             case "BIGINT":
                 return Type.BIGINT;
-            case "LARGEINT": // for jdbc catalog connecting Doris database
-                return Type.LARGEINT;
             case "DATE":
-            case "DATEV2":
                 return ScalarType.createDateV2Type();
             case "TIMESTAMP":
-            case "DATETIME":
-            // for jdbc catalog connecting Doris database
-            case "DATETIMEV2": {
+            case "DATETIME": {
                 // mysql can support microsecond
                 // use columnSize to calculate the precision of timestamp/datetime
                 int columnSize = fieldSchema.getColumnSize();
@@ -299,9 +277,7 @@ public class JdbcMySQLClient extends JdbcClient {
                 return Type.FLOAT;
             case "DOUBLE":
                 return Type.DOUBLE;
-            case "DECIMAL":
-            // for jdbc catalog connecting Doris database
-            case "DECIMALV3": {
+            case "DECIMAL": {
                 int precision = fieldSchema.getColumnSize();
                 int scale = fieldSchema.getDecimalDigits();
                 return createDecimalOrStringType(precision, scale);
@@ -335,8 +311,6 @@ public class JdbcMySQLClient extends JdbcClient {
             case "VARBINARY":
             case "ENUM":
                 return ScalarType.createStringType();
-            case "HLL":
-                return ScalarType.createHllType();
             default:
                 return Type.UNSUPPORTED;
         }
@@ -346,4 +320,105 @@ public class JdbcMySQLClient extends JdbcClient {
         // Check if the JDBC URL contains "zeroDateTimeBehavior=convertToNull".
         return jdbcClientConfig.getJdbcUrl().contains("zeroDateTimeBehavior=convertToNull");
     }
+
+    /**
+     * get all columns like DatabaseMetaData.getColumns in mysql-jdbc-connector
+     */
+    private Map<String, String> getColumnsDataTypeUseQuery(String dbName, String tableName) {
+        Connection conn = getConnection();
+        ResultSet resultSet = null;
+        Map<String, String> fieldtoType = Maps.newHashMap();
+
+        StringBuilder queryBuf = new StringBuilder("SHOW FULL COLUMNS FROM ");
+        queryBuf.append(tableName);
+        queryBuf.append(" FROM ");
+        queryBuf.append(dbName);
+        try (Statement stmt = conn.createStatement()) {
+            resultSet = stmt.executeQuery(queryBuf.toString());
+            while (resultSet.next()) {
+                // get column name
+                String fieldName = resultSet.getString("Field");
+                // get original type name
+                String typeName = resultSet.getString("Type");
+                fieldtoType.put(fieldName, typeName);
+            }
+        } catch (SQLException e) {
+            throw new JdbcClientException("failed to get column list from jdbc 
for table %s:%s", tableName,
+                Util.getRootCauseMessage(e));
+        } finally {
+            close(resultSet, conn);
+        }
+        return fieldtoType;
+    }
+
+    private Type dorisTypeToDoris(String type) {
+        if (type == null || type.isEmpty()) {
+            return Type.UNSUPPORTED;
+        }
+
+        String upperType = type.toUpperCase();
+
+        // For ARRAY type
+        if (upperType.startsWith("ARRAY")) {
+            String innerType = upperType.substring(6, upperType.length() - 1).trim();
+            Type arrayInnerType = dorisTypeToDoris(innerType);
+            return ArrayType.create(arrayInnerType, true);
+        }
+
+        int openParen = upperType.indexOf("(");
+        String baseType = (openParen == -1) ? upperType : upperType.substring(0, openParen);
+
+        switch (baseType) {
+            case "BOOL":
+            case "BOOLEAN":
+                return Type.BOOLEAN;
+            case "TINYINT":
+                return Type.TINYINT;
+            case "INT":
+                return Type.INT;
+            case "SMALLINT":
+                return Type.SMALLINT;
+            case "BIGINT":
+                return Type.BIGINT;
+            case "LARGEINT":
+                return Type.LARGEINT;
+            case "FLOAT":
+                return Type.FLOAT;
+            case "DOUBLE":
+                return Type.DOUBLE;
+            case "DECIMAL":
+            case "DECIMALV3": {
+                String[] params = upperType.substring(openParen + 1, upperType.length() - 1).split(",");
+                int precision = Integer.parseInt(params[0].trim());
+                int scale = Integer.parseInt(params[1].trim());
+                return createDecimalOrStringType(precision, scale);
+            }
+            case "DATE":
+            case "DATEV2":
+                return ScalarType.createDateV2Type();
+            case "DATETIME":
+            case "DATETIMEV2": {
+                int scale = Integer.parseInt(upperType.substring(openParen + 1, upperType.length() - 1));
+                if (scale > 6) {
+                    scale = 6;
+                }
+                return ScalarType.createDatetimeV2Type(scale);
+            }
+            case "CHAR":
+            case "VARCHAR": {
+                int length = Integer.parseInt(upperType.substring(openParen + 1, upperType.length() - 1));
+                return baseType.equals("CHAR")
+                    ? ScalarType.createCharType(length) : ScalarType.createVarcharType(length);
+            }
+            case "STRING":
+            case "TEXT":
+                return ScalarType.createStringType();
+            case "JSON":
+                return ScalarType.createJsonbType();
+            case "HLL":
+                return ScalarType.createHllType();
+            default:
+                return Type.UNSUPPORTED;
+        }
+    }
 }
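
The new logic in JdbcMySQLClient rests on two plain queries: the constructor reads `version_comment` to decide whether the remote endpoint is Doris, and `getColumnsDataTypeUseQuery` runs `SHOW FULL COLUMNS` per table to recover the original Doris type strings that `dorisTypeToDoris` then parses. Run by hand they look like this (database and table names are illustrative):

```sql
-- Probe issued by the new constructor; a value containing "doris"
-- (case-insensitive) switches the client to the dorisTypeToDoris() mapping.
SHOW VARIABLES LIKE 'version_comment';

-- Per-table query used by getColumnsDataTypeUseQuery to read the original
-- Doris type strings (e.g. ARRAY<INT>), which dorisTypeToDoris() then parses;
-- names here are illustrative.
SHOW FULL COLUMNS FROM arr_demo FROM demo_db;
```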
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 1c77c9ed97..89ddbf3065 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -85,6 +85,7 @@ pg_14_port=5442
 oracle_11_port=1521
 sqlserver_2022_port=1433
 clickhouse_22_port=8123
+doris_port=9030
 
 // hive catalog test config
 // To enable hive test, you need first start hive container.
diff --git a/regression-test/data/external_table_p0/jdbc/test_doris_jdbc_catalog.out b/regression-test/data/external_table_p0/jdbc/test_doris_jdbc_catalog.out
index cb78c08955..4456a38d50 100644
--- a/regression-test/data/external_table_p0/jdbc/test_doris_jdbc_catalog.out
+++ b/regression-test/data/external_table_p0/jdbc/test_doris_jdbc_catalog.out
@@ -27,6 +27,12 @@ doris_jdbc_catalog
 5      doris5
 6      doris6
 
+-- !base1 --
+true   1       1       1       1       1       1.0     1.0     1.00000 1.0000000000    2021-01-01      2021-01-01T00:00        a       a       {"a":1}
+
+-- !arr1 --
+1      [1]     [1]     [1]     [1]     [1]     [1]     [1]     [1]     [1.00000]       [1.0000000000]  [2021-01-01]    [2021-01-01 00:00:00.000]       ["a"]   ["a"]   ["a"]
+
 -- !tb1 --
 1      1
 2      1
@@ -49,6 +55,12 @@ doris_jdbc_catalog
 5      1
 6      1
 
+-- !base2 --
+true   1       1       1       1       1       1.0     1.0     1.00000 1.0000000000    2021-01-01      2021-01-01T00:00        a       a       {"a":1}
+
+-- !arr2 --
+1      [1]     [1]     [1]     [1]     [1]     [1]     [1]     [1]     [1.00000]       [1.0000000000]  [2021-01-01]    [2021-01-01 00:00:00.000]       ["a"]   ["a"]   ["a"]
+
 -- !sql --
 doris_jdbc_catalog
 
diff --git a/regression-test/suites/external_table_p0/jdbc/test_doris_jdbc_catalog.groovy b/regression-test/suites/external_table_p0/jdbc/test_doris_jdbc_catalog.groovy
index 5a189629f4..154b4df2da 100644
--- a/regression-test/suites/external_table_p0/jdbc/test_doris_jdbc_catalog.groovy
+++ b/regression-test/suites/external_table_p0/jdbc/test_doris_jdbc_catalog.groovy
@@ -28,9 +28,11 @@ suite("test_doris_jdbc_catalog", "p0") {
     String resource_name = "jdbc_resource_catalog_doris"
     String catalog_name = "doris_jdbc_catalog";
     String internal_db_name = "regression_test_jdbc_catalog_p0";
-    String doris_port = 9030;
-    String inDorisTable = "doris_in_tb";
+    String doris_port = context.config.otherConfigs.get("doris_port");
+    String inDorisTable = "test_doris_jdbc_doris_in_tb";
     String hllTable = "bowen_hll_test"
+    String base_table = "base";
+    String arr_table = "arr";
 
     qt_sql """select current_catalog()"""
     sql """drop catalog if exists ${catalog_name} """
@@ -78,62 +80,9 @@ suite("test_doris_jdbc_catalog", "p0") {
           `user_log_acct` hll HLL_UNION NULL COMMENT ""
         ) ENGINE=OLAP
         AGGREGATE KEY(`pin_id`, `pv_date`)
-        COMMENT "OLAP"
-        PARTITION BY RANGE(`pv_date`)
-        (PARTITION pbefore201910 VALUES [('1900-01-01'), ('2019-10-01')),
-        PARTITION p201910 VALUES [('2019-10-01'), ('2019-11-01')),
-        PARTITION p201911 VALUES [('2019-11-01'), ('2019-12-01')),
-        PARTITION p201912 VALUES [('2019-12-01'), ('2020-01-01')),
-        PARTITION p202001 VALUES [('2020-01-01'), ('2020-02-01')),
-        PARTITION p202002 VALUES [('2020-02-01'), ('2020-03-01')),
-        PARTITION p202003 VALUES [('2020-03-01'), ('2020-04-01')),
-        PARTITION p202004 VALUES [('2020-04-01'), ('2020-05-01')),
-        PARTITION p202005 VALUES [('2020-05-01'), ('2020-06-01')),
-        PARTITION p202006 VALUES [('2020-06-01'), ('2020-07-01')),
-        PARTITION p202007 VALUES [('2020-07-01'), ('2020-08-01')),
-        PARTITION p202008 VALUES [('2020-08-01'), ('2020-09-01')),
-        PARTITION p202009 VALUES [('2020-09-01'), ('2020-10-01')),
-        PARTITION p202010 VALUES [('2020-10-01'), ('2020-11-01')),
-        PARTITION p202011 VALUES [('2020-11-01'), ('2020-12-01')),
-        PARTITION p202012 VALUES [('2020-12-01'), ('2021-01-01')),
-        PARTITION p202101 VALUES [('2021-01-01'), ('2021-02-01')),
-        PARTITION p202102 VALUES [('2021-02-01'), ('2021-03-01')),
-        PARTITION p202103 VALUES [('2021-03-01'), ('2021-04-01')),
-        PARTITION p202104 VALUES [('2021-04-01'), ('2021-05-01')),
-        PARTITION p202105 VALUES [('2021-05-01'), ('2021-06-01')),
-        PARTITION p202106 VALUES [('2021-06-01'), ('2021-07-01')),
-        PARTITION p202107 VALUES [('2021-07-01'), ('2021-08-01')),
-        PARTITION p202108 VALUES [('2021-08-01'), ('2021-09-01')),
-        PARTITION p202109 VALUES [('2021-09-01'), ('2021-10-01')),
-        PARTITION p202110 VALUES [('2021-10-01'), ('2021-11-01')),
-        PARTITION p202111 VALUES [('2021-11-01'), ('2021-12-01')),
-        PARTITION p202112 VALUES [('2021-12-01'), ('2022-01-01')),
-        PARTITION p202201 VALUES [('2022-01-01'), ('2022-02-01')),
-        PARTITION p202202 VALUES [('2022-02-01'), ('2022-03-01')),
-        PARTITION p202203 VALUES [('2022-03-01'), ('2022-04-01')),
-        PARTITION p202204 VALUES [('2022-04-01'), ('2022-05-01')),
-        PARTITION p202205 VALUES [('2022-05-01'), ('2022-06-01')),
-        PARTITION p202206 VALUES [('2022-06-01'), ('2022-07-01')),
-        PARTITION p202207 VALUES [('2022-07-01'), ('2022-08-01')),
-        PARTITION p202208 VALUES [('2022-08-01'), ('2022-09-01')),
-        PARTITION p202209 VALUES [('2022-09-01'), ('2022-10-01')),
-        PARTITION p202210 VALUES [('2022-10-01'), ('2022-11-01')),
-        PARTITION p202211 VALUES [('2022-11-01'), ('2022-12-01')),
-        PARTITION p202212 VALUES [('2022-12-01'), ('2023-01-01')),
-        PARTITION p202301 VALUES [('2023-01-01'), ('2023-02-01')),
-        PARTITION p202302 VALUES [('2023-02-01'), ('2023-03-01')),
-        PARTITION p202303 VALUES [('2023-03-01'), ('2023-04-01')),
-        PARTITION p202304 VALUES [('2023-04-01'), ('2023-05-01')),
-        PARTITION p202305 VALUES [('2023-05-01'), ('2023-06-01')),
-        PARTITION p202306 VALUES [('2023-06-01'), ('2023-07-01')),
-        PARTITION p202307 VALUES [('2023-07-01'), ('2023-08-01')),
-        PARTITION p202308 VALUES [('2023-08-01'), ('2023-09-01')),
-        PARTITION p202309 VALUES [('2023-09-01'), ('2023-10-01')))
         DISTRIBUTED BY HASH(`pin_id`) BUCKETS 16
         PROPERTIES (
-        "replication_allocation" = "tag.location.default: 1",
-        "in_memory" = "false",
-        "storage_format" = "DEFAULT"
+        "replication_allocation" = "tag.location.default: 1"
         ); """
 
     sql """ insert into ${hllTable} values(1, "2023-01-01", hll_hash("1"));"""
@@ -143,6 +92,65 @@ suite("test_doris_jdbc_catalog", "p0") {
     sql """ insert into ${hllTable} values(5, "2023-01-05", hll_hash("5"));"""
     sql """ insert into ${hllTable} values(6, "2023-01-06", hll_hash("6"));"""
 
+    sql """drop table if exists ${base_table}"""
+    sql """
+        create table ${base_table} (
+            bool_col boolean,
+            tinyint_col tinyint,
+            smallint_col smallint,
+            int_col int,
+            bigint_col bigint,
+            largeint_col largeint,
+            float_col float,
+            double_col double,
+            decimal_col decimal(10, 5),
+            decimal_col2 decimal(30, 10),
+            date_col date,
+            datetime_col datetime(3),
+            char_col char(10),
+            varchar_col varchar(10),
+            json_col json
+        )
+        DUPLICATE KEY(`bool_col`)
+        DISTRIBUTED BY HASH(`bool_col`) BUCKETS 3
+        PROPERTIES (
+            "replication_allocation" = "tag.location.default: 1"
+        );
+    """
+    sql """insert into ${base_table} values (true, 1, 1, 1, 1, 1, 1.0, 1.0, 
1.0, 1.0, '2021-01-01', '2021-01-01 00:00:00.000', 'a', 'a', '{\"a\": 1}');"""
+    order_qt_base1 """ select * from ${base_table} order by int_col; """
+
+    sql """drop table if exists ${arr_table}"""
+    sql """
+        create table ${arr_table} (
+            int_col int,
+            arr_bool_col array<boolean>,
+            arr_tinyint_col array<tinyint>,
+            arr_smallint_col array<smallint>,
+            arr_int_col array<int>,
+            arr_bigint_col array<bigint>,
+            arr_largeint_col array<largeint>,
+            arr_float_col array<float>,
+            arr_double_col array<double>,
+            arr_decimal1_col array<decimal(10, 5)>,
+            arr_decimal2_col array<decimal(30, 10)>,
+            arr_date_col array<date>,
+            arr_datetime_col array<datetime(3)>,
+            arr_char_col array<char(10)>,
+            arr_varchar_col array<varchar(10)>,
+            arr_string_col array<string>
+        )
+        DUPLICATE KEY(`int_col`)
+        DISTRIBUTED BY HASH(`int_col`) BUCKETS 3
+        PROPERTIES (
+            "replication_allocation" = "tag.location.default: 1"
+        );
+    """
+
+    sql """insert into ${arr_table} values (1, array(true), array(1), 
array(1), array(1), array(1), array(1), array(1.0), array(1.0), array(1.0), 
array(1.0), array('2021-01-01'), array('2021-01-01 00:00:00.000'), array('a'), 
array('a'), array('a'));"""
+    order_qt_arr1 """ select * from ${arr_table} order by int_col; """
+
+
     sql """ set return_object_data_as_binary=true """
     order_qt_tb1 """ select pin_id, hll_union_agg(user_log_acct) from ${hllTable} group by pin_id; """
 
@@ -153,6 +161,8 @@ suite("test_doris_jdbc_catalog", "p0") {
     qt_sql """select current_catalog()"""
     sql """ use ${internal_db_name} """
     order_qt_tb2 """ select pin_id, hll_union_agg(user_log_acct) from ${catalog_name}.${internal_db_name}.${hllTable} group by pin_id; """
+    order_qt_base2 """ select * from ${catalog_name}.${internal_db_name}.${base_table} order by int_col; """
+    order_qt_arr2 """ select * from ${catalog_name}.${internal_db_name}.${arr_table} order by int_col; """
 
     //clean
     qt_sql """select current_catalog()"""
@@ -161,5 +171,7 @@ suite("test_doris_jdbc_catalog", "p0") {
     sql "use ${internal_db_name}"
     sql """ drop table if exists ${inDorisTable} """
     sql """ drop table if exists ${hllTable} """
+    sql """ drop table if exists ${base_table} """
+    sql """ drop table if exists ${arr_table} """
 
 }

