This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 07224686ef0995b1c522724de8c5fb15009734ef
Author: zy-kkk <zhongy...@gmail.com>
AuthorDate: Fri Mar 1 10:21:13 2024 +0800

    [feature](jdbc catalog) support db2 jdbc catalog (#31627)
---
 docker/thirdparties/docker-compose/db2/db2.env     |  19 ++
 .../thirdparties/docker-compose/db2/db2.yaml.tpl   |  54 +++++
 .../docker-compose/db2/init/01-drop-schema.sql     |  18 ++
 .../docker-compose/db2/init/02-create-schema.sql   |  18 ++
 .../docker-compose/db2/init/03-create-table.sql    |  40 ++++
 .../docker-compose/db2/init/04-insert.sql          | 150 ++++++++++++
 docker/thirdparties/run-thirdparties-docker.sh     |  21 +-
 docs/en/docs/lakehouse/multi-catalog/jdbc.md       |  81 +++++--
 docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md    |  81 +++++--
 .../org/apache/doris/jdbc/BaseJdbcExecutor.java    |   8 +
 ...racleJdbcExecutor.java => DB2JdbcExecutor.java} |  83 +++----
 .../org/apache/doris/jdbc/JdbcExecutorFactory.java |   2 +
 .../org/apache/doris/jdbc/OracleJdbcExecutor.java  |   8 -
 .../org/apache/doris/catalog/JdbcResource.java     |   6 +-
 .../java/org/apache/doris/catalog/JdbcTable.java   |   3 +
 .../doris/datasource/jdbc/client/JdbcClient.java   |   2 +
 .../datasource/jdbc/client/JdbcDB2Client.java      |  83 +++++++
 gensrc/thrift/Types.thrift                         |   3 +-
 regression-test/conf/regression-conf.groovy        |   1 +
 .../jdbc/test_db2_jdbc_catalog.out                 |  43 ++++
 regression-test/framework/pom.xml                  |   5 +
 .../org/apache/doris/regression/suite/Suite.groovy |   9 +
 .../doris/regression/suite/SuiteContext.groovy     |  20 ++
 .../pipeline/external/conf/regression-conf.groovy  |   1 +
 .../jdbc/test_db2_jdbc_catalog.groovy              | 253 +++++++++++++++++++++
 25 files changed, 919 insertions(+), 93 deletions(-)

diff --git a/docker/thirdparties/docker-compose/db2/db2.env b/docker/thirdparties/docker-compose/db2/db2.env
new file mode 100644
index 00000000000..25e55c59ef3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/db2.env
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+DOCKER_DB2_EXTERNAL_PORT=50000
diff --git a/docker/thirdparties/docker-compose/db2/db2.yaml.tpl b/docker/thirdparties/docker-compose/db2/db2.yaml.tpl
new file mode 100644
index 00000000000..79c5789ccf8
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/db2.yaml.tpl
@@ -0,0 +1,54 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+version: '3'
+
+services:
+  doris--db2_11:
+    image: ibmcom/db2:11.5.0.0a
+    ports:
+      - ${DOCKER_DB2_EXTERNAL_PORT}:50000
+    privileged: true
+    healthcheck:
+      test: ["CMD-SHELL", "su - db2inst1 -c \"db2 connect to doris && db2 
'select 1 from sysibm.sysdummy1'\""]
+      interval: 20s
+      timeout: 60s
+      retries: 10
+    volumes:
+      - ./init:/docker-entrypoint-initdb.d
+    environment:
+      - LICENSE=accept
+      - DBNAME=doris
+      - DB2INSTANCE=db2inst1
+      - DB2INST1_PASSWORD=123456
+      - TZ=Asia/Shanghai
+    restart: always
+    networks:
+      - doris--db2_network
+  db2-hello-world:
+    image: hello-world
+    depends_on:
+      doris--db2_11:
+        condition: service_healthy
+    networks:
+      - doris--db2_network
+networks:
+  doris--db2_network:
+    ipam:
+      driver: default
+      config:
+        - subnet: 168.50.0.0/24
diff --git a/docker/thirdparties/docker-compose/db2/init/01-drop-schema.sql b/docker/thirdparties/docker-compose/db2/init/01-drop-schema.sql
new file mode 100644
index 00000000000..d5450b949d0
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/init/01-drop-schema.sql
@@ -0,0 +1,18 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--   http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+drop schema doris_test restrict;
\ No newline at end of file
diff --git a/docker/thirdparties/docker-compose/db2/init/02-create-schema.sql b/docker/thirdparties/docker-compose/db2/init/02-create-schema.sql
new file mode 100644
index 00000000000..f84ec00b7d8
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/init/02-create-schema.sql
@@ -0,0 +1,18 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--   http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+create schema doris_test;
\ No newline at end of file
diff --git a/docker/thirdparties/docker-compose/db2/init/03-create-table.sql b/docker/thirdparties/docker-compose/db2/init/03-create-table.sql
new file mode 100644
index 00000000000..e54190e2c60
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/init/03-create-table.sql
@@ -0,0 +1,40 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--   http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+CREATE TABLE doris_test.sample_table (
+    id_column INT GENERATED ALWAYS AS IDENTITY,
+    numeric_column NUMERIC,
+    decimal_column DECIMAL(31, 10),
+    decfloat_column DECFLOAT,
+    float_column FLOAT,
+    real_column REAL,
+    double_column DOUBLE,
+    double_precision_column DOUBLE PRECISION,
+    smallint_column SMALLINT,
+    int_column INT,
+    bigint_column BIGINT,
+    varchar_column VARCHAR(255),
+    varcharphic_column VARGRAPHIC(50),
+    long_varchar_column LONG VARCHAR ,
+    long_varcharphic_column LONG VARGRAPHIC,
+    char_varying_column CHAR VARYING(255),
+    char_column CHAR(255),
+    date_column DATE,
+    timestamp_column TIMESTAMP,
+    time_column TIME,
+    clob_column CLOB
+);
diff --git a/docker/thirdparties/docker-compose/db2/init/04-insert.sql b/docker/thirdparties/docker-compose/db2/init/04-insert.sql
new file mode 100644
index 00000000000..b9f659b59bb
--- /dev/null
+++ b/docker/thirdparties/docker-compose/db2/init/04-insert.sql
@@ -0,0 +1,150 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--   http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+-- Insert normal data
+INSERT INTO doris_test.sample_table (
+    numeric_column,
+    decimal_column,
+    decfloat_column,
+    float_column,
+    real_column,
+    double_column,
+    double_precision_column,
+    smallint_column,
+    int_column,
+    bigint_column,
+    varchar_column,
+    varcharphic_column,
+    long_varchar_column,
+    long_varcharphic_column,
+    char_varying_column,
+    char_column,
+    date_column,
+    timestamp_column,
+    time_column,
+    clob_column
+) VALUES (
+    123,
+    1234567890.1234567890,
+    1.234567890,
+    12345.6789,
+    12345.6789,
+    1234567890.1234567890,
+    1234567890.1234567890,
+    123,
+    12345,
+    123456789012345,
+    'Varchar text',
+    'Varcharphic text',
+    'Long varchar text',
+    'Long varcharphic text',
+    'Char varying text',
+    'Char text',
+    '2024-01-24',
+    '2024-01-24-12.34.56.789000',
+    '12:34:56',
+    'Sample CLOB text'
+);
+
+-- Insert null data
+INSERT INTO doris_test.sample_table (
+    numeric_column,
+    decimal_column,
+    decfloat_column,
+    float_column,
+    real_column,
+    double_column,
+    double_precision_column,
+    smallint_column,
+    int_column,
+    bigint_column,
+    varchar_column,
+    varcharphic_column,
+    long_varchar_column,
+    long_varcharphic_column,
+    char_varying_column,
+    char_column,
+    date_column,
+    timestamp_column,
+    time_column,
+    clob_column
+) VALUES (
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null,
+    null
+);
+
+INSERT INTO doris_test.sample_table (
+    numeric_column,
+    decimal_column,
+    decfloat_column,
+    float_column,
+    real_column,
+    double_column,
+    double_precision_column,
+    smallint_column,
+    int_column,
+    bigint_column,
+    varchar_column,
+    varcharphic_column,
+    long_varchar_column,
+    long_varcharphic_column,
+    char_varying_column,
+    char_column,
+    date_column,
+    timestamp_column,
+    time_column,
+    clob_column
+) VALUES (
+    123,
+    1234567890.1234567890,
+    1.234567890,
+    12345.6789,
+    12345.6789,
+    1234567890.1234567890,
+    1234567890.1234567890,
+    123,
+    12345,
+    123456789012345,
+    '中文一',
+    '中文二',
+    '中文三',
+    '中文四',
+    '中文五',
+    '中文六',
+    '2024-01-24',
+    '2024-01-24-12.34.56.789000',
+    '12:34:56',
+    '中文七'
+);
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index f8d647c73cb..730367cfad6 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -37,7 +37,7 @@ Usage: $0 <options>
      --stop             stop the specified components
 
   All valid components:
-    mysql,pg,oracle,sqlserver,clickhouse,es,hive,iceberg,hudi,trino,kafka,mariadb
+    mysql,pg,oracle,sqlserver,clickhouse,es,hive,iceberg,hudi,trino,kafka,mariadb,db2
   "
     exit 1
 }
@@ -60,7 +60,7 @@ STOP=0
 
 if [[ "$#" == 1 ]]; then
     # default
-    COMPONENTS="mysql,es,hive,pg,oracle,sqlserver,clickhouse,mariadb,iceberg"
+    COMPONENTS="mysql,es,hive,pg,oracle,sqlserver,clickhouse,mariadb,iceberg,db2"
 else
     while true; do
         case "$1" in
@@ -92,7 +92,7 @@ else
     done
     if [[ "${COMPONENTS}"x == ""x ]]; then
         if [[ "${STOP}" -eq 1 ]]; then
-            COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino,kafka,mariadb"
+            COMPONENTS="mysql,es,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino,kafka,mariadb,db2"
         fi
     fi
 fi
@@ -133,6 +133,7 @@ RUN_TRINO=0
 RUN_KAFKA=0
 RUN_SPARK=0
 RUN_MARIADB=0
+RUN_DB2=0
 
 for element in "${COMPONENTS_ARR[@]}"; do
     if [[ "${element}"x == "mysql"x ]]; then
@@ -161,6 +162,8 @@ for element in "${COMPONENTS_ARR[@]}"; do
         RUN_SPARK=1
     elif [[ "${element}"x == "mariadb"x ]];then
         RUN_MARIADB=1
+    elif [[ "${element}"x == "db2"x ]];then
+        RUN_DB2=1
     else
         echo "Invalid component: ${element}"
         usage
@@ -228,6 +231,18 @@ if [[ "${RUN_ORACLE}" -eq 1 ]]; then
     fi
 fi
 
+if [[ "${RUN_DB2}" -eq 1 ]]; then
+    # db2
+    cp "${ROOT}"/docker-compose/db2/db2.yaml.tpl 
"${ROOT}"/docker-compose/db2/db2.yaml
+    sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/db2/db2.yaml
+    sudo docker compose -f "${ROOT}"/docker-compose/db2/db2.yaml --env-file 
"${ROOT}"/docker-compose/db2/db2.env down
+    if [[ "${STOP}" -ne 1 ]]; then
+        sudo rm "${ROOT}"/docker-compose/db2/data/* -rf
+        sudo mkdir -p "${ROOT}"/docker-compose/db2/data/
+        sudo docker compose -f "${ROOT}"/docker-compose/db2/db2.yaml 
--env-file "${ROOT}"/docker-compose/db2/db2.env up -d
+    fi
+fi
+
 if [[ "${RUN_SQLSERVER}" -eq 1 ]]; then
     # sqlserver
     cp "${ROOT}"/docker-compose/sqlserver/sqlserver.yaml.tpl 
"${ROOT}"/docker-compose/sqlserver/sqlserver.yaml
diff --git a/docs/en/docs/lakehouse/multi-catalog/jdbc.md b/docs/en/docs/lakehouse/multi-catalog/jdbc.md
index f3485abe2be..fa1fa4800cd 100644
--- a/docs/en/docs/lakehouse/multi-catalog/jdbc.md
+++ b/docs/en/docs/lakehouse/multi-catalog/jdbc.md
@@ -840,28 +840,81 @@ CREATE CATALOG jdbc_oceanbase PROPERTIES (
 When Doris connects to OceanBase, it will automatically recognize that OceanBase is in MySQL or Oracle mode. Hierarchical correspondence and type mapping refer to [MySQL](#mysql) and [Oracle](#oracle)
 :::
 
+### DB2
+
+#### Example
+
+```sql
+CREATE CATALOG `jdbc_db2` PROPERTIES (
+    "user" = "db2inst1",
+    "type" = "jdbc",
+    "password" = "123456",
+    "jdbc_url" = "jdbc:db2://127.0.0.1:50000/doris",
+    "driver_url" = "jcc-11.5.8.0.jar",
+    "driver_class" = "com.ibm.db2.jcc.DB2Driver"
+);
+```
+
+#### Hierarchy Mapping
+
+When mapping DB2, a Doris Database corresponds to a Schema under the specified DataBase in DB2 (for example, "doris" in the `jdbc_url` parameter above), and a Table under that Doris Database corresponds to a Table under that Schema. The mapping is as follows:
+
+|  Doris   |   DB2    |
+|:--------:|:--------:|
+| Catalog  | DataBase |
+| Database |  Schema  |
+|  Table   |  Table   |
+
+
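+A minimal usage sketch of this three-level mapping, assuming the `jdbc_db2` catalog from the example above and a DB2 schema named `DORIS_TEST` (the catalog, schema, and table names are illustrative):
+
+```sql
+-- The catalog corresponds to the DB2 DataBase named in jdbc_url ("doris").
+SWITCH jdbc_db2;
+-- Each database listed here corresponds to a Schema in that DB2 DataBase.
+SHOW DATABASES;
+-- Tables inside the Schema are exposed as Doris tables.
+USE DORIS_TEST;
+SHOW TABLES;
+SELECT * FROM SAMPLE_TABLE LIMIT 10;
+```
+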
+#### Type Mapping
+
+| DB2 Type         | Doris Type   | Notes |
+|------------------|--------------|-------|
+| SMALLINT         | SMALLINT     |       |
+| INT              | INT          |       |
+| BIGINT           | BIGINT       |       |
+| DOUBLE           | DOUBLE       |       |
+| DOUBLE PRECISION | DOUBLE       |       |
+| FLOAT            | DOUBLE       |       |
+| REAL             | FLOAT        |       |
+| NUMERIC          | DECIMAL      |       |
+| DECIMAL          | DECIMAL      |       |
+| DECFLOAT         | DECIMAL      |       |
+| DATE             | DATE         |       |
+| TIMESTAMP        | DATETIME     |       |
+| CHAR             | CHAR         |       |
+| CHAR VARYING     | VARCHAR      |       |
+| VARCHAR          | VARCHAR      |       |
+| LONG VARCHAR     | VARCHAR      |       |
+| VARGRAPHIC       | STRING       |       |
+| LONG VARGRAPHIC  | STRING       |       |
+| TIME             | STRING       |       |
+| CLOB             | STRING       |       |
+| OTHER            | UNSUPPORTED  |       |
+
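+As a concrete illustration of the mapping above, the regression test added in this commit creates a DB2 table and checks the Doris-side schema. A sketch of what `DESC` reports for a few of its columns (exact rendering may vary by version):
+
+```sql
+DESC DORIS_TEST.SAMPLE_TABLE;
+-- decimal_column  DECIMAL(31, 10)     ->  DECIMAL(31, 10)
+-- decfloat_column DECFLOAT            ->  DECIMAL(34, 0)
+-- real_column     REAL                ->  FLOAT
+-- long_varchar_column LONG VARCHAR    ->  VARCHAR(32700)
+-- timestamp_column TIMESTAMP          ->  DATETIME(6)
+-- time_column TIME, clob_column CLOB  ->  TEXT
+```
+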
 ## JDBC Drivers
 
 It is recommended to use the following versions of Driver to connect to the corresponding database. Other versions of the Driver have not been tested and may cause unexpected problems.
 
-|  Source | JDBC Driver Version |
-|:--------:|:--------:|
-| MySQL 5.x  | mysql-connector-java-5.1.47.jar |
-| MySQL 8.x  | mysql-connector-java-8.0.25.jar |
-| PostgreSQL | postgresql-42.5.1.jar |
-| Oracle   | ojdbc8.jar|
-| SQLServer | mssql-jdbc-11.2.3.jre8.jar |
-| Doris | mysql-connector-java-5.1.47.jar / mysql-connector-java-8.0.25.jar |
-| Clickhouse | clickhouse-jdbc-0.4.2-all.jar  |
-| SAP HAHA | ngdbc.jar |
-| Trino/Presto | trino-jdbc-389.jar / presto-jdbc-0.280.jar |
-| OceanBase | oceanbase-client-2.4.2.jar |
+|    Source    |                        JDBC Driver Version                         |
+|:------------:|:-----------------------------------------------------------------:|
+|  MySQL 5.x   |                  mysql-connector-java-5.1.47.jar                   |
+|  MySQL 8.x   |                  mysql-connector-java-8.0.25.jar                   |
+|  PostgreSQL  |                       postgresql-42.5.1.jar                        |
+|    Oracle    |                             ojdbc8.jar                             |
+|  SQLServer   |                     mssql-jdbc-11.2.3.jre8.jar                     |
+|    Doris     | mysql-connector-java-5.1.47.jar / mysql-connector-java-8.0.25.jar  |
+|  Clickhouse  |                    clickhouse-jdbc-0.4.2-all.jar                   |
+|   SAP HANA   |                              ngdbc.jar                             |
+| Trino/Presto |             trino-jdbc-389.jar / presto-jdbc-0.280.jar             |
+|  OceanBase   |                     oceanbase-client-2.4.2.jar                     |
+|     DB2      |                          jcc-11.5.8.0.jar                          |
 
 ## FAQ
 
-1. Are there any other databases supported besides MySQL, Oracle, PostgreSQL, SQLServer, ClickHouse, SAP HANA, Trino and OceanBase?
+1. Are there any other databases supported besides MySQL, Oracle, PostgreSQL, SQLServer, ClickHouse, SAP HANA, Trino/Presto, OceanBase, and DB2?
 
-   Currently, Doris supports MySQL, Oracle, PostgreSQL, SQLServer, ClickHouse, SAP HANA, Trino and OceanBase. We are planning to expand this list. Technically, any databases that support JDBC access can be connected to Doris in the form of JDBC external tables. You are more than welcome to be a Doris contributor to expedite this effort.
+   Currently, Doris supports MySQL, Oracle, PostgreSQL, SQLServer, ClickHouse, SAP HANA, Trino/Presto, OceanBase, and DB2. We are planning to expand this list. Technically, any database that supports JDBC access can be connected to Doris in the form of JDBC external tables. You are more than welcome to be a Doris contributor to expedite this effort.
 
 2. Why does Mojibake occur when Doris tries to read emojis from MySQL external tables?
 
diff --git a/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md b/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
index fe308c44487..d78cb3e9cbf 100644
--- a/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
+++ b/docs/zh-CN/docs/lakehouse/multi-catalog/jdbc.md
@@ -843,28 +843,81 @@ CREATE CATALOG jdbc_oceanbase PROPERTIES (
  Doris 在连接 OceanBase 时,会自动识别 OceanBase 处于 MySQL 或者 Oracle 模式,层级对应和类型映射参考 [MySQL](#mysql) 与 [Oracle](#oracle)
 :::
 
+### DB2
+
+#### 创建示例
+
+```sql
+CREATE CATALOG `jdbc_db2` PROPERTIES (
+    "user" = "db2inst1",
+    "type" = "jdbc",
+    "password" = "123456",
+    "jdbc_url" = "jdbc:db2://127.0.0.1:50000/doris",
+    "driver_url" = "jcc-11.5.8.0.jar",
+    "driver_class" = "com.ibm.db2.jcc.DB2Driver"
+);
+```
+
+#### 层级映射
+
+映射 DB2 时,Doris 的 Database 对应于 DB2 中指定 DataBase(如示例中 `jdbc_url` 参数中的 "doris")下的一个 Schema。而 Doris 的 Database 下的 Table 则对应于 DB2 中 Schema 下的 Tables。即映射关系如下:
+
+|  Doris   |   DB2    |
+|:--------:|:--------:|
+| Catalog  | DataBase |
+| Database |  Schema  |
+|  Table   |  Table   |
+
+
+#### 类型映射
+
+| DB2 Type         | Doris Type   | Notes |
+|------------------|--------------|-------|
+| SMALLINT         | SMALLINT     |       |
+| INT              | INT          |       |
+| BIGINT           | BIGINT       |       |
+| DOUBLE           | DOUBLE       |       |
+| DOUBLE PRECISION | DOUBLE       |       |
+| FLOAT            | DOUBLE       |       |
+| REAL             | FLOAT        |       |
+| NUMERIC          | DECIMAL      |       |
+| DECIMAL          | DECIMAL      |       |
+| DECFLOAT         | DECIMAL      |       |
+| DATE             | DATE         |       |
+| TIMESTAMP        | DATETIME     |       |
+| CHAR             | CHAR         |       |
+| CHAR VARYING     | VARCHAR      |       |
+| VARCHAR          | VARCHAR      |       |
+| LONG VARCHAR     | VARCHAR      |       |
+| VARGRAPHIC       | STRING       |       |
+| LONG VARGRAPHIC  | STRING       |       |
+| TIME             | STRING       |       |
+| CLOB             | STRING       |       |
+| OTHER            | UNSUPPORTED  |       |
+
 ## JDBC Driver 列表
 
 推荐使用以下版本的 Driver 连接对应的数据库。其他版本的 Driver 未经测试,可能导致非预期的问题。
 
-|  Source | JDBC Driver Version |
-|:--------:|:--------:|
-| MySQL 5.x  | mysql-connector-java-5.1.47.jar |
-| MySQL 8.x  | mysql-connector-java-8.0.25.jar |
-| PostgreSQL | postgresql-42.5.1.jar |
-| Oracle   | ojdbc8.jar|
-| SQLServer | mssql-jdbc-11.2.3.jre8.jar |
-| Doris | mysql-connector-java-5.1.47.jar / mysql-connector-java-8.0.25.jar |
-| Clickhouse | clickhouse-jdbc-0.4.2-all.jar  |
-| SAP HAHA | ngdbc.jar |
-| Trino/Presto | trino-jdbc-389.jar / presto-jdbc-0.280.jar |
-| OceanBase | oceanbase-client-2.4.2.jar |
+|    Source    |                        JDBC Driver Version                         |
+|:------------:|:-----------------------------------------------------------------:|
+|  MySQL 5.x   |                  mysql-connector-java-5.1.47.jar                   |
+|  MySQL 8.x   |                  mysql-connector-java-8.0.25.jar                   |
+|  PostgreSQL  |                       postgresql-42.5.1.jar                        |
+|    Oracle    |                             ojdbc8.jar                             |
+|  SQLServer   |                     mssql-jdbc-11.2.3.jre8.jar                     |
+|    Doris     | mysql-connector-java-5.1.47.jar / mysql-connector-java-8.0.25.jar  |
+|  Clickhouse  |                    clickhouse-jdbc-0.4.2-all.jar                   |
+|   SAP HANA   |                              ngdbc.jar                             |
+| Trino/Presto |             trino-jdbc-389.jar / presto-jdbc-0.280.jar             |
+|  OceanBase   |                     oceanbase-client-2.4.2.jar                     |
+|     DB2      |                          jcc-11.5.8.0.jar                          |
 
 ## 常见问题
 
-1. 除了 MySQL,Oracle,PostgreSQL,SQLServer,ClickHouse,SAP HANA,Trino/Presto,OceanBase 是否能够支持更多的数据库
+1. 除了 MySQL,Oracle,PostgreSQL,SQLServer,ClickHouse,SAP HANA,Trino/Presto,OceanBase,DB2 是否能够支持更多的数据库
 
-    目前Doris只适配了 MySQL,Oracle,PostgreSQL,SQLServer,ClickHouse,SAP HANA,Trino/Presto,OceanBase. 关于其他的数据库的适配工作正在规划之中,原则上来说任何支持JDBC访问的数据库都能通过JDBC外表来访问。如果您有访问其他外表的需求,欢迎修改代码并贡献给Doris。
+    目前Doris只适配了 MySQL,Oracle,PostgreSQL,SQLServer,ClickHouse,SAP HANA,Trino/Presto,OceanBase,DB2. 关于其他的数据库的适配工作正在规划之中,原则上来说任何支持JDBC访问的数据库都能通过JDBC外表来访问。如果您有访问其他外表的需求,欢迎修改代码并贡献给Doris。
 
 2. 读写 MySQL外表的emoji表情出现乱码
 
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
index 87f67807826..4569ffab075 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
@@ -517,4 +517,12 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
                 throw new RuntimeException("Unknown type value: " + dorisType);
         }
     }
+
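+    // Strip the trailing spaces that the database pads onto fixed-length CHAR values.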
+    protected String trimSpaces(String str) {
+        int end = str.length() - 1;
+        while (end >= 0 && str.charAt(end) == ' ') {
+            end--;
+        }
+        return str.substring(0, end + 1);
+    }
 }
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
similarity index 58%
copy from fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
copy to fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
index bfb941939f5..2c20757ef8f 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
@@ -23,35 +23,36 @@ import org.apache.doris.common.jni.vec.ColumnValueConverter;
 import org.apache.doris.common.jni.vec.VectorTable;
 
 import com.alibaba.druid.pool.DruidDataSource;
-import org.apache.log4j.Logger;
 
 import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.sql.Clob;
+import java.sql.Date;
 import java.sql.SQLException;
+import java.sql.Timestamp;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
 
-public class OracleJdbcExecutor extends BaseJdbcExecutor {
-    private static final Logger LOG = Logger.getLogger(OracleJdbcExecutor.class);
-
-    public OracleJdbcExecutor(byte[] thriftParams) throws Exception {
+public class DB2JdbcExecutor extends BaseJdbcExecutor {
+    public DB2JdbcExecutor(byte[] thriftParams) throws Exception {
         super(thriftParams);
     }
 
     @Override
     protected void setValidationQuery(DruidDataSource ds) {
-        ds.setValidationQuery("SELECT 1 FROM dual");
+        ds.setValidationQuery("select 1 from sysibm.sysdummy1");
     }
 
     @Override
     protected void initializeBlock(int columnCount, String[] replaceStringList, int batchSizeNum,
             VectorTable outputTable) {
         for (int i = 0; i < columnCount; ++i) {
-            if (outputTable.getColumnType(i).getType() == Type.LARGEINT) {
-                block.add(new BigDecimal[batchSizeNum]);
-            } else if (outputTable.getColumnType(i).getType() == Type.STRING) {
-                block.add(new Object[batchSizeNum]);
+            if (outputTable.getColumnType(i).getType() == Type.SMALLINT) {
+                block.add(new Integer[batchSizeNum]);
+            } else if (outputTable.getColumnType(i).getType() == Type.DATE
+                    || outputTable.getColumnType(i).getType() == Type.DATEV2) {
+                block.add(new Date[batchSizeNum]);
+            } else if (outputTable.getColumnType(i).getType() == Type.DATETIME
+                    || outputTable.getColumnType(i).getType() == Type.DATETIMEV2) {
+                block.add(new Timestamp[batchSizeNum]);
             } else {
                 block.add(outputTable.getColumn(i).newObjectContainerArray(batchSizeNum));
             }
@@ -61,34 +62,25 @@ public class OracleJdbcExecutor extends BaseJdbcExecutor {
     @Override
     protected Object getColumnValue(int columnIndex, ColumnType type, String[] replaceStringList) throws SQLException {
         switch (type.getType()) {
-            case TINYINT:
-                return resultSet.getObject(columnIndex + 1, Byte.class);
-            case SMALLINT:
-                return resultSet.getObject(columnIndex + 1, Short.class);
-            case INT:
-                return resultSet.getObject(columnIndex + 1, Integer.class);
-            case BIGINT:
-                return resultSet.getObject(columnIndex + 1, Long.class);
-            case FLOAT:
-                return resultSet.getObject(columnIndex + 1, Float.class);
-            case DOUBLE:
-                return resultSet.getObject(columnIndex + 1, Double.class);
-            case LARGEINT:
             case DECIMALV2:
             case DECIMAL32:
             case DECIMAL64:
             case DECIMAL128:
                 return resultSet.getObject(columnIndex + 1, BigDecimal.class);
+            case SMALLINT:
+            case INT:
+            case BIGINT:
+            case FLOAT:
+            case DOUBLE:
             case DATE:
             case DATEV2:
-                return resultSet.getObject(columnIndex + 1, LocalDate.class);
             case DATETIME:
             case DATETIMEV2:
-                return resultSet.getObject(columnIndex + 1, LocalDateTime.class);
+                return resultSet.getObject(columnIndex + 1);
             case CHAR:
             case VARCHAR:
             case STRING:
-                return resultSet.getObject(columnIndex + 1);
+                return resultSet.getObject(columnIndex + 1, String.class);
             default:
                 throw new IllegalArgumentException("Unsupported column type: " 
+ type.getType());
         }
@@ -97,35 +89,22 @@ public class OracleJdbcExecutor extends BaseJdbcExecutor {
     @Override
     protected ColumnValueConverter getOutputConverter(ColumnType columnType, String replaceString) {
         switch (columnType.getType()) {
+            case SMALLINT:
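+                // The DB2 JDBC driver returns SMALLINT values as Integer, so narrow them to Short for Doris.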
+                return createConverter(
+                        input -> ((Integer) input).shortValue(), Short.class);
+            case DATE:
+            case DATEV2:
+                return createConverter(
+                        input -> ((Date) input).toLocalDate(), LocalDate.class);
+            case DATETIME:
+            case DATETIMEV2:
+                return createConverter(
+                        input -> ((Timestamp) input).toLocalDateTime(), LocalDateTime.class);
             case CHAR:
                 return createConverter(
                         input -> trimSpaces(input.toString()), String.class);
-            case LARGEINT:
-                return createConverter(
-                        input -> ((BigDecimal) input).toBigInteger(), BigInteger.class);
-            case STRING:
-                return createConverter(input -> {
-                    if (input instanceof Clob) {
-                        try {
-                            return ((Clob) input).getSubString(1, (int) ((Clob) input).length());
-                        } catch (SQLException e) {
-                            LOG.error("Failed to get string from clob", e);
-                            return null;
-                        }
-                    } else {
-                        return input.toString();
-                    }
-                }, String.class);
             default:
                 return null;
         }
     }
-
-    private String trimSpaces(String str) {
-        int end = str.length() - 1;
-        while (end >= 0 && str.charAt(end) == ' ') {
-            end--;
-        }
-        return str.substring(0, end + 1);
-    }
 }
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutorFactory.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutorFactory.java
index d67c58bb6b4..21316a6a273 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutorFactory.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutorFactory.java
@@ -26,6 +26,8 @@ public class JdbcExecutorFactory {
                 return "org/apache/doris/jdbc/MySQLJdbcExecutor";
             case ORACLE:
                 return "org/apache/doris/jdbc/OracleJdbcExecutor";
+            case DB2:
+                return "org/apache/doris/jdbc/DB2JdbcExecutor";
             default:
                 return "org/apache/doris/jdbc/DefaultJdbcExecutor";
         }
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
index bfb941939f5..9ea902efb12 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
@@ -120,12 +120,4 @@ public class OracleJdbcExecutor extends BaseJdbcExecutor {
                 return null;
         }
     }
-
-    private String trimSpaces(String str) {
-        int end = str.length() - 1;
-        while (end >= 0 && str.charAt(end) == ' ') {
-            end--;
-        }
-        return str.substring(0, end + 1);
-    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcResource.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcResource.java
index 6b19137dff0..ab6dd7d41f3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcResource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcResource.java
@@ -75,6 +75,7 @@ public class JdbcResource extends Resource {
     public static final String JDBC_TRINO = "jdbc:trino";
     public static final String JDBC_PRESTO = "jdbc:presto";
     public static final String JDBC_OCEANBASE = "jdbc:oceanbase";
+    public static final String JDBC_DB2 = "jdbc:db2";
 
     public static final String NEBULA = "NEBULA";
     public static final String MYSQL = "MYSQL";
@@ -87,6 +88,7 @@ public class JdbcResource extends Resource {
     public static final String PRESTO = "PRESTO";
     public static final String OCEANBASE = "OCEANBASE";
     public static final String OCEANBASE_ORACLE = "OCEANBASE_ORACLE";
+    public static final String DB2 = "DB2";
 
     public static final String JDBC_PROPERTIES_PREFIX = "jdbc.";
     public static final String JDBC_URL = "jdbc_url";
@@ -327,6 +329,8 @@ public class JdbcResource extends Resource {
             return OCEANBASE;
         } else if (url.startsWith(JDBC_NEBULA)) {
             return NEBULA;
+        } else if (url.startsWith(JDBC_DB2)) {
+            return DB2;
         }
         throw new DdlException("Unsupported jdbc database type, please check 
jdbcUrl: " + url);
     }
@@ -416,7 +420,7 @@ public class JdbcResource extends Resource {
     }
 
     private static String getDelimiter(String jdbcUrl, String dbType) {
-        if (dbType.equals(SQLSERVER)) {
+        if (dbType.equals(SQLSERVER) || dbType.equals(DB2)) {
             return ";";
         } else if (jdbcUrl.contains("?")) {
             return "&";
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcTable.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcTable.java
index c93f6661074..4d3ded2540e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcTable.java
@@ -103,6 +103,7 @@ public class JdbcTable extends Table {
         tempMap.put("presto", TOdbcTableType.PRESTO);
         tempMap.put("oceanbase", TOdbcTableType.OCEANBASE);
         tempMap.put("oceanbase_oracle", TOdbcTableType.OCEANBASE_ORACLE);
+        tempMap.put("db2", TOdbcTableType.DB2);
         TABLE_TYPE_MAP = Collections.unmodifiableMap(tempMap);
     }
 
@@ -492,6 +493,7 @@ public class JdbcTable extends Table {
             case SAP_HANA:
                 return formatName(name, "\"", "\"", false, false);
             case ORACLE:
+            case DB2:
                 return formatName(name, "\"", "\"", true, false);
             default:
                 return name;
@@ -512,6 +514,7 @@ public class JdbcTable extends Table {
             case OCEANBASE_ORACLE:
             case ORACLE:
             case SAP_HANA:
+            case DB2:
                 return formatNameWithRemoteName(remoteName, "\"", "\"");
             default:
                 return remoteName;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
index e2f5658ee08..6d68ced3624 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
@@ -84,6 +84,8 @@ public abstract class JdbcClient {
             case JdbcResource.TRINO:
             case JdbcResource.PRESTO:
                 return new JdbcTrinoClient(jdbcClientConfig);
+            case JdbcResource.DB2:
+                return new JdbcDB2Client(jdbcClientConfig);
             default:
                 throw new IllegalArgumentException("Unsupported DB type: " + 
dbType);
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcDB2Client.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcDB2Client.java
new file mode 100644
index 00000000000..5cdcbf1ca4f
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcDB2Client.java
@@ -0,0 +1,83 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.jdbc.client;
+
+import org.apache.doris.catalog.PrimitiveType;
+import org.apache.doris.catalog.ScalarType;
+import org.apache.doris.catalog.Type;
+
+public class JdbcDB2Client extends JdbcClient {
+
+    protected JdbcDB2Client(JdbcClientConfig jdbcClientConfig) {
+        super(jdbcClientConfig);
+    }
+
+    @Override
+    protected String getDatabaseQuery() {
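+        // Only expose schemas defined (created) by the connecting user.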
+        return "SELECT schemaname FROM syscat.schemata WHERE DEFINER = CURRENT 
USER;";
+    }
+
+    @Override
+    protected Type jdbcTypeToDoris(JdbcFieldSchema fieldSchema) {
+        String db2Type = fieldSchema.getDataTypeName();
+        switch (db2Type) {
+            case "SMALLINT":
+                return Type.SMALLINT;
+            case "INTEGER":
+                return Type.INT;
+            case "BIGINT":
+                return Type.BIGINT;
+            case "DECFLOAT":
+            case "DECIMAL": {
+                int precision = fieldSchema.getColumnSize();
+                int scale = fieldSchema.getDecimalDigits();
+                return createDecimalOrStringType(precision, scale);
+            }
+            case "DOUBLE":
+                return Type.DOUBLE;
+            case "REAL":
+                return Type.FLOAT;
+            case "CHAR":
+                ScalarType charType = ScalarType.createType(PrimitiveType.CHAR);
+                charType.setLength(fieldSchema.columnSize);
+                return charType;
+            case "VARCHAR":
+            case "LONG VARCHAR":
+                ScalarType varcharType = ScalarType.createType(PrimitiveType.VARCHAR);
+                varcharType.setLength(fieldSchema.columnSize);
+                return varcharType;
+            case "DATE":
+                return ScalarType.createDateV2Type();
+            case "TIMESTAMP": {
+                // DB2 TIMESTAMP precision can exceed 6; Doris DATETIMEV2 supports at most microseconds (scale 6)
+                int scale = fieldSchema.getDecimalDigits();
+                if (scale > 6) {
+                    scale = 6;
+                }
+                return ScalarType.createDatetimeV2Type(scale);
+            }
+            case "TIME":
+            case "CLOB":
+            case "VARGRAPHIC":
+            case "LONG VARGRAPHIC":
+                return ScalarType.createStringType();
+            default:
+                return Type.UNSUPPORTED;
+        }
+    }
+}
diff --git a/gensrc/thrift/Types.thrift b/gensrc/thrift/Types.thrift
index e3b48c66b87..11ec1093da0 100644
--- a/gensrc/thrift/Types.thrift
+++ b/gensrc/thrift/Types.thrift
@@ -398,7 +398,8 @@ enum TOdbcTableType {
     PRESTO,
     OCEANBASE,
     OCEANBASE_ORACLE,
-    NEBULA
+    NEBULA,
+    DB2
 }
 
 struct TJdbcExecutorCtorParams {
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 6d17bd032fd..c6764a2ce86 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -113,6 +113,7 @@ sqlserver_2022_port=1433
 clickhouse_22_port=8123
 doris_port=9030
 mariadb_10_port=3326
+db2_11_port=50000
 
 // hive catalog test config
 // To enable hive/paimon test, you need first start hive container.
diff --git a/regression-test/data/external_table_p0/jdbc/test_db2_jdbc_catalog.out b/regression-test/data/external_table_p0/jdbc/test_db2_jdbc_catalog.out
new file mode 100644
index 00000000000..b8dbcf1bfad
--- /dev/null
+++ b/regression-test/data/external_table_p0/jdbc/test_db2_jdbc_catalog.out
@@ -0,0 +1,43 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+internal
+
+-- !sql --
+db2_jdbc_catalog
+
+-- !sample_table_desc --
+BIGINT_COLUMN  BIGINT  Yes     true    \N      
+CHAR_COLUMN    CHARACTER       Yes     true    \N      
+CHAR_VARYING_COLUMN    VARCHAR(255)    Yes     true    \N      
+CLOB_COLUMN    TEXT    Yes     true    \N      
+DATE_COLUMN    DATE    Yes     true    \N      
+DECFLOAT_COLUMN        DECIMAL(34, 0)  Yes     true    \N      
+DECIMAL_COLUMN DECIMAL(31, 10) Yes     true    \N      
+DOUBLE_COLUMN  DOUBLE  Yes     true    \N      
+DOUBLE_PRECISION_COLUMN        DOUBLE  Yes     true    \N      
+FLOAT_COLUMN   DOUBLE  Yes     true    \N      
+ID_COLUMN      INT     No      true    \N      
+INT_COLUMN     INT     Yes     true    \N      
+LONG_VARCHARPHIC_COLUMN        TEXT    Yes     true    \N      
+LONG_VARCHAR_COLUMN    VARCHAR(32700)  Yes     true    \N      
+NUMERIC_COLUMN DECIMAL(5, 0)   Yes     true    \N      
+REAL_COLUMN    FLOAT   Yes     true    \N      
+SMALLINT_COLUMN        SMALLINT        Yes     true    \N      
+TIMESTAMP_COLUMN       DATETIME(6)     Yes     true    \N      
+TIME_COLUMN    TEXT    Yes     true    \N      
+VARCHARPHIC_COLUMN     TEXT    Yes     true    \N      
+VARCHAR_COLUMN VARCHAR(255)    Yes     true    \N      
+
+-- !sample_table_select --
+\N     \N      \N      \N      \N      \N      \N      \N      \N      \N      
\N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
Varchar text    Varcharphic text        Long varchar text       Long 
varcharphic text   Char varying text       Char text       2024-01-24      
2024-01-24T12:34:56.789 12:34:56        Sample CLOB text
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
中文一     中文二     中文三     中文四     中文五     中文六     2024-01-24      
2024-01-24T12:34:56.789 12:34:56        中文七
+
+-- !sample_table_insert --
+\N     \N      \N      \N      \N      \N      \N      \N      \N      \N      
\N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+\N     \N      \N      \N      \N      \N      \N      \N      \N      \N      
\N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
Varchar text    Varcharphic text        Long varchar text       Long 
varcharphic text   Char varying text       Char text       2024-01-24      
2024-01-24T12:34:56.789 12:34:56        Sample CLOB text
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
Varchar text    Varcharphic text        Long varchar text       Long 
varcharphic text   Char varying text       Char text       2024-01-24      
2024-01-24T12:34:56.789 12:34:56        Sample CLOB text
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
中文一     中文二     中文三     中文四     中文五     中文六     2024-01-24      
2024-01-24T12:34:56.789 12:34:56        中文七
+123    1234567890.1234567890   1       12345.6789      12345.679       
1.2345678901234567E9    1.2345678901234567E9    123     12345   123456789012345 
中文一     中文二     中文三     中文四     中文五     中文六     2024-01-24      
2024-01-24T12:34:56.789 12:34:56        中文七
+
diff --git a/regression-test/framework/pom.xml b/regression-test/framework/pom.xml
index fae65b4c578..e16b178334d 100644
--- a/regression-test/framework/pom.xml
+++ b/regression-test/framework/pom.xml
@@ -312,5 +312,10 @@ under the License.
             <artifactId>flight-sql-jdbc-driver</artifactId>
             <version>${arrow.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.ibm.db2</groupId>
+            <artifactId>jcc</artifactId>
+            <version>11.5.8.0</version>
+        </dependency>
     </dependencies>
 </project>
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index a83722be2ea..109b1c2a76c 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -750,6 +750,15 @@ class Suite implements GroovyInterceptable {
         return result
     }
 
+    List<List<Object>> db2_docker(String sqlStr, boolean isOrder = false){
+        String cleanedSqlStr = sqlStr.replaceAll("\\s*;\\s*\$", "")
+        def (result, meta) = JdbcUtils.executeToList(context.getDB2DockerConnection(), cleanedSqlStr)
+        if (isOrder) {
+            result = DataUtils.sortByToString(result)
+        }
+        return result
+    }
+
     void quickRunTest(String tag, Object arg, boolean isOrder = false) {
         if (context.config.generateOutputFile || context.config.forceGenerateOutputFile) {
             Tuple2<List<List<Object>>, ResultSetMetaData> tupleResult = null
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/SuiteContext.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/SuiteContext.groovy
index cdbdf2df2d3..d61f586b414 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/SuiteContext.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/SuiteContext.groovy
@@ -46,6 +46,7 @@ class SuiteContext implements Closeable {
     public final ThreadLocal<ConnectionInfo> threadArrowFlightSqlConn = new 
ThreadLocal<>()
     public final ThreadLocal<Connection> threadHiveDockerConn = new 
ThreadLocal<>()
     public final ThreadLocal<Connection> threadHiveRemoteConn = new 
ThreadLocal<>()
+    public final ThreadLocal<Connection> threadDB2DockerConn = new 
ThreadLocal<>()
     private final ThreadLocal<Syncer> syncer = new ThreadLocal<>()
     public final Config config
     public final File dataPath
@@ -180,6 +181,15 @@ class SuiteContext implements Closeable {
         return threadConn
     }
 
+    Connection getDB2DockerConnection(){
+        def threadConn = threadDB2DockerConn.get()
+        if (threadConn == null) {
+            threadConn = getConnectionByDB2DockerConfig()
+            threadDB2DockerConn.set(threadConn)
+        }
+        return threadConn
+    }
+
     private String getJdbcNetInfo() {
         String subJdbc = config.jdbcUrl.substring(config.jdbcUrl.indexOf("://") + 3)
         return subJdbc.substring(0, subJdbc.indexOf("/"))
@@ -233,6 +243,16 @@ class SuiteContext implements Closeable {
         return DriverManager.getConnection(hiveJdbcUrl, hiveJdbcUser, hiveJdbcPassword)
     }
 
+    Connection getConnectionByDB2DockerConfig() {
+        Class.forName("com.ibm.db2.jcc.DB2Driver");
+        String db2Host = config.otherConfigs.get("externalEnvIp")
+        String db2Port = config.otherConfigs.get("db2_11_port")
+        String db2JdbcUrl = "jdbc:db2://${db2Host}:${db2Port}/doris"
+        String db2JdbcUser =  "db2inst1"
+        String db2JdbcPassword = "123456"
+        return DriverManager.getConnection(db2JdbcUrl, db2JdbcUser, db2JdbcPassword)
+    }
+
     Connection getTargetConnection(Suite suite) {
         def context = getSyncer(suite).context
         if (context.targetConnection == null) {
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 99a8666fbc9..3c9071886db 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -80,6 +80,7 @@ pg_14_port=7121
 oracle_11_port=1521
 sqlserver_2022_port=1433
 clickhouse_22_port=8123
+db2_11_port=50000
 
 // hive catalog test config
 // To enable jdbc test, you need first start hive container.
diff --git a/regression-test/suites/external_table_p0/jdbc/test_db2_jdbc_catalog.groovy b/regression-test/suites/external_table_p0/jdbc/test_db2_jdbc_catalog.groovy
new file mode 100644
index 00000000000..68d3066c471
--- /dev/null
+++ b/regression-test/suites/external_table_p0/jdbc/test_db2_jdbc_catalog.groovy
@@ -0,0 +1,253 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import java.net.URLClassLoader
+import java.sql.Connection
+import java.sql.DriverManager
+import java.net.URL
+
+suite("test_db2_jdbc_catalog", 
"p0,external,db2,external_docker,external_docker_db2") {
+    qt_sql """select current_catalog()"""
+
+    String enabled = context.config.otherConfigs.get("enableJdbcTest")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    String s3_endpoint = getS3Endpoint()
+    String bucket = getS3BucketName()
+    String driver_url = "https://${bucket}.${s3_endpoint}/regression/jdbc_driver/jcc-11.5.8.0.jar";
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        String catalog_name = "db2_jdbc_catalog";
+        String internal_db_name = "regression_test_jdbc_catalog_p0";
+        String ex_db_name = "DORIS_TEST";
+        String db2_port = context.config.otherConfigs.get("db2_11_port");
+        String sample_table = "SAMPLE_TABLE";
+
+        try {
+            db2_docker "CREATE SCHEMA doris_test;"
+            db2_docker """CREATE TABLE doris_test.sample_table (
+                id_column INT GENERATED ALWAYS AS IDENTITY,
+                numeric_column NUMERIC,
+                decimal_column DECIMAL(31, 10),
+                decfloat_column DECFLOAT,
+                float_column FLOAT,
+                real_column REAL,
+                double_column DOUBLE,
+                double_precision_column DOUBLE PRECISION,
+                smallint_column SMALLINT,
+                int_column INT,
+                bigint_column BIGINT,
+                varchar_column VARCHAR(255),
+                varcharphic_column VARGRAPHIC(50),
+                long_varchar_column LONG VARCHAR,
+                long_varcharphic_column LONG VARGRAPHIC,
+                char_varying_column CHAR VARYING(255),
+                char_column CHAR(255),
+                date_column DATE,
+                timestamp_column TIMESTAMP,
+                time_column TIME,
+                clob_column CLOB
+            );"""
+
+            db2_docker """INSERT INTO doris_test.sample_table (
+                numeric_column,
+                decimal_column,
+                decfloat_column,
+                float_column,
+                real_column,
+                double_column,
+                double_precision_column,
+                smallint_column,
+                int_column,
+                bigint_column,
+                varchar_column,
+                varcharphic_column,
+                long_varchar_column,
+                long_varcharphic_column,
+                char_varying_column,
+                char_column,
+                date_column,
+                timestamp_column,
+                time_column,
+                clob_column
+            ) VALUES (
+                123,
+                1234567890.1234567890,
+                1.234567890,
+                12345.6789,
+                12345.6789,
+                1234567890.1234567890,
+                1234567890.1234567890,
+                123,
+                12345,
+                123456789012345,
+                'Varchar text',
+                'Varcharphic text',
+                'Long varchar text',
+                'Long varcharphic text',
+                'Char varying text',
+                'Char text',
+                '2024-01-24',
+                '2024-01-24-12.34.56.789000',
+                '12:34:56',
+                'Sample CLOB text'
+            );"""
+
+            db2_docker """INSERT INTO doris_test.sample_table (
+                numeric_column,
+                decimal_column,
+                decfloat_column,
+                float_column,
+                real_column,
+                double_column,
+                double_precision_column,
+                smallint_column,
+                int_column,
+                bigint_column,
+                varchar_column,
+                varcharphic_column,
+                long_varchar_column,
+                long_varcharphic_column,
+                char_varying_column,
+                char_column,
+                date_column,
+                timestamp_column,
+                time_column,
+                clob_column
+            ) VALUES (
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            );"""
+
+            db2_docker """INSERT INTO doris_test.sample_table (
+                numeric_column,
+                decimal_column,
+                decfloat_column,
+                float_column,
+                real_column,
+                double_column,
+                double_precision_column,
+                smallint_column,
+                int_column,
+                bigint_column,
+                varchar_column,
+                varcharphic_column,
+                long_varchar_column,
+                long_varcharphic_column,
+                char_varying_column,
+                char_column,
+                date_column,
+                timestamp_column,
+                time_column,
+                clob_column
+            ) VALUES (
+                123,
+                1234567890.1234567890,
+                1.234567890,
+                12345.6789,
+                12345.6789,
+                1234567890.1234567890,
+                1234567890.1234567890,
+                123,
+                12345,
+                123456789012345,
+                '中文一',
+                '中文二',
+                '中文三',
+                '中文四',
+                '中文五',
+                '中文六',
+                '2024-01-24',
+                '2024-01-24-12.34.56.789000',
+                '12:34:56',
+                '中文七'
+            );"""
+
+            sql """create database if not exists ${internal_db_name}; """
+
+            sql """drop catalog if exists ${catalog_name} """
+
+            sql """create catalog if not exists ${catalog_name} properties(
+                "type"="jdbc",
+                "user"="db2inst1",
+                "password"="123456",
+                "jdbc_url" = "jdbc:db2://${externalEnvIp}:${db2_port}/doris",
+                "driver_url" = "${driver_url}",
+                "driver_class" = "com.ibm.db2.jcc.DB2Driver"
+            );"""
+
+            sql """switch ${catalog_name}"""
+            qt_sql """select current_catalog()"""
+            sql """ use ${ex_db_name}"""
+
+            order_qt_sample_table_desc """ desc ${sample_table}; """
+
+            order_qt_sample_table_select  """ select * except(ID_COLUMN) from ${sample_table} order by 1; """
+
+            sql """INSERT INTO ${sample_table} (
+                       numeric_column,
+                       decimal_column,
+                       decfloat_column,
+                       float_column,
+                       real_column,
+                       double_column,
+                       double_precision_column,
+                       smallint_column,
+                       int_column,
+                       bigint_column,
+                       varchar_column,
+                       varcharphic_column,
+                       long_varchar_column,
+                       long_varcharphic_column,
+                       char_varying_column,
+                       char_column,
+                       date_column,
+                       timestamp_column,
+                       time_column,
+                       clob_column
+                   )
+                   select * except(ID_COLUMN) from ${sample_table};
+            """
+
+            order_qt_sample_table_insert  """ select * except(ID_COLUMN) from ${sample_table} order by 1; """
+
+            sql """ drop catalog if exists ${catalog_name} """
+
+            db2_docker "DROP TABLE IF EXISTS doris_test.sample_table;"
+            db2_docker "DROP SCHEMA doris_test restrict;"
+
+        } catch (Exception e) {
+            e.printStackTrace()
+        }
+    }
+}

