This is an automated email from the ASF dual-hosted git repository.

zykkk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new a6ff87f32c [docker](trino) add Trino docker compose and hive catalog (#21086)
a6ff87f32c is described below

commit a6ff87f32c4bf08d8c12078b2f157a0b72a3cc61
Author: bingquanzhao <bingquan_z...@icloud.com>
AuthorDate: Wed Jun 28 11:04:41 2023 +0800

    [docker](trino) add Trino docker compose and hive catalog (#21086)
---
 .../docker-compose/trino/gen_env.sh.tpl            |  39 ++++
 .../docker-compose/trino/hive.properties.tpl       |  19 ++
 .../trino/scripts/create_trino_table.sql           | 222 +++++++++++++++++++++
 .../docker-compose/trino/trino_hive.env.tpl        |  53 +++++
 .../docker-compose/trino/trino_hive.yaml.tpl       | 141 +++++++++++++
 docker/thirdparties/run-thirdparties-docker.sh     |  87 +++++++-
 .../developer-guide/regression-testing.md          |  16 +-
 7 files changed, 573 insertions(+), 4 deletions(-)

diff --git a/docker/thirdparties/docker-compose/trino/gen_env.sh.tpl b/docker/thirdparties/docker-compose/trino/gen_env.sh.tpl
new file mode 100644
index 0000000000..dc1357540a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/trino/gen_env.sh.tpl
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+####################################################################
+# This script appends generated ports and host names to trino_hive.env
+####################################################################
+
+set -eo pipefail
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+FS_PORT=8120
+HMS_PORT=9183
+
+# The container's hostname must be set to match the host machine's.
+# Otherwise, the Doris process cannot connect to the namenode directly.
+HOST_NAME="doris--"
+
+{
+    echo "FS_PORT=${FS_PORT}"
+    echo "HMS_PORT=${HMS_PORT}"
+    echo "CORE_CONF_fs_defaultFS=hdfs://doris--namenode:${FS_PORT}"
+    echo "HOST_NAME=${HOST_NAME}"
+} >>"${ROOT}"/trino_hive.env
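
With the defaults above, a sketch of what gen_env.sh appends to trino_hive.env (values taken straight from the template; note that run-thirdparties-docker.sh rewrites the doris-- prefix to the configured CONTAINER_UID before this script runs):

    FS_PORT=8120
    HMS_PORT=9183
    CORE_CONF_fs_defaultFS=hdfs://doris--namenode:8120
    HOST_NAME=doris--
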
diff --git a/docker/thirdparties/docker-compose/trino/hive.properties.tpl b/docker/thirdparties/docker-compose/trino/hive.properties.tpl
new file mode 100644
index 0000000000..4f11682864
--- /dev/null
+++ b/docker/thirdparties/docker-compose/trino/hive.properties.tpl
@@ -0,0 +1,19 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+connector.name=hive
+hive.metastore.uri=thrift://metastore_ip:9083
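
The metastore_ip placeholder is not edited by hand; run-thirdparties-docker.sh (later in this commit) substitutes the metastore container's address at startup. A sketch of that step, with a hypothetical IP for illustration:

    # 172.20.0.4 is illustrative; the real value comes from docker inspect.
    hive_metastore_ip=172.20.0.4
    sed -i "s/metastore_ip/${hive_metastore_ip}/g" hive.properties
    # hive.properties then reads:
    #   connector.name=hive
    #   hive.metastore.uri=thrift://172.20.0.4:9083
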
diff --git a/docker/thirdparties/docker-compose/trino/scripts/create_trino_table.sql b/docker/thirdparties/docker-compose/trino/scripts/create_trino_table.sql
new file mode 100644
index 0000000000..ea9749f18a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/trino/scripts/create_trino_table.sql
@@ -0,0 +1,222 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--   http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+create schema hive.doris_test;
+create table hive.doris_test.orc_basic_data_type
+(
+    T_BOOLEAN   BOOLEAN,
+    T_TINYINT   TINYINT,
+    T_SMALLINT  SMALLINT,
+    T_INTEGER   INTEGER,
+    T_BIGINT    BIGINT,
+    T_REAL      REAL,
+    T_DOUBLE    DOUBLE,
+    T_DECIMAL   DECIMAL(38,12),
+    T_CHAR      CHAR,
+    T_VARCHAR   VARCHAR,
+    T_DATE      DATE,
+    T_TIMESTAMP TIMESTAMP
+) WITH (format = 'ORC');
+
+create table hive.doris_test.parquet_basic_data_type
+(
+    T_BOOLEAN   BOOLEAN,
+    T_TINYINT   TINYINT,
+    T_SMALLINT  SMALLINT,
+    T_INTEGER   INTEGER,
+    T_BIGINT    BIGINT,
+    T_REAL      REAL,
+    T_DOUBLE    DOUBLE,
+    T_DECIMAL   DECIMAL(38,12),
+    T_CHAR      CHAR,
+    T_VARCHAR   VARCHAR,
+    T_DATE      DATE,
+    T_TIMESTAMP TIMESTAMP
+) WITH (format = 'PARQUET');
+
+
+insert into hive.doris_test.orc_basic_data_type
+values (true, tinyint '1', smallint '1', integer '126', bigint '123456789', real '12.34', double '12.3456', decimal
+        '12.3456789', char 'A', varchar 'Beijing,Shanghai', date '2023-05-23', timestamp '2023-05-24 12:00:00.123'),
+       (false, tinyint '1', smallint '1', integer '126', bigint '1234567890123456', real '12.34', double '12.3456', decimal
+        '12.345678901', char 'A', varchar 'Beijing,Shanghai', date '2023-05-24', timestamp '2023-05-24 13:00:00.123'),
+       (false, tinyint '1', smallint '1', integer '126', bigint '123456789012345678', real '12', double '10', decimal
+        '12.3456789012', char 'A', varchar 'Beijing,Shanghai', date '2023-05-25', timestamp '2023-05-24 13:00:00.123'),
+       (null, null, null, null, null, null, null, null, null, null, null, null);
+
+insert into hive.doris_test.parquet_basic_data_type
+select *
+from hive.doris_test.orc_basic_data_type;
+
+
+CREATE TABLE hive.doris_test.orc_array_data_type
+(
+    t_int_array       array(integer),
+    t_tinyint_array   array(tinyint),
+    t_smallint_array  array(smallint),
+    t_bigint_array    array(bigint),
+    t_real_array      array(real),
+    t_double_array    array(double),
+    t_string_array    array(varchar),
+    t_boolean_array   array(boolean),
+    t_timestamp_array array(timestamp (3)),
+    t_date_array      array(date),
+    t_decimal_array   array(decimal (38, 12))
+)
+    WITH (
+        format = 'ORC'
+        );
+
+CREATE TABLE hive.doris_test.parquet_array_data_type
+(
+    t_int_array       array(integer),
+    t_tinyint_array   array(tinyint),
+    t_smallint_array  array(smallint),
+    t_bigint_array    array(bigint),
+    t_real_array      array(real),
+    t_double_array    array(double),
+    t_string_array    array(varchar),
+    t_boolean_array   array(boolean),
+    t_timestamp_array array(timestamp (3)),
+    t_date_array      array(date),
+    t_decimal_array   array(decimal (38, 12))
+)
+    WITH (
+        format = 'PARQUET'
+        );
+
+insert into hive.doris_test.orc_array_data_type
+values (ARRAY[1,2,3,4,5,6,7],ARRAY[1,2,3,4,5,6,7],ARRAY[1,2,3,4,5,6,7],ARRAY[1234567890123,12345678901234],
+        ARRAY[45.123,123.45,11.0],ARRAY[45.12344,123.4544,11.0],ARRAY['TEST','TEST#12123123'],ARRAY[TRUE,FALSE,TRUE,FALSE],
+        ARRAY[TIMESTAMP '2023-05-24 13:00:00.123',TIMESTAMP '2023-05-24 14:00:00.123'],
+        ARRAY[DATE '2023-05-24',DATE '2023-05-26'],
+        ARRAY[DECIMAL '10001.11122233344']);
+
+insert into hive.doris_test.parquet_array_data_type select * from hive.doris_test.orc_array_data_type;
+
+
+create table hive.doris_test.orc_string_complex
+(
+    t_string_array  array(varchar),
+    t_string_map    map(varchar,varchar),
+    t_string_struct row(f_string varchar,f_int varchar)
+)WITH (
+     FORMAT = 'ORC'
+     );
+
+create table hive.doris_test.parquet_string_complex
+(
+    t_string_array  array(varchar),
+    t_string_map    map(varchar,varchar),
+    t_string_struct row(f_string varchar,f_int varchar)
+)WITH (
+     FORMAT = 'PARQUET'
+     );
+
+insert into hive.doris_test.orc_string_complex
+values (array['1', '2', '3', '北京', 'beijing'],
+        map(array['1', '2', '3'], array['1', 'beijing', '北京']),
+        row('beijing', '1'));
+
+insert into hive.doris_test.parquet_string_complex
+select *
+from hive.doris_test.orc_string_complex;
+
+CREATE TABLE hive.doris_test.orc_supplier_partitioned
+(
+    suppkey   bigint,
+    name      varchar(25),
+    address   varchar(40),
+    phone     varchar(15),
+    acctbal   double,
+    comment   varchar(101),
+    nationkey bigint
+)
+    WITH (
+        format = 'ORC',
+        partitioned_by = ARRAY['nationkey']
+        );
+
+CREATE TABLE hive.doris_test.parquet_supplier_partitioned
+(
+    suppkey   bigint,
+    name      varchar(25),
+    address   varchar(40),
+    phone     varchar(15),
+    acctbal   double,
+    comment   varchar(101),
+    nationkey bigint
+)
+    WITH (
+        format = 'PARQUET',
+        partitioned_by = ARRAY['nationkey']
+        );
+
+insert into hive.doris_test.orc_supplier_partitioned
+select suppkey, name, address, phone, acctbal, comment, nationkey
+from tpch.sf100.supplier;
+
+insert into hive.doris_test.parquet_supplier_partitioned
+select *
+from hive.doris_test.orc_supplier_partitioned;
+
+-- partition and bucket
+CREATE TABLE hive.doris_test.orc_supplier_partitioned_bucketed
+(
+    suppkey   bigint,
+    name      varchar(25),
+    address   varchar(40),
+    phone     varchar(15),
+    acctbal   double,
+    comment   varchar(101),
+    nationkey bigint
+)
+    WITH (
+        format = 'ORC',
+        partitioned_by = ARRAY['nationkey'],
+        bucketed_by = ARRAY['suppkey'],
+        bucket_count = 10
+        );
+
+CREATE TABLE hive.doris_test.parquet_supplier_partitioned_bucketed
+(
+    suppkey   bigint,
+    name      varchar(25),
+    address   varchar(40),
+    phone     varchar(15),
+    acctbal   double,
+    comment   varchar(101),
+    nationkey bigint
+)
+    WITH (
+        format = 'PARQUET',
+        partitioned_by = ARRAY['nationkey'],
+        bucketed_by = ARRAY['suppkey'],
+        bucket_count = 10
+        );
+
+insert into hive.doris_test.orc_supplier_partitioned_bucketed
+select suppkey, name, address, phone, acctbal, comment, nationkey
+from tpch.sf100.supplier;
+
+insert into hive.doris_test.parquet_supplier_partitioned_bucketed
+select *
+from hive.doris_test.orc_supplier_partitioned_bucketed;
+
+
+
+
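Once the stack is up and this script has run, the tables can be spot-checked from the Trino CLI inside the container (a hedged example, assuming the default doris--trino container name from the compose file below):

    docker exec -it doris--trino trino --execute 'show tables from hive.doris_test'
    docker exec -it doris--trino trino --execute 'select count(*) from hive.doris_test.orc_basic_data_type'
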
diff --git a/docker/thirdparties/docker-compose/trino/trino_hive.env.tpl b/docker/thirdparties/docker-compose/trino/trino_hive.env.tpl
new file mode 100644
index 0000000000..0f2ce0a443
--- /dev/null
+++ b/docker/thirdparties/docker-compose/trino/trino_hive.env.tpl
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+DOCKER_TRINO_EXTERNAL_PORT=8080
+
+HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:postgresql://doris--hive-metastore-postgresql:5432/metastore
+HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=org.postgresql.Driver
+HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=hive
+HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=hive
+HIVE_SITE_CONF_datanucleus_autoCreateSchema=false
+HIVE_SITE_CONF_hive_metastore_uris=thrift://doris--hive-metastore:9083
+HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false
+HIVE_SITE_CONF_hive_server2_thrift_bind_host=0.0.0.0
+HIVE_SITE_CONF_hive_server2_thrift_port=10000
+
+CORE_CONF_hadoop_http_staticuser_user=root
+CORE_CONF_hadoop_proxyuser_hue_hosts=*
+CORE_CONF_hadoop_proxyuser_hue_groups=*
+CORE_CONF_hadoop_proxyuser_hive_hosts=*
+
+HDFS_CONF_dfs_webhdfs_enabled=true
+HDFS_CONF_dfs_permissions_enabled=false
+
+YARN_CONF_yarn_log___aggregation___enable=true
+YARN_CONF_yarn_resourcemanager_recovery_enabled=true
+YARN_CONF_yarn_resourcemanager_store_class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore
+YARN_CONF_yarn_resourcemanager_fs_state___store_uri=/rmstate
+YARN_CONF_yarn_nodemanager_remote___app___log___dir=/app-logs
+YARN_CONF_yarn_log_server_url=http://historyserver:8188/applicationhistory/logs/
+YARN_CONF_yarn_timeline___service_enabled=true
+YARN_CONF_yarn_timeline___service_generic___application___history_enabled=true
+YARN_CONF_yarn_resourcemanager_system___metrics___publisher_enabled=true
+YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
+YARN_CONF_yarn_timeline___service_hostname=historyserver
+YARN_CONF_yarn_resourcemanager_address=resourcemanager:8032
+YARN_CONF_yarn_resourcemanager_scheduler_address=resourcemanager:8030
+YARN_CONF_yarn_resourcemanager_resource___tracker_address=resourcemanager:8031
+
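The *_CONF_* variables follow the bde2020 image convention for mapping environment variables onto Hadoop XML properties: the text after the CONF_ prefix becomes the property name, with a single underscore read as a dot and a triple underscore read as a hyphen (stated as an assumption about the base images, not something this commit verifies). For example:

    # HIVE_SITE_CONF_hive_metastore_uris=thrift://doris--hive-metastore:9083
    #   -> hive-site.xml: hive.metastore.uris
    # HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false
    #   -> hdfs-site.xml: dfs.namenode.datanode.registration.ip-hostname-check
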
diff --git a/docker/thirdparties/docker-compose/trino/trino_hive.yaml.tpl b/docker/thirdparties/docker-compose/trino/trino_hive.yaml.tpl
new file mode 100644
index 0000000000..f034a0c7bd
--- /dev/null
+++ b/docker/thirdparties/docker-compose/trino/trino_hive.yaml.tpl
@@ -0,0 +1,141 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+version: "3.8"
+
+networks:
+  doris--network:
+    driver: bridge
+
+services:
+
+  doris--trino:
+    image: trinodb/trino:418
+    hostname: doris--trino
+    container_name: doris--trino
+    env_file:
+      - ./trino_hive.env
+    ports:
+      - "${DOCKER_TRINO_EXTERNAL_PORT}:8080"
+    volumes:
+          - ./scripts:/scripts
+    healthcheck:
+      test: [ "CMD", "curl", "-f", "http://localhost:8080/"; ]
+      interval: 5s
+      timeout: 120s
+      retries: 120
+    networks:
+      - doris--network
+
+  doris--namenode:
+    image: bde2020/hadoop-namenode:2.0.0-hadoop2.7.4-java8
+    environment:
+      - CLUSTER_NAME=test
+    env_file:
+      - ./trino_hive.env
+    hostname: doris--namenode
+    container_name: doris--namenode
+    expose:
+      - "50070"
+      - "8020"
+      - "9000"
+      - "${FS_PORT}"
+    ports:
+      - "${FS_PORT}:${FS_PORT}"
+    healthcheck:
+      test: [ "CMD", "curl", "http://localhost:50070/"; ]
+      interval: 5s
+      timeout: 120s
+      retries: 120
+    networks:
+      - doris--network
+
+  doris--datanode:
+    image: bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8
+    env_file:
+      - ./trino_hive.env
+    environment:
+      SERVICE_PRECONDITION: "doris--namenode:50070"
+    hostname: doris--datanode
+    container_name: doris--datanode
+    expose:
+      - "50075"
+    healthcheck:
+      test: [ "CMD", "curl", "http://localhost:50075"; ]
+      interval: 5s
+      timeout: 60s
+      retries: 120
+    networks:
+      - doris--network
+
+  doris--hive-server:
+    image: bde2020/hive:2.3.2-postgresql-metastore
+    env_file:
+      - ./trino_hive.env
+    environment:
+      HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://doris--hive-metastore-postgresql:5432/metastore"
+      SERVICE_PRECONDITION: "doris--hive-metastore:9083"
+    hostname: doris--hive-server
+    container_name: doris--hive-server
+    expose:
+      - "10000"
+    depends_on:
+      - doris--datanode
+      - doris--namenode
+    healthcheck:
+      test: beeline -u "jdbc:hive2://127.0.0.1:10000/default" -n health_check -e "show databases;"
+      interval: 10s
+      timeout: 120s
+      retries: 120
+    networks:
+      - doris--network
+
+
+  doris--hive-metastore:
+    image: bde2020/hive:2.3.2-postgresql-metastore
+    env_file:
+      - ./trino_hive.env
+    command: /opt/hive/bin/hive --service metastore
+    environment:
+      SERVICE_PRECONDITION: "doris--namenode:50070 doris--datanode:50075 
doris--hive-metastore-postgresql:5432"
+    hostname: doris--hive-metastore
+    container_name: doris--hive-metastore
+    expose:
+      - "9083"
+    ports:
+      - "${HMS_PORT}:9083"
+    volumes:
+      - ./scripts:/mnt/scripts
+    depends_on:
+      - doris--hive-metastore-postgresql
+    networks:
+      - doris--network
+
+  doris--hive-metastore-postgresql:
+    image: bde2020/hive-metastore-postgresql:2.3.0
+    restart: always
+    hostname: doris--hive-metastore-postgresql
+    container_name: doris--hive-metastore-postgresql
+    expose:
+      - "5432"
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U postgres"]
+      interval: 5s
+      timeout: 60s
+      retries: 120
+    networks:
+      - doris--network
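
For reference, the stack can be brought up by hand with the same commands the run script uses (a minimal sketch, assuming the .tpl files have already been copied to their non-.tpl names so gen_env.sh can produce trino_hive.env):

    cd docker/thirdparties/docker-compose/trino
    bash gen_env.sh
    sudo docker compose -f trino_hive.yaml --env-file trino_hive.env up --build --remove-orphans -d
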
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index 283ed7b35b..5d0821da18 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -37,7 +37,7 @@ Usage: $0 <options>
      --stop             stop the specified components
 
   All valid components:
-    mysql,pg,oracle,sqlserver,clickhouse,es,hive,iceberg,hudi
+    mysql,pg,oracle,sqlserver,clickhouse,es,hive,iceberg,hudi,trino
   "
     exit 1
 }
@@ -60,7 +60,7 @@ STOP=0
 
 if [[ "$#" == 1 ]]; then
     # default
-    COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi"
+    COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino"
 else
     while true; do
         case "$1" in
@@ -92,7 +92,7 @@ else
     done
     if [[ "${COMPONENTS}"x == ""x ]]; then
         if [[ "${STOP}" -eq 1 ]]; then
-            COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi"
+            COMPONENTS="mysql,pg,oracle,sqlserver,clickhouse,hive,iceberg,hudi,trino"
         fi
     fi
 fi
@@ -129,6 +129,8 @@ RUN_HIVE=0
 RUN_ES=0
 RUN_ICEBERG=0
 RUN_HUDI=0
+RUN_TRINO=0
+
 for element in "${COMPONENTS_ARR[@]}"; do
     if [[ "${element}"x == "mysql"x ]]; then
         RUN_MYSQL=1
@@ -148,6 +150,8 @@ for element in "${COMPONENTS_ARR[@]}"; do
         RUN_ICEBERG=1
     elif [[ "${element}"x == "hudi"x ]]; then
         RUN_HUDI=1
+    elif [[ "${element}"x == "trino"x ]]; then
+        RUN_TRINO=1
     else
         echo "Invalid component: ${element}"
         usage
@@ -290,3 +294,80 @@ if [[ "${RUN_HUDI}" -eq 1 ]]; then
        docker exec -it adhoc-2 /bin/bash /var/scripts/setup_demo_container_adhoc_2.sh
     fi
 fi
+
+if [[ "${RUN_TRINO}" -eq 1 ]]; then
+    # trino
+    trino_docker="${ROOT}"/docker-compose/trino
+    TRINO_CONTAINER_ID="${CONTAINER_UID}trino"
+    NAMENODE_CONTAINER_ID="${CONTAINER_UID}namenode"
+    HIVE_METASTORE_CONTAINER_ID=${CONTAINER_UID}hive-metastore
+    for file in trino_hive.yaml trino_hive.env gen_env.sh hive.properties
+    do
+        cp "${trino_docker}/$file.tpl" "${trino_docker}/$file"
+        if [[ $file != "hive.properties" ]]; then
+            sed -i "s/doris--/${CONTAINER_UID}/g" "${trino_docker}/$file"
+        fi
+    done
+
+    bash "${trino_docker}"/gen_env.sh
+    sudo docker compose -f "${trino_docker}"/trino_hive.yaml --env-file "${trino_docker}"/trino_hive.env down
+    if [[ "${STOP}" -ne 1 ]]; then
+        sudo sed -i "/${NAMENODE_CONTAINER_ID}/d" /etc/hosts
+        sudo docker compose -f "${trino_docker}"/trino_hive.yaml --env-file "${trino_docker}"/trino_hive.env up --build --remove-orphans -d
+        # use tee so the append to /etc/hosts also runs with elevated privileges
+        echo "127.0.0.1 ${NAMENODE_CONTAINER_ID}" | sudo tee -a /etc/hosts
+        sleep 20s
+        hive_metastore_ip=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${HIVE_METASTORE_CONTAINER_ID})
+
+        if [ -z "$hive_metastore_ip" ]; then
+            echo "Failed to get Hive Metastore IP address" >&2
+            exit 1
+        else
+          echo "Hive Metastore IP address is: $hive_metastore_ip"
+        fi
+
+        sed -i "s/metastore_ip/${hive_metastore_ip}/g" "${trino_docker}"/hive.properties
+        docker cp "${trino_docker}"/hive.properties "${CONTAINER_UID}trino":/etc/trino/catalog/
+
+        # Trino must be restarted to pick up the newly copied Hive catalog
+        max_retries=3
+
+        function control_container() {
+            max_retries=3
+            operation=$1
+            expected_status=$2
+            retries=0
+
+            while [ $retries -lt $max_retries ]
+            do
+                status=$(docker inspect --format '{{.State.Running}}' ${TRINO_CONTAINER_ID})
+                if [ "${status}" == "${expected_status}" ]; then
+                    echo "Container ${TRINO_CONTAINER_ID} ${operation} completed successfully."
+                    break
+                else
+                    echo "Waiting for container ${TRINO_CONTAINER_ID} to 
${operation}..."
+                    sleep 5s
+                    ((retries++))
+                fi
+                sleep 3s
+            done
+
+            if [ $retries -eq $max_retries ]; then
+                echo "${operation} operation failed to complete after 
$max_retries attempts."
+                exit 1
+            fi
+        }
+        # Stop the container
+        docker stop ${TRINO_CONTAINER_ID}
+        sleep 5s
+        control_container "stop" "false"
+
+        # Start the container
+        docker start ${TRINO_CONTAINER_ID}
+        control_container "start" "true"
+
+        # wait for Trino to finish initializing
+        sleep 20s
+        # execute create table sql
+        docker exec -it ${TRINO_CONTAINER_ID} /bin/bash -c 'trino -f /scripts/create_trino_table.sql'
+    fi
+fi
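
With this block in place, the Trino stack joins the script's normal lifecycle. A usage sketch (assuming -c is the script's existing component-selection flag; check its usage output for the exact spelling):

    # start only the trino component
    sudo bash docker/thirdparties/run-thirdparties-docker.sh -c trino
    # stop it again
    sudo bash docker/thirdparties/run-thirdparties-docker.sh -c trino --stop
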
diff --git a/docs/zh-CN/community/developer-guide/regression-testing.md b/docs/zh-CN/community/developer-guide/regression-testing.md
index 3617b4d769..a9243496c4 100644
--- a/docs/zh-CN/community/developer-guide/regression-testing.md
+++ b/docs/zh-CN/community/developer-guide/regression-testing.md
@@ -605,7 +605,7 @@ Doris supports querying some external data sources, so the regression framework also provides
 
 1. Start the containers
 
-    Doris currently provides Docker compose files for data sources such as es, mysql, pg, hive, sqlserver, oracle, iceberg, and hudi. The related files are stored in the `docker/thirdparties/docker-compose` directory.
+    Doris currently provides Docker compose files for data sources such as es, mysql, pg, hive, sqlserver, oracle, iceberg, hudi, and trino. The related files are stored in the `docker/thirdparties/docker-compose` directory.
 
     By default, the Docker containers for all external data sources can be started directly with the following command:
     (Note: the hive and hudi containers need to download prebuilt data files; see the hive- and hudi-related documentation below.)
@@ -795,6 +795,20 @@ Doris supports querying some external data sources, so the regression framework also provides
       ```
 
      For more usage, see the [Hudi official documentation](https://hudi.apache.org/docs/docker_demo).
+
+   10. Trino
+       The Trino-related Docker compose files are stored under docker/thirdparties/docker-compose/trino. Template files:
+       * gen_env.sh.tpl: generates the HDFS-related port numbers; no changes are needed unless a port conflict occurs, in which case the ports can be modified.
+       * hive.properties.tpl: configures the Trino catalog information; no changes are needed.
+       * trino_hive.env.tpl: environment configuration for Hive; no changes are needed.
+       * trino_hive.yaml.tpl: the Docker compose file; no changes are needed.
+
+       After the Trino docker starts, a Trino + Hive catalog environment is set up, and Trino then has two catalogs:
+       1. hive
+       2. tpch (bundled with the Trino docker image)
+
+       For more usage, see the [Trino official documentation](https://trino.io/docs/current/installation/containers.html).
+
 2. Run the regression tests
 
    The regression tests for external tables are disabled by default; they can be enabled by modifying the following configuration in `regression-test/conf/regression-conf.groovy`:


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org