This is an automated email from the ASF dual-hosted git repository.

hellostephen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new aaaf54561c2 [regression](s3) add default conf for s3 related cases (#37952)
aaaf54561c2 is described below

commit aaaf54561c2af53853a5c9843677ee18cda13141
Author: Dongyang Li <hello_step...@qq.com>
AuthorDate: Mon Jul 29 11:00:35 2024 +0800

    [regression](s3) add default conf for s3 related cases (#37952)
    
    ## Proposed changes
    
    Replace COS with OSS in the TeamCity pipeline to improve stability.
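    
    As a reference, a minimal sketch of the resulting regression-conf.groovy (illustrative, not part of the committed files; the per-source defaults shown are the ones this commit adds to `fillDefaultConfig` in Config.groovy below):
    
    ```groovy
    // Pick one S3 source; unset S3 settings are then filled with per-source defaults.
    // Supported values: "aliyun", "aliyun-internal", "tencent", "huawei", "azure", "gcp"
    s3Source = "aliyun"
    
    // With s3Source = "aliyun" and nothing else set, the framework derives:
    //   s3Provider   = "OSS"
    //   s3BucketName = "doris-regression-hk"
    //   s3Region     = "oss-cn-hongkong"
    //   s3Endpoint   = "oss-cn-hongkong.aliyuncs.com"
    // Any of these can still be set explicitly to override the default.
    
    ak = "***********"
    sk = "***********"
    ```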
    
    ---------
    
    Co-authored-by: stephen <hello-step...@qq.com>
---
 regression-test/conf/regression-conf.groovy        | 16 ++--
 .../org/apache/doris/regression/Config.groovy      | 92 +++++++++++++++++++++-
 .../org/apache/doris/regression/suite/Suite.groovy |  2 +-
 .../cloud_p0/conf/regression-conf-custom.groovy    |  2 +-
 regression-test/pipeline/cloud_p0/prepare.sh       |  2 +-
 regression-test/pipeline/cloud_p0/run.sh           |  6 +-
 .../cloud_p1/conf/regression-conf-custom.groovy    |  2 +-
 .../pipeline/external/conf/regression-conf.groovy  |  5 +-
 .../pipeline/p0/conf/regression-conf.groovy        |  5 +-
 .../pipeline/p1/conf/regression-conf.groovy        |  5 +-
 .../spark_connector/spark_connector.groovy         |  3 +-
 ...test_export_table_with_materialized_view.groovy |  2 +-
 .../hive/test_trino_different_parquet_types.groovy | 20 ++---
 .../hive/test_trino_hive_orc.groovy                | 20 ++---
 .../hive/test_trino_hive_other.groovy              | 20 ++---
 .../hive/test_trino_hive_parquet.groovy            | 22 +++---
 .../hive/test_trino_hive_schema_evolution.groovy   | 23 +++---
 .../hive/test_trino_hive_serde_prop.groovy         | 22 +++---
 .../hive/test_trino_hive_tablesample_p0.groovy     | 20 ++---
 .../hive/test_trino_hive_tpch_sf1_orc.groovy       | 20 ++---
 .../hive/test_trino_hive_tpch_sf1_parquet.groovy   | 20 ++---
 .../test_trino_prepare_hive_data_in_case.groovy    | 22 +++---
 .../kafka/test_trino_kafka_base.groovy             | 23 +++---
 .../trino_connector/test_plugins_download.groovy   | 21 ++---
 regression-test/suites/variant_p0/load.groovy      |  4 +-
 25 files changed, 245 insertions(+), 154 deletions(-)

diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 527b0231394..08d03632c37 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -104,8 +104,16 @@ brokerName = "broker_name"
 
 // broker load test config
 enableBrokerLoad=true
-ak=""
-sk=""
+
+// for s3 related cases: "aliyun" or "aliyun-internal" or "tencent" or "huawei" or "azure" or "gcp"
+// if s3Source is set, s3Endpoint, s3BucketName, s3Region and s3Provider will be filled with default values if not set
+s3Source="aliyun"
+// s3Endpoint = ""
+// s3BucketName = ""
+// s3Region = ""
+// s3Provider = ""
+ak="***********"
+sk="***********"
 
 // jdbc connector test config
 // To enable jdbc test, you need first start mysql/pg container.
@@ -194,10 +202,6 @@ aliYunSk="***********"
 hwYunAk="***********"
 hwYunSk="***********"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-
 //arrow flight sql test config
 extArrowFlightSqlHost = "127.0.0.1"
 extArrowFlightSqlPort = 8080
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
index b901095eac1..b7c3090e0b8 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
@@ -144,9 +144,12 @@ class Config {
     public String kafkaBrokerList
     public String cloudVersion
 
+    public String s3Source
+
     Config() {}
 
     Config(
+            String s3Source,
             String caseNamePrefix,
             String defaultDb, 
             String jdbcUrl, 
@@ -199,6 +202,7 @@ class Config {
             String clusterDir, 
             String kafkaBrokerList, 
             String cloudVersion) {
+        this.s3Source = s3Source
         this.caseNamePrefix = caseNamePrefix
         this.defaultDb = defaultDb
         this.jdbcUrl = jdbcUrl
@@ -449,7 +453,6 @@ class Config {
         }
         log.info("recycleAddr : $config.recycleServiceHttpAddress, socketAddr 
: $config.recycleServiceHttpInetSocketAddress")
 
-
         config.defaultDb = cmd.getOptionValue(defaultDbOpt, config.defaultDb)
         config.jdbcUrl = cmd.getOptionValue(jdbcOpt, config.jdbcUrl)
         config.jdbcUser = cmd.getOptionValue(userOpt, config.jdbcUser)
@@ -478,6 +481,16 @@ class Config {
         log.info("withOutLoadData is ${config.withOutLoadData}".toString())
         log.info("caseNamePrefix is ${config.caseNamePrefix}".toString())
         log.info("dryRun is ${config.dryRun}".toString())
+        def s3SourceList = ["aliyun", "aliyun-internal", "tencent", "huawei", "azure", "gcp"]
+        if (s3SourceList.contains(config.s3Source)) {
+            log.info("s3Source is ${config.s3Source}".toString())
+            log.info("s3Provider is ${config.otherConfigs.get("s3Provider")}".toString())
+            log.info("s3BucketName is ${config.otherConfigs.get("s3BucketName")}".toString())
+            log.info("s3Region is ${config.otherConfigs.get("s3Region")}".toString())
+            log.info("s3Endpoint is ${config.otherConfigs.get("s3Endpoint")}".toString())
+        } else {
+            throw new Exception("The s3Source '${config.s3Source}' is invalid, optional values ${s3SourceList}")
+        }
 
         Properties props = cmd.getOptionProperties("conf")
         config.otherConfigs.putAll(props)
@@ -490,6 +503,7 @@ class Config {
 
     static Config fromConfigObject(ConfigObject obj) {
         def config = new Config(
+            configToString(obj.s3Source),
             configToString(obj.caseNamePrefix),
             configToString(obj.defaultDb),
             configToString(obj.jdbcUrl),
@@ -602,6 +616,82 @@ class Config {
     }
 
     static void fillDefaultConfig(Config config) {
+        if (config.s3Source == null) {
+            config.s3Source = "aliyun"
+            log.info("Set s3Source to 'aliyun' because not 
specify.".toString())
+        }
+
+        if (config.otherConfigs.get("s3Provider") == null) {
+            def s3Provider = "OSS"
+            if (config.s3Source == "aliyun" || config.s3Source == "aliyun-internal") {
+                s3Provider = "OSS"
+            } else if (config.s3Source == "tencent") {
+                s3Provider = "COS"
+            } else if (config.s3Source == "huawei") {
+                s3Provider = "OBS"
+            } else if (config.s3Source == "azure") {
+                s3Provider = "AZURE"
+            } else if (config.s3Source == "gcp") {
+                s3Provider = "GCP"
+            }
+            config.otherConfigs.put("s3Provider", "${s3Provider}")
+            log.info("Set s3Provider to '${s3Provider}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3BucketName") == null) {
+            def s3BucketName = "doris-regression-hk"
+            if (config.s3Source == "aliyun") {
+                s3BucketName = "doris-regression-hk"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3BucketName = "doris-regression"
+            } else if (config.s3Source == "tencent") {
+                s3BucketName = "doris-build-1308700295"
+            } else if (config.s3Source == "huawei") {
+                s3BucketName = "doris-build"
+            } else if (config.s3Source == "azure") {
+                s3BucketName = "qa-build"
+            } else if (config.s3Source == "gcp") {
+                s3BucketName = "doris-regression"
+            }
+            config.otherConfigs.put("s3BucketName", "${s3BucketName}")
+            log.info("Set s3BucketName to '${s3BucketName}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3Region") == null) {
+            def s3Region = "oss-cn-hongkong"
+            if (config.s3Source == "aliyun") {
+                s3Region = "oss-cn-hongkong"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3Region = "oss-cn-beijing"
+            } else if (config.s3Source == "tencent") {
+                s3Region = "ap-beijing"
+            } else if (config.s3Source == "huawei") {
+                s3Region = "cn-north-4"
+            } else if (config.s3Source == "azure") {
+                s3Region = "azure-region"
+            } else if (config.s3Source == "gcp") {
+                s3Region = "us-central1"
+            }
+            config.otherConfigs.put("s3Region", "${s3Region}")
+            log.info("Set s3Region to '${s3Region}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3Endpoint") == null) {
+            def s3Endpoint = "oss-cn-hongkong.aliyuncs.com"
+            if (config.s3Source == "aliyun") {
+                s3Endpoint = "oss-cn-hongkong.aliyuncs.com"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3Endpoint = "oss-cn-beijing-internal.aliyuncs.com"
+            } else if (config.s3Source == "tencent") {
+                s3Endpoint = "cos.ap-beijing.myqcloud.com"
+            } else if (config.s3Source == "huawei") {
+                s3Endpoint = "obs.cn-north-4.myhuaweicloud.com"
+            } else if (config.s3Source == "azure") {
+                s3Endpoint = "azure-endpoint"
+            } else if (config.s3Source == "gcp") {
+                s3Endpoint = "storage.googleapis.com"
+            }
+            config.otherConfigs.put("s3Endpoint", "${s3Endpoint}")
+            log.info("Set s3Endpoint to '${s3Endpoint}' because not 
specify.".toString())
+        }
+
         if (config.caseNamePrefix == null) {
             config.caseNamePrefix = ""
             log.info("set caseNamePrefix to '' because not 
specify.".toString())
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 4d5127b8644..2cd27b0968d 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -877,7 +877,7 @@ class Suite implements GroovyInterceptable {
                 if (exitcode != 0) {
                     staticLogger.info("exit code: ${exitcode}, output\n: 
${proc.text}")
                     if (mustSuc == true) {
-                       Assert.assertEquals(0, exitCode)
+                       Assert.assertEquals(0, exitcode)
                     }
                 }
             } catch (IOException e) {
diff --git a/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
index c04b3742bbf..4a79c5cc909 100644
--- a/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
+++ b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
@@ -74,4 +74,4 @@ enableKafkaTest=true
 // trino-connector catalog test config
 enableTrinoConnectorTest = false
 
-s3Provider = "COS"
+s3Source = "aliyun"
diff --git a/regression-test/pipeline/cloud_p0/prepare.sh b/regression-test/pipeline/cloud_p0/prepare.sh
index 211870d99d5..df3cbe9fb9e 100644
--- a/regression-test/pipeline/cloud_p0/prepare.sh
+++ b/regression-test/pipeline/cloud_p0/prepare.sh
@@ -47,7 +47,7 @@ if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env pr_num_from_trigger
 if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env commit_id_from_trigger not set" && exit 1; fi
 if [[ -z "${commit_id_from_checkout}" ]]; then echo "ERROR: env commit_id_from_checkout not set" && exit 1; fi
 if [[ -z "${target_branch}" ]]; then echo "ERROR: env target_branch not set" && exit 1; fi
-if [[ -z "${cos_ak}" || -z "${cos_sk}" ]]; then echo "ERROR: env cos_ak or cos_sk not set" && exit 1; fi
+if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env s3SourceAk or s3SourceSk not set" && exit 1; fi
 if [[ -z "${oss_ak}" || -z "${oss_sk}" ]]; then echo "ERROR: env oss_ak or oss_sk not set." && exit 1; fi
 
 echo "#### 1. check if need run"
diff --git a/regression-test/pipeline/cloud_p0/run.sh b/regression-test/pipeline/cloud_p0/run.sh
index 3c126460b56..2079d182ef3 100644
--- a/regression-test/pipeline/cloud_p0/run.sh
+++ b/regression-test/pipeline/cloud_p0/run.sh
@@ -33,7 +33,7 @@ echo "#### Check env"
 if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
 if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env 
pr_num_from_trigger not set" && exit 1; fi
 if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env 
commit_id_from_trigger not set" && exit 1; fi
-if [[ -z "${cos_ak}" || -z "${cos_sk}" ]]; then echo "ERROR: env cos_ak or 
cos_sk not set" && exit 1; fi
+if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env 
s3SourceAk or s3SourceSk not set" && exit 1; fi
 
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
@@ -53,8 +53,8 @@ run() {
     cd "${teamcity_build_checkoutDir}" || return 1
     {
         echo # add a new line to prevent two config items from being combined, which will cause the error "No signature of method"
-        echo "ak='${cos_ak}'"
-        echo "sk='${cos_sk}'"
+        echo "ak='${s3SourceAk}'"
+        echo "sk='${s3SourceSk}'"
     } >>"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy \
         "${teamcity_build_checkoutDir}"/regression-test/conf/
diff --git a/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
index 42a18b7f22e..2662090b401 100644
--- a/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
+++ b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
@@ -21,4 +21,4 @@ excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line
 
 max_failure_num = 50
 
-s3Provider = "COS"
+s3Source = "aliyun"
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 28956568e58..93965b84219 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -138,10 +138,7 @@ es_8_port=39200
 
 cacheDataPath = "/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 max_failure_num=50
 
diff --git a/regression-test/pipeline/p0/conf/regression-conf.groovy b/regression-test/pipeline/p0/conf/regression-conf.groovy
index c30dd20481c..72368697f2c 100644
--- a/regression-test/pipeline/p0/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p0/conf/regression-conf.groovy
@@ -137,10 +137,7 @@ es_8_port=39200
 
 cacheDataPath = "/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 //arrow flight sql test config
 extArrowFlightSqlHost = "127.0.0.1"
diff --git a/regression-test/pipeline/p1/conf/regression-conf.groovy b/regression-test/pipeline/p1/conf/regression-conf.groovy
index d4ecd55d38f..49f52c1fa32 100644
--- a/regression-test/pipeline/p1/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p1/conf/regression-conf.groovy
@@ -70,10 +70,7 @@ excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line
 
 cacheDataPath="/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 max_failure_num=0
 
diff --git a/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy b/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
index 2bd618fcc3c..06699d7c8ff 100644
--- a/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
+++ b/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
@@ -25,7 +25,8 @@ suite("spark_connector", "connector") {
     logger.info("start download spark doris demo ...")
     logger.info("getS3Url ==== ${getS3Url()}")
     def download_spark_jar = "/usr/bin/curl ${getS3Url()}/regression/spark-doris-connector-demo-jar-with-dependencies.jar --output spark-doris-demo.jar".execute().getText()
-    logger.info("finish download spark doris demo ...")
+    def out = "/usr/bin/ls -al spark-doris-demo.jar".execute().getText()
+    logger.info("finish download spark doris demo, out: ${out}")
     def run_cmd = "java -jar spark-doris-demo.jar $context.config.feHttpAddress $context.config.feHttpUser regression_test_connector_p0_spark_connector.$tableName"
     logger.info("run_cmd : $run_cmd")
     def proc = run_cmd.execute()
diff --git a/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy b/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
index bc51b99a991..adba998b16f 100644
--- a/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
+++ b/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
@@ -114,7 +114,7 @@ suite("test_export_table_with_materialized_view", "p0") {
         def outfile_url = waiting_export.call(label)
 
         qt_select_load1 """ select * from s3(
-                "uri" = "http://${s3_endpoint}${outfile_url.substring(4, 
outfile_url.length() - 1)}0.parquet",
+                "uri" = 
"http://${bucket}.${s3_endpoint}${outfile_url.substring(5+bucket.length(), 
outfile_url.length() - 1)}0.parquet",
                 "s3.access_key"= "${ak}",
                 "s3.secret_key" = "${sk}",
                 "format" = "parquet",
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
index 4a450b99901..fbf60ed2497 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_different_parquet_types", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String hms_port = context.config.otherConfigs.get("hive2HmsPort")
     String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
@@ -185,6 +175,16 @@ suite("test_trino_different_parquet_types", "p0,external,hive,external_docker,ex
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String catalog_name = "test_trino_different_parquet_types"
             sql """drop catalog if exists ${catalog_name}"""
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
index 89255cfbc56..d4cbcbe409b 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_orc", 
"all_types,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     // Ensure that all types are parsed correctly
     def select_top50 = {
@@ -88,6 +78,16 @@ suite("test_trino_hive_orc", "all_types,external,hive,external_docker,external_d
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_orc"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
index 9d3430d1ad0..6d410b2cb9a 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_other", 
"external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     def q01 = {
         qt_q24 """ select name, count(1) as c from student group by name order 
by name desc;"""
@@ -62,6 +52,16 @@ suite("test_trino_hive_other", "external,hive,external_docker,external_docker_hi
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
index 658b342dff0..748ac02b651 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
@@ -16,17 +16,7 @@
 // under the License.
 
 suite("test_trino_hive_parquet", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     def q01 = {
         qt_q01 """
        select * from partition_table order by l_orderkey, l_partkey, l_suppkey;
@@ -182,6 +172,16 @@ suite("test_trino_hive_parquet", "p0,external,hive,external_docker,external_dock
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_parquet"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
index 1eb4a0f1b9e..c803a988574 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
@@ -16,17 +16,7 @@
 // under the License.
 
 suite("test_trino_hive_schema_evolution", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     def q_text = {
         qt_q01 """
         select * from schema_evo_test_text order by id;
@@ -67,6 +57,17 @@ suite("test_trino_hive_schema_evolution", "p0,external,hive,external_docker,exte
     String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
 
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
+            
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_schema_evolution"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
index 8479c14fcbc..b996d94f95d 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
@@ -16,19 +16,19 @@
 // under the License.
 
 suite("test_trino_hive_serde_prop", 
"external_docker,hive,external_docker_hive,p0,external") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String catalog_name = "test_trino_hive_serde_prop"
         String ex_db_name = "`stats_test`"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
index be760d381df..8752f2f4b41 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
@@ -16,19 +16,19 @@
 // under the License.
 
 suite("test_trino_hive_tablesample_p0", 
"all_types,p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_tablesample_p0"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
index cafd9301753..6ee38e0021d 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_tpch_sf1_orc", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enable_file_cache = "false"
     def q01 = { 
@@ -857,6 +847,16 @@ order by
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String catalog_name = "test_trino_hive_tpch_sf1_orc"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
index 4034ac805ef..8bd86c7def1 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_tpch_sf1_parquet", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enable_file_cache = "false"
     def q01 = { 
@@ -857,6 +847,16 @@ order by
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String catalog_name = "test_trino_hive_tpch_sf1_parquet"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
index 84049ac1db6..a8b86014aa9 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
@@ -16,20 +16,20 @@
 // under the License.
 
 suite("test_trino_prepare_hive_data_in_case", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     def catalog_name = "test_trino_prepare_hive_data_in_case"
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
diff --git a/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy b/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
index b1da4641866..2f61764ec12 100644
--- a/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
@@ -21,17 +21,6 @@ import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.clients.producer.ProducerConfig
 
 suite("test_trino_kafka_base", 
"external,kafka,external_docker,external_docker_kafka") {
-    // set up trino-connector plugins
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     // Ensure that all types are parsed correctly
     def select_top50 = {
@@ -43,6 +32,18 @@ suite("test_trino_kafka_base", "external,kafka,external_docker,external_docker_k
     String enabled_trino_connector = context.config.otherConfigs.get("enableTrinoConnectorTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")
         && enabled_trino_connector!= null && enabled_trino_connector.equalsIgnoreCase("true")) {
+        // set up trino-connector plugins
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
+
         def kafkaCsvTpoics = [
                 "trino_kafka_basic_data"
             ]
diff --git a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
index 3d28612cf62..028383b6609 100644
--- a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
@@ -16,14 +16,17 @@
 // under the License.
 
 suite("test_plugins_download", 
"external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
+    String enabled = context.config.otherConfigs.get("enableTrinoConnectorTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
     }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 }
\ No newline at end of file
diff --git a/regression-test/suites/variant_p0/load.groovy b/regression-test/suites/variant_p0/load.groovy
index 2b9ec514031..d74155b182d 100644
--- a/regression-test/suites/variant_p0/load.groovy
+++ b/regression-test/suites/variant_p0/load.groovy
@@ -203,7 +203,7 @@ suite("regression_test_variant", "p0"){
         // 7. gh data
         table_name = "ghdata"
         create_table table_name
-        load_json_data.call(table_name, """${getS3Url() + '/load/ghdata_sample.json'}""")
+        load_json_data.call(table_name, """${getS3Url() + '/regression/load/ghdata_sample.json'}""")
         qt_sql_26 "select count() from ${table_name}"
 
         // 8. json empty string
@@ -218,7 +218,7 @@ suite("regression_test_variant", "p0"){
         // // // 9. btc data
         // // table_name = "btcdata"
         // // create_table table_name
-        // // load_json_data.call(table_name, """${getS3Url() + '/load/btc_transactions.json'}""")
+        // // load_json_data.call(table_name, """${getS3Url() + '/regression/load/btc_transactions.json'}""")
         // // qt_sql_28 "select count() from ${table_name}"
 
         // 10. alter add variant

