This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-refactor_property
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-refactor_property by this push:
     new 1085c1a853b [Fix](TVF/Outfile) add regression tests for `S3 tvf` and `outfile` (#49410)
1085c1a853b is described below

commit 1085c1a853b6723f98c26cddb9a7d7e41ab8b51a
Author: Tiewei Fang <fangtie...@selectdb.com>
AuthorDate: Mon Mar 24 17:41:36 2025 +0800

    [Fix](TVF/Outfile) add regression tests for `S3 tvf` and `outfile` (#49410)
---
 .../data/s3_storage/test_outfile_s3_storage.out    | Bin 0 -> 1005 bytes
 .../data/s3_storage/test_s3_tvf_s3_storage.out     | Bin 0 -> 4899 bytes
 .../s3_storage/test_outfile_s3_storage.groovy      | 215 ++++++++++++++++++
 .../s3_storage/test_s3_tvf_s3_storage.groovy       | 245 +++++++++++++++++++++
 4 files changed, 460 insertions(+)

diff --git a/regression-test/data/s3_storage/test_outfile_s3_storage.out b/regression-test/data/s3_storage/test_outfile_s3_storage.out
new file mode 100644
index 00000000000..bb72b202ca3
Binary files /dev/null and b/regression-test/data/s3_storage/test_outfile_s3_storage.out differ
diff --git a/regression-test/data/s3_storage/test_s3_tvf_s3_storage.out b/regression-test/data/s3_storage/test_s3_tvf_s3_storage.out
new file mode 100644
index 00000000000..6f44ed9b1e7
Binary files /dev/null and b/regression-test/data/s3_storage/test_s3_tvf_s3_storage.out differ
diff --git a/regression-test/suites/s3_storage/test_outfile_s3_storage.groovy b/regression-test/suites/s3_storage/test_outfile_s3_storage.groovy
new file mode 100644
index 00000000000..9e9b147e838
--- /dev/null
+++ b/regression-test/suites/s3_storage/test_outfile_s3_storage.groovy
@@ -0,0 +1,215 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_outfile_s3_storage", "p0") {
+
+    def export_table_name = "test_outfile_s3_storage"
+    
+    def s3_tvf = {bucket, s3_endpoint, region, ak, sk, path ->
+        // http schema
+        order_qt_s3_tvf_1_http """ SELECT * FROM S3 (
+            "uri" = "http://${bucket}.${s3_endpoint}${path}0.parquet";,
+            "s3.access_key"= "${ak}",
+            "s3.secret_key" = "${sk}",
+            "format" = "parquet",
+            "region" = "${region}"
+        );
+        """
+    }
+
+
+    sql """ DROP TABLE IF EXISTS ${export_table_name} """
+    sql """
+    CREATE TABLE IF NOT EXISTS ${export_table_name} (
+        `user_id` LARGEINT NOT NULL COMMENT "user id",
+        `Name` STRING COMMENT "user age",
+        `Age` int(11) NULL
+        )
+        DISTRIBUTED BY HASH(user_id) BUCKETS 3
+        PROPERTIES("replication_num" = "1");
+    """
+    StringBuilder sb = new StringBuilder()
+    int i = 1
+    for (; i < 10; i ++) {
+        sb.append("""
+            (${i}, 'ftw-${i}', ${i + 18}),
+        """)
+    }
+    sb.append("""
+            (${i}, NULL, NULL)
+        """)
+    sql """ INSERT INTO ${export_table_name} VALUES
+            ${sb.toString()}
+        """
+    qt_select_export """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+
+    String ak = ""
+    String sk = ""
+    String s3_endpoint = ""
+    String region = ""
+    String bucket = ""
+
+    /*******************************************************************************************************
+     *****************************      TEST AWS      *****************************************************
+     *******************************************************************************************************/
+    try {
+        ak = getS3AK()
+        sk = getS3SK()
+        s3_endpoint = getS3Endpoint()
+        region = getS3Region()
+        bucket = context.config.otherConfigs.get("s3BucketName");
+
+        // 1. test s3 schema
+        def outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "s3.endpoint" = "${s3_endpoint}",
+                "s3.region" = "${region}",
+                "s3.secret_key"="${sk}",
+                "s3.access_key" = "${ak}"
+            );
+        """
+        def outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+
+        // 2. test AWS_ENDPOINT
+        outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "AWS_ENDPOINT" = "${s3_endpoint}",
+                "AWS_REGION" = "${region}",
+                "AWS_SECRET_KEY"="${sk}",
+                "AWS_ACCESS_KEY" = "${ak}"
+            );
+        """
+        outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+
+    } finally {
+    }
+
+    /*******************************************************************************************************
+     *****************************      TEST COS & COSN    *************************************************
+     *******************************************************************************************************/
+    try {
+        ak = context.config.otherConfigs.get("txYunAk")
+        sk = context.config.otherConfigs.get("txYunSk")
+        s3_endpoint = "cos.ap-beijing.myqcloud.com"
+        region = "ap-beijing"
+        bucket = "doris-build-1308700295";
+
+        // 1. test s3 schema
+        def outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "s3.endpoint" = "${s3_endpoint}",
+                "s3.region" = "${region}",
+                "s3.secret_key"="${sk}",
+                "s3.access_key" = "${ak}"
+            );
+        """
+        def outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+
+        // 2. test AWS_ENDPOINT
+        outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "AWS_ENDPOINT" = "${s3_endpoint}",
+                "AWS_REGION" = "${region}",
+                "AWS_SECRET_KEY"="${sk}",
+                "AWS_ACCESS_KEY" = "${ak}"
+            );
+        """
+        outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+
+    } finally {
+    }
+
+     /*******************************************************************************************************
+     *****************************      TEST OSS    ********************************************************
+     *******************************************************************************************************/
+     try {
+        ak = context.config.otherConfigs.get("aliYunAk")
+        sk = context.config.otherConfigs.get("aliYunSk")
+        s3_endpoint = "oss-cn-hongkong.aliyuncs.com"
+        region = "oss-cn-hongkong"
+        bucket = "doris-regression-hk";
+
+        // 1. test s3 schema
+        def outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "s3.endpoint" = "${s3_endpoint}",
+                "s3.region" = "${region}",
+                "s3.secret_key"="${sk}",
+                "s3.access_key" = "${ak}"
+            );
+        """
+        def outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+
+        // 2. test AWS_ENDPOINT
+        outFilePath = "${bucket}/test_outfile_s3_storage/exp_"
+        res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "AWS_ENDPOINT" = "${s3_endpoint}",
+                "AWS_REGION" = "${region}",
+                "AWS_SECRET_KEY"="${sk}",
+                "AWS_ACCESS_KEY" = "${ak}"
+            );
+        """
+        outfile_url = res[0][3];
+        s3_tvf(bucket, s3_endpoint, region, ak, sk, outfile_url.substring(5 + bucket.length(), outfile_url.length() - 1));
+     } finally {
+        
+     }
+
+
+    /*******************************************************************************************************
+     *****************************      TEST OBS    ********************************************************
+     *******************************************************************************************************/
+    try {
+        ak = context.config.otherConfigs.get("hwYunAk")
+        sk = context.config.otherConfigs.get("hwYunSk")
+        s3_endpoint = "obs.cn-north-4.myhuaweicloud.com"
+        region = "cn-north-4"
+        bucket = "doris-build";
+    } finally {
+    }
+
+}
diff --git a/regression-test/suites/s3_storage/test_s3_tvf_s3_storage.groovy b/regression-test/suites/s3_storage/test_s3_tvf_s3_storage.groovy
new file mode 100644
index 00000000000..46a3b49f167
--- /dev/null
+++ b/regression-test/suites/s3_storage/test_s3_tvf_s3_storage.groovy
@@ -0,0 +1,245 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_s3_tvf_s3_storage", "p0") {
+
+    def export_table_name = "test_s3_tvf_s3_storage"
+
+    def outfile_to_S3 = {bucket, s3_endpoint, region, ak, sk  ->
+        def outFilePath = "${bucket}/outfile_different_s3/exp_"
+        // select ... into outfile ...
+        def res = sql """
+            SELECT * FROM ${export_table_name} t ORDER BY user_id
+            INTO OUTFILE "s3://${outFilePath}"
+            FORMAT AS parquet
+            PROPERTIES (
+                "s3.endpoint" = "${s3_endpoint}",
+                "s3.region" = "${region}",
+                "s3.secret_key"="${sk}",
+                "s3.access_key" = "${ak}"
+            );
+        """
+        return res[0][3]
+    }
+
+
+    sql """ DROP TABLE IF EXISTS ${export_table_name} """
+    sql """
+    CREATE TABLE IF NOT EXISTS ${export_table_name} (
+        `user_id` LARGEINT NOT NULL COMMENT "user id",
+        `Name` STRING COMMENT "user age",
+        `Age` int(11) NULL
+        )
+        DISTRIBUTED BY HASH(user_id) BUCKETS 3
+        PROPERTIES("replication_num" = "1");
+    """
+    StringBuilder sb = new StringBuilder()
+    int i = 1
+    for (; i < 10; i ++) {
+        sb.append("""
+            (${i}, 'ftw-${i}', ${i + 18}),
+        """)
+    }
+    sb.append("""
+            (${i}, NULL, NULL)
+        """)
+    sql """ INSERT INTO ${export_table_name} VALUES
+            ${sb.toString()}
+        """
+    qt_select_export """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """
+
+
+    String ak = ""
+    String sk = ""
+    String s3_endpoint = ""
+    String region = ""
+    String bucket = ""
+    String outfile_url = ""
+
+    def s3_tvf = {uri_prefix, endpoint_key, ak_key, sk_key, region_key, is_path_style  ->
+        // http schema
+        order_qt_s3_tvf """ SELECT * FROM S3 (
+            "uri" = "${uri_prefix}${outfile_url.substring(5 + bucket.length(), 
outfile_url.length() - 1)}0.parquet",
+            "${endpoint_key}" = "${s3_endpoint}",
+            "${ak_key}"= "${ak}",
+            "${sk_key}" = "${sk}",
+            "${region_key}" = "${region}",
+            "use_path_style" = "${is_path_style}",
+            "format" = "parquet"
+        );
+        """
+    }
+
+
+    /*******************************************************************************************************
+     *****************************      TEST AWS      *****************************************************
+     *******************************************************************************************************/
+    try {
+        ak = context.config.otherConfigs.get("AWSAK")
+        sk = context.config.otherConfigs.get("AWSSK")
+        s3_endpoint = "s3.ap-northeast-1.amazonaws.com"
+        region = "ap-northeast-1"
+        bucket = "selectdb-qa-datalake-test"
+
+        outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
+
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "s3.region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "cos.access_key" , 
"cos.secret_key", "region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "cos.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "s3.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        // s3_tvf("s3://${s3_endpoint}/${bucket}", "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "s3.region", "true");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "s3.region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "AWS_REGION", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "cos.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+
+    } finally {
+    }
+
+    /*******************************************************************************************************
+     *****************************      TEST COS & COSN    *************************************************
+     *******************************************************************************************************/
+    try {
+        ak = context.config.otherConfigs.get("txYunAk")
+        sk = context.config.otherConfigs.get("txYunSk")
+        s3_endpoint = "cos.ap-beijing.myqcloud.com"
+        region = "ap-beijing"
+        bucket = "doris-build-1308700295";
+
+
+        outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
+
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "s3.region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "cos.access_key" , 
"cos.secret_key", "region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "cos.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "s3.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        // s3_tvf("s3://${s3_endpoint}/${bucket}", "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "s3.region", "true");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "s3.region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "AWS_REGION", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "cos.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+
+    } finally {
+    }
+
+     /*******************************************************************************************************
+     *****************************      TEST OSS    ********************************************************
+     *******************************************************************************************************/
+     try {
+        ak = context.config.otherConfigs.get("aliYunAk")
+        sk = context.config.otherConfigs.get("aliYunSk")
+        s3_endpoint = "oss-cn-hongkong.aliyuncs.com"
+        region = "oss-cn-hongkong"
+        bucket = "doris-regression-hk";
+
+
+        outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
+
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "s3.region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "cos.access_key" , 
"cos.secret_key", "region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "cos.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "s3.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+
+        // TODO(ftw): Note this case
+        // s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+
+        // s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        // s3_tvf("s3://${s3_endpoint}/${bucket}", "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+
+        // TODO(ftw): Note this case
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "s3.region", "true");
+
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "s3.region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "AWS_REGION", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "cos.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+
+     } finally {
+        
+     }
+
+
+    /*******************************************************************************************************
+     *****************************      TEST OBS    ********************************************************
+     *******************************************************************************************************/
+    try {
+        ak = context.config.otherConfigs.get("hwYunAk")
+        sk = context.config.otherConfigs.get("hwYunSk")
+        s3_endpoint = "obs.cn-north-4.myhuaweicloud.com"
+        region = "cn-north-4"
+        bucket = "doris-build";
+
+
+        outfile_url = outfile_to_S3(bucket, s3_endpoint, region, ak, sk)
+
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "s3.region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "cos.access_key" , 
"cos.secret_key", "region", "false");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${bucket}.${s3_endpoint}";, "cos.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${bucket}.${s3_endpoint}";, "s3.endpoint", 
"s3.access_key" , "s3.secret_key", "region", "false");
+        s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "true");
+        // s3_tvf("http://${s3_endpoint}/${bucket}";, "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        // s3_tvf("s3://${s3_endpoint}/${bucket}", "", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "s3.region", "true");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "s3.region", "false");
+        // s3_tvf("s3://${bucket}", "AWS_ENDPOINT", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "AWS_REGION", "false");
+        s3_tvf("s3://${bucket}", "s3.endpoint", "AWS_ACCESS_KEY" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "s3.access_key" , 
"AWS_SECRET_KEY", "region", "false");
+        // s3_tvf("s3://${bucket}", "cos.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        // s3_tvf("s3://${bucket}", "s3.endpoint", "cos.access_key" , 
"cos.secret_key", "cos.region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+        s3_tvf("cos://${bucket}", "s3.endpoint", "s3.access_key" , 
"s3.secret_key", "region", "false");
+    } finally {
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
