This is an automated email from the ASF dual-hosted git repository.

hellostephen pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.0 by this push:
     new ec0bd03108c [Test](regression-test): do not use path style to access s3 (#35787)
ec0bd03108c is described below

commit ec0bd03108ce243cea3c2a0222d0e261073250f2
Author: Thearas <thearas...@gmail.com>
AuthorDate: Sat Jul 13 22:38:33 2024 +0800

    [Test](regression-test): do not use path style to access s3 (#35787)

    ## Proposed changes

    CP from #35666 and #35725.

    ---------

    Co-authored-by: Kang <kxiao.ti...@gmail.com>
---
 .../suites/export_p0/export/test_show_export.groovy  |  4 ++--
 .../outfile/csv/test_outfile_empty_data.groovy        |  4 ++--
 .../suites/export_p0/test_outfile_file_suffix.groovy  |  4 ++--
 .../suites/export_p0/test_with_bom.groovy             |  8 ++++----
 .../suites/export_p2/test_export_with_s3.groovy       |  2 +-
 .../tvf/test_insert_from_tvf_with_common_user.groovy  |  2 +-
 .../suites/external_table_p0/tvf/test_s3_tvf.groovy   |  6 +++---
 .../tvf/test_s3_tvf_compression.groovy                | 20 ++++++++++----------
 .../tvf/test_path_partition_keys.groovy               | 16 ++++++++--------
 .../suites/load_p0/tvf/test_tvf_error_url.groovy      |  8 ++++----
 10 files changed, 37 insertions(+), 37 deletions(-)

diff --git a/regression-test/suites/export_p0/export/test_show_export.groovy b/regression-test/suites/export_p0/export/test_show_export.groovy
index fb90e111c91..ea60b1ebe83 100644
--- a/regression-test/suites/export_p0/export/test_show_export.groovy
+++ b/regression-test/suites/export_p0/export/test_show_export.groovy
@@ -119,7 +119,7 @@ suite("test_show_export", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.parquet",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.parquet",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "parquet",
@@ -154,7 +154,7 @@ suite("test_show_export", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.parquet",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.parquet",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "parquet",
diff --git a/regression-test/suites/export_p0/outfile/csv/test_outfile_empty_data.groovy b/regression-test/suites/export_p0/outfile/csv/test_outfile_empty_data.groovy
index 1804fff2a11..25e0dbbeec9 100644
--- a/regression-test/suites/export_p0/outfile/csv/test_outfile_empty_data.groovy
+++ b/regression-test/suites/export_p0/outfile/csv/test_outfile_empty_data.groovy
@@ -152,12 +152,12 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
     """
     qt_select_tvf3 """ SELECT * FROM S3 (
-        "uri" = "http://${s3_endpoint}${outfile_to_s3_directly_url.substring(4, outfile_to_s3_directly_url.length())}0.csv",
+        "uri" = "http://${bucket}.${s3_endpoint}${outfile_to_s3_directly_url.substring(5 + bucket.length(), outfile_to_s3_directly_url.length())}0.csv",
         "ACCESS_KEY"= "${ak}",
         "SECRET_KEY" = "${sk}",
         "format" = "${format}",
         "region" = "${region}",
-        "use_path_style" = "true"
+        "use_path_style" = "false" -- aliyun does not support path_style
     );
     """
diff --git a/regression-test/suites/export_p0/test_outfile_file_suffix.groovy b/regression-test/suites/export_p0/test_outfile_file_suffix.groovy
index 30f9fea23de..bbd791052c9 100644
--- a/regression-test/suites/export_p0/test_outfile_file_suffix.groovy
+++ b/regression-test/suites/export_p0/test_outfile_file_suffix.groovy
@@ -60,9 +60,9 @@ suite("test_outfile_file_suffix", "p0") {
     def file_suffix = "txt";
     def file_format = "csv";
     def outfile_url = csv_suffix_result(file_suffix, file_format);
-    print("http://${s3_endpoint}${outfile_url.substring(4)}0.${file_suffix}")
+    print("http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_suffix}")
     qt_select """ select * from s3(
-        "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_suffix}",
+        "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_suffix}",
         "ACCESS_KEY"= "${ak}",
         "SECRET_KEY" = "${sk}",
         "format" = "${file_format}",
diff --git a/regression-test/suites/export_p0/test_with_bom.groovy b/regression-test/suites/export_p0/test_with_bom.groovy
index 3ab404d3cf8..2b9a55ae6c9 100644
--- a/regression-test/suites/export_p0/test_with_bom.groovy
+++ b/regression-test/suites/export_p0/test_with_bom.groovy
@@ -115,7 +115,7 @@ suite("test_with_bom", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_format}",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_format}",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "csv",
@@ -152,7 +152,7 @@ suite("test_with_bom", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_format}",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_format}",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "csv",
@@ -189,7 +189,7 @@ suite("test_with_bom", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_format}",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_format}",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "csv_with_names",
@@ -226,7 +226,7 @@ suite("test_with_bom", "p0") {
         def outfile_url = waiting_export.call(label)
         order_qt_select_load1 """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_format}",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.${file_format}",
             "s3.access_key"= "${ak}",
             "s3.secret_key" = "${sk}",
             "format" = "csv_with_names_and_types",
diff --git a/regression-test/suites/export_p2/test_export_with_s3.groovy b/regression-test/suites/export_p2/test_export_with_s3.groovy
index 4ca7967f485..ade844fe7d3 100644
--- a/regression-test/suites/export_p2/test_export_with_s3.groovy
+++ b/regression-test/suites/export_p2/test_export_with_s3.groovy
@@ -99,7 +99,7 @@ suite("test_export_with_s3", "p2") {
         // check data correctness
         order_qt_select """ select * from s3(
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.${file_suffix}",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length())}0.${file_suffix}",
             "ACCESS_KEY"= "${ak}",
             "SECRET_KEY" = "${sk}",
             "column_separator"=",",
diff --git a/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy b/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
index 48ebfe91fac..efad332753f 100644
--- a/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_insert_from_tvf_with_common_user.groovy
@@ -93,7 +93,7 @@ suite("test_insert_from_tvf_with_common_user","p0,external,hive,tvf,external_doc
     sql """
         INSERT INTO ${load_table_name}
         SELECT * FROM S3 (
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.csv",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.csv",
             "ACCESS_KEY"= "${ak}",
             "SECRET_KEY" = "${sk}",
             "format" = "csv",
diff --git a/regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy b/regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy
index 546e5a1ac06..e6f4b0e7ac7 100644
--- a/regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy
@@ -79,7 +79,7 @@ suite("test_s3_tvf", "p0") {
     // 1. normal
     try {
         order_qt_select_1 """ SELECT * FROM S3 (
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.orc",
             "ACCESS_KEY"= "${ak}",
             "SECRET_KEY" = "${sk}",
             "format" = "orc",
@@ -107,11 +107,11 @@ suite("test_s3_tvf", "p0") {
     // 3.test use_path_style
    try {
        order_qt_select_3 """ SELECT * FROM S3 (
-            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
+            "uri" = "http://${bucket}.${s3_endpoint}${outfile_url.substring(5 + bucket.length(), outfile_url.length())}0.orc",
            "s3.access_key"= "${ak}",
            "s3.secret_key" = "${sk}",
            "format" = "orc",
-            "use_path_style" = "true",
+            "use_path_style" = "false", -- aliyun does not support path_style
            "region" = "${region}"
        );
        """
diff --git a/regression-test/suites/external_table_p0/tvf/test_s3_tvf_compression.groovy b/regression-test/suites/external_table_p0/tvf/test_s3_tvf_compression.groovy
index 5eb63c94ddc..2975f4f0cc9 100644
--- a/regression-test/suites/external_table_p0/tvf/test_s3_tvf_compression.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_s3_tvf_compression.groovy
@@ -35,7 +35,7 @@ suite("test_s3_tvf_compression", "p0") {
     "REGION" = "${region}",
     "FORMAT" = "csv",
     "column_separator" = ",",
-    "use_path_style" = "true",
+    "use_path_style" = "false", -- aliyun does not support path_style
     "compress_type" ="${compress_type}") order by c1,c2,c3,c4,c5 limit 20;
     """
@@ -49,7 +49,7 @@ suite("test_s3_tvf_compression", "p0") {
     "REGION" = "${region}",
     "FORMAT" = "csv",
     "column_separator" = ",",
-    "use_path_style" = "true",
+    "use_path_style" = "false", -- aliyun does not support path_style
     "compress_type" ="${compress_type}") order by cast(c1 as int),c4 limit 20;
     """
@@ -65,7 +65,7 @@ suite("test_s3_tvf_compression", "p0") {
     "REGION" = "${region}",
     "FORMAT" = "csv",
     "column_separator" = ",",
-    "use_path_style" = "true",
+    "use_path_style" = "false", -- aliyun does not support path_style
     "compress_type" ="${compress_type}") order by c1,c2,c3,c4,c5 limit 15;
     """
@@ -79,7 +79,7 @@ suite("test_s3_tvf_compression", "p0") {
     "REGION" = "${region}",
     "FORMAT" = "csv",
     "column_separator" = ",",
-    "use_path_style" = "true",
+    "use_path_style" = "false", -- aliyun does not support path_style
     "compress_type" ="${compress_type}") where c1!="100" order by cast(c4 as date),c1 limit 13;
     """
"p0") { "REGION" = "${region}", "FORMAT" = "csv", "column_separator" = ",", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "compress_type" ="${compress_type}FRAME") order by c1,c2,c3,c4,c5 limit 14; """ @@ -109,7 +109,7 @@ suite("test_s3_tvf_compression", "p0") { "REGION" = "${region}", "FORMAT" = "csv", "column_separator" = ",", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "compress_type" ="${compress_type}FRAME") where c3="buHDwfGeNHfpRFdNaogneddi" order by c3,c1 limit 14; """ @@ -126,7 +126,7 @@ suite("test_s3_tvf_compression", "p0") { "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "column_separator" = '\001', "compress_type" ="${compress_type}") ${orderBy_limit}; """ @@ -140,7 +140,7 @@ suite("test_s3_tvf_compression", "p0") { "REGION" = "${region}", "FORMAT" = "csv", "column_separator" = '\001', - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "compress_type" ="${compress_type}") group by c1,c2 order by c1,c2 limit 5; """ @@ -156,7 +156,7 @@ suite("test_s3_tvf_compression", "p0") { "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "column_separator" = '\001', "compress_type" ="${compress_type}block") ${orderBy_limit}; """ @@ -170,7 +170,7 @@ suite("test_s3_tvf_compression", "p0") { "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style "column_separator" = '\001', "compress_type" ="${compress_type}block") where c2 ="abccc"; """ diff --git a/regression-test/suites/external_table_p2/tvf/test_path_partition_keys.groovy b/regression-test/suites/external_table_p2/tvf/test_path_partition_keys.groovy index 231bbc73b6c..b3a0c558f4a 100644 --- a/regression-test/suites/external_table_p2/tvf/test_path_partition_keys.groovy +++ b/regression-test/suites/external_table_p2/tvf/test_path_partition_keys.groovy @@ -137,8 +137,8 @@ suite("test_path_partition_keys", "p2,external,tvf,external_remote,external_remo "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "column_separator"="${column_separator}", - "use_path_style" = "true", + "column_separator" = ",", + "use_path_style" = "false", -- aliyun does not support path_style "path_partition_keys"="dt1") """ @@ -152,8 +152,8 @@ suite("test_path_partition_keys", "p2,external,tvf,external_remote,external_remo "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "column_separator"="${column_separator}", - "use_path_style" = "true", + "column_separator" = ",", + "use_path_style" = "false", -- aliyun does not support path_style "path_partition_keys"="dt1") limit 3; """ @@ -166,8 +166,8 @@ suite("test_path_partition_keys", "p2,external,tvf,external_remote,external_remo "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - "column_separator"="${column_separator}", - "use_path_style" = "true", + "column_separator" = ",", + "use_path_style" = "false", -- aliyun does not support path_style "path_partition_keys"="dt1") limit 3; """ @@ -180,8 +180,8 @@ suite("test_path_partition_keys", "p2,external,tvf,external_remote,external_remo "s3.secret_key" = "${sk}", "REGION" = "${region}", "FORMAT" = "csv", - 
"column_separator"="${column_separator}", - "use_path_style" = "true", + "use_path_style" = "false", -- aliyun does not support path_style + "column_separator" = ",", "path_partition_keys"="dt2,dt1") limit 3; """ } diff --git a/regression-test/suites/load_p0/tvf/test_tvf_error_url.groovy b/regression-test/suites/load_p0/tvf/test_tvf_error_url.groovy index d1dcff4d530..f1b1df31ddf 100644 --- a/regression-test/suites/load_p0/tvf/test_tvf_error_url.groovy +++ b/regression-test/suites/load_p0/tvf/test_tvf_error_url.groovy @@ -24,7 +24,7 @@ suite("test_tvf_error_url", "p0") { String path = "select_tvf/no_exists_file_test" order_qt_select """ SELECT * FROM S3 ( - "uri" = "http://${s3_endpoint}/${bucket}/${path}/no_exist_file1.csv", + "uri" = "http://${bucket}.${s3_endpoint}/${path}/no_exist_file1.csv", "ACCESS_KEY"= "${ak}", "SECRET_KEY" = "${sk}", "format" = "csv", @@ -33,7 +33,7 @@ suite("test_tvf_error_url", "p0") { """ order_qt_desc """ desc function S3 ( - "uri" = "http://${s3_endpoint}/${bucket}/${path}/no_exist_file1.csv", + "uri" = "http://${bucket}.${s3_endpoint}/${path}/no_exist_file1.csv", "ACCESS_KEY"= "${ak}", "SECRET_KEY" = "${sk}", "format" = "csv", @@ -42,7 +42,7 @@ suite("test_tvf_error_url", "p0") { """ order_qt_select2 """ SELECT * FROM S3 ( - "uri" = "http://${s3_endpoint}/${bucket}/${path}/*.csv", + "uri" = "http://${bucket}.${s3_endpoint}/${path}/*.csv", "ACCESS_KEY"= "${ak}", "SECRET_KEY" = "${sk}", "format" = "csv", @@ -51,7 +51,7 @@ suite("test_tvf_error_url", "p0") { """ order_qt_desc2 """ desc function S3 ( - "uri" = "http://${s3_endpoint}/${bucket}/${path}/*.csv", + "uri" = "http://${bucket}.${s3_endpoint}/${path}/*.csv", "ACCESS_KEY"= "${ak}", "SECRET_KEY" = "${sk}", "format" = "csv", --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org