This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 76c0a906e54 [regression test](stream load) load_to_single_tablet (#26720)
76c0a906e54 is described below
commit 76c0a906e546c476dc5fbcaf4f26ffbcebf5ab0b
Author: Guangdong Liu <[email protected]>
AuthorDate: Fri Nov 10 10:20:24 2023 +0800
[regression test](stream load) load_to_single_tablet (#26720)
---
.../stream_load/test_stream_load_properties.groovy | 31 ++++++++++++++++++++++
1 file changed, 31 insertions(+)
diff --git a/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy b/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
index ab608d7975f..757a6f9f19e 100644
--- a/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
@@ -375,6 +375,37 @@ suite("test_stream_load_properties", "p0") {
         sql new File("""${context.file.parent}/ddl/dup_tbl_basic_drop_random_bucket.sql""").text
     }
+    try {
+        sql new File("""${context.file.parent}/ddl/dup_tbl_basic_drop_random_bucket.sql""").text
+        sql new File("""${context.file.parent}/ddl/dup_tbl_basic_create_random_bucket.sql""").text
+
+        streamLoad {
+            table 'stream_load_dup_tbl_basic_random_bucket'
+            set 'column_separator', '|'
+            set 'columns', columns[0]
+            set 'load_to_single_tablet', 'false'
+            file files[0]
+            time 10000 // limit inflight 10s
+
+            check { result, exception, startTime, endTime ->
+                if (exception != null) {
+                    throw exception
+                }
+                log.info("Stream load result: ${result}".toString())
+                def json = parseJson(result)
+                assertEquals("success", json.Status.toLowerCase())
+                assertEquals(20, json.NumberTotalRows)
+                assertEquals(20, json.NumberLoadedRows)
+                assertEquals(0, json.NumberFilteredRows)
+                assertEquals(0, json.NumberUnselectedRows)
+            }
+        }
+        // def res = sql "show tablets from stream_load_dup_tbl_basic_random_bucket"
+        // assertEquals(res[0][10].toString(), "20")
+    } finally {
+        sql new File("""${context.file.parent}/ddl/dup_tbl_basic_drop_random_bucket.sql""").text
+    }
+
     // sequence
     try {
         sql new File("""${context.file.parent}/ddl/uniq_tbl_basic_drop_sequence.sql""").text
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]