This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 4ac431b8c8b branch-2.1-pick: [Opt](partial update) Add some cases for partial update #48161 (#48542)
4ac431b8c8b is described below

commit 4ac431b8c8b6795eaaa1343c19645475b3b2b353
Author: bobhan1 <bao...@selectdb.com>
AuthorDate: Wed Mar 5 10:04:01 2025 +0800

    branch-2.1-pick: [Opt](partial update) Add some cases for partial update #48161 (#48542)
    
    pick https://github.com/apache/doris/pull/48161
---
 .../partial_update/test_partial_update.out         | Bin 1839 -> 2031 bytes
 .../partial_update/test_partial_update_s3_load.out | Bin 0 -> 363 bytes
 .../org/apache/doris/regression/suite/Suite.groovy |  19 ++++
 .../partial_update/test_partial_update.groovy      |  80 +++++++++++++++
 .../test_partial_update_s3_load.groovy             | 110 +++++++++++++++++++++
 5 files changed, 209 insertions(+)

diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out
index 6c611c00030..e88eb818667 100644
Binary files a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out and b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update.out differ
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_s3_load.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_s3_load.out
new file mode 100644
index 00000000000..f43da803e61
Binary files /dev/null and b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_s3_load.out differ
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 1626a6795a8..322b15c70d7 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -732,6 +732,25 @@ class Suite implements GroovyInterceptable {
         }
     }
 
+    void waitForBrokerLoadDone(String label, int timeoutInSecond = 60) {
+        if (timeoutInSecond < 0 || label == null) {
+            return
+        }
+        var start = System.currentTimeMillis()
+        var timeout = timeoutInSecond * 1000
+        while (System.currentTimeMillis() - start < timeout) {
+            def lists = sql "show load where label = '${label}'"
+            if (lists.isEmpty()) {
+                return
+            }
+            def state = lists[0][2]
+            if ("FINISHED".equals(state) || "CANCELLED".equals(state)) {
+                return
+            }
+            sleep(300)
+        }
+        logger.warn("broker load with label `${label}` didn't finish in ${timeoutInSecond} seconds, please check it!")
+    }
 
     void expectException(Closure userFunction, String errorMessage = null) {
         try {
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
index 5e91790baf9..432d57a26ca 100644
--- a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update.groovy
@@ -267,6 +267,86 @@ suite("test_primary_key_partial_update", "p0") {
             sql "UPDATE ${tableName} set corp_name = 'B';"
             qt_select_update "select corp_name from ${tableName};"
 
+
+            tableName = "test_primary_key_partial_update_1"
+            sql """ DROP TABLE IF EXISTS ${tableName} FORCE"""
+            sql """ CREATE TABLE IF NOT EXISTS ${tableName} (
+                    `k1` int NOT NULL,
+                    `c1` int,
+                    `c2` int,
+                    `c3` int,
+                    `c4` int
+                    )UNIQUE KEY(k1)
+                DISTRIBUTED BY HASH(k1) BUCKETS 1
+                PROPERTIES (
+                    "disable_auto_compaction" = "true",
+                    "replication_num" = "1"); """
+
+            sql "insert into ${tableName} values(1,1,1,1,1);"
+            sql "insert into ${tableName} values(2,2,2,2,2);"
+            sql "insert into ${tableName} values(3,3,3,3,3);"
+            sql "sync;"
+            qt_sql "select * from ${tableName} order by k1;"
+
+            String content1 = 
+"""
+1,99,99,99,99,0
+2,88,88,88,88,0
+4,77,77,77,77,0
+3,23,23,23,23,1
+""".trim()
+            streamLoad {
+                table "${tableName}"
+                set 'column_separator', ','
+                set 'format', 'csv'
+                set 'partial_columns', 'true'
+                set 'hidden_columns', '__DORIS_DELETE_SIGN__'
+                inputStream new ByteArrayInputStream(content1.getBytes()) 
+                time 10000 // limit inflight 10s
+            }
+            qt_sql "select * from ${tableName} order by k1;"
+
+            // MERGE_TYPE=MERGE, test delete on illegal column
+            String content2 = "1,99,1"
+            streamLoad {
+                table "${tableName}"
+                set 'column_separator', ','
+                set 'format', 'csv'
+                set 'columns', 'k1,c2'
+                set 'partial_columns', 'true'
+                set 'merge_type', 'MERGE'
+                set 'delete', 'c3=1'
+                inputStream new ByteArrayInputStream(content2.getBytes()) 
+                time 10000
+                check {result, exception, startTime, endTime ->
+                    assertTrue(exception == null)
+                    def json = parseJson(result)
+                    assertEquals("Fail", json.Status)
+                    assertTrue(json.Message.contains("unknown reference column in DELETE ON clause:c3"))
+                }
+            }
+
+            String content3 = 
+"""
+1,99
+2,88,
+""".trim()
+            streamLoad {
+                table "${tableName}"
+                set 'column_separator', ','
+                set 'format', 'csv'
+                set 'columns', 'k1,c4'
+                set 'partial_columns', 'true'
+                set 'where', 'c5=1'
+                inputStream new ByteArrayInputStream(content3.getBytes()) 
+                time 10000
+                check {result, exception, startTime, endTime ->
+                    assertTrue(exception == null)
+                    def json = parseJson(result)
+                    assertEquals("Fail", json.Status)
+                    assertTrue(json.Message.contains("Unknown column 'c5'"))
+                }
+            }
         }
     }
 }
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_s3_load.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_s3_load.groovy
new file mode 100644
index 00000000000..533e954f867
--- /dev/null
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_s3_load.groovy
@@ -0,0 +1,110 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_partial_update_s3_load", "p0") {
+
+    def tableName = "test_partial_update_s3_load"
+    sql """ DROP TABLE IF EXISTS ${tableName} FORCE"""
+    sql """ CREATE TABLE IF NOT EXISTS ${tableName} (
+            `k1` int NOT NULL,
+            `c1` int,
+            `c2` int,
+            `c3` int,
+            `c4` int
+            )UNIQUE KEY(k1)
+        DISTRIBUTED BY HASH(k1) BUCKETS 1
+        PROPERTIES (
+            "disable_auto_compaction" = "true",
+            "replication_num" = "1"); """
+
+    sql "insert into ${tableName} values(1,1,1,1,1);"
+    sql "insert into ${tableName} values(2,2,2,2,2);"
+    sql "insert into ${tableName} values(3,3,3,3,3);"
+    sql "sync;"
+    qt_sql "select * from ${tableName} order by k1;"
+
+
+    def label = "test_pu" + UUID.randomUUID().toString().replace("-", "_")
+    logger.info("test_primary_key_partial_update, label: $label")
+    // 1,99
+    // 4,88
+    sql """
+    LOAD LABEL $label (
+        DATA INFILE("s3://${getS3BucketName()}/regression/unqiue_with_mow_p0/partial_update/row_s3_1.csv")
+        INTO TABLE ${tableName}
+        COLUMNS TERMINATED BY ","
+        (k1,c4)
+    ) WITH S3 (
+        "AWS_ACCESS_KEY" = "${getS3AK()}",
+        "AWS_SECRET_KEY" = "${getS3SK()}",
+        "AWS_ENDPOINT" = "${getS3Endpoint()}",
+        "AWS_REGION" = "${getS3Region()}",
+        "provider" = "${getS3Provider()}"
+    );
+    """
+    waitForBrokerLoadDone(label)
+    qt_sql "select * from ${tableName} order by k1;"
+
+
+    label = "test_pu" + UUID.randomUUID().toString().replace("-", "_")
+    logger.info("test_primary_key_partial_update, label: $label")
+    // 3,333
+    // 5,555
+    sql """
+    LOAD LABEL $label (
+        DATA INFILE("s3://${getS3BucketName()}/regression/unqiue_with_mow_p0/partial_update/row_s3_2.csv")
+        INTO TABLE ${tableName}
+        COLUMNS TERMINATED BY ","
+        (k1,c1)
+    ) WITH S3 (
+        "AWS_ACCESS_KEY" = "${getS3AK()}",
+        "AWS_SECRET_KEY" = "${getS3SK()}",
+        "AWS_ENDPOINT" = "${getS3Endpoint()}",
+        "AWS_REGION" = "${getS3Region()}",
+        "provider" = "${getS3Provider()}"
+    ) properties(
+        "partial_columns" = "false"
+    );
+    """
+    waitForBrokerLoadDone(label)
+    qt_sql "select * from ${tableName} order by k1;"
+
+
+    label = "test_pu" + UUID.randomUUID().toString().replace("-", "_")
+    logger.info("test_primary_key_partial_update, label: $label")
+    // 1,123,876
+    // 2,345,678
+    sql """
+    LOAD LABEL $label (
+        DATA INFILE("s3://${getS3BucketName()}/regression/unqiue_with_mow_p0/partial_update/pu_s3.csv")
+        INTO TABLE ${tableName}
+        COLUMNS TERMINATED BY ","
+        (k1,c2,c3)
+    )
+    WITH S3 (
+        "AWS_ACCESS_KEY" = "${getS3AK()}",
+        "AWS_SECRET_KEY" = "${getS3SK()}",
+        "AWS_ENDPOINT" = "${getS3Endpoint()}",
+        "AWS_REGION" = "${getS3Region()}",
+        "provider" = "${getS3Provider()}"
+    ) properties(
+        "partial_columns" = "true"
+    );
+    """
+    waitForBrokerLoadDone(label)
+    qt_sql "select * from ${tableName} order by k1;"
+}
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
