This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 74313c7d54 [feature-wip](autoinc)(step-3) add auto increment support 
for unique table (#22036)
74313c7d54 is described below

commit 74313c7d540765ac4f0b3ae70e7d17f348d3c33b
Author: bobhan1 <bh2444151...@outlook.com>
AuthorDate: Fri Jul 21 13:24:41 2023 +0800

    [feature-wip](autoinc)(step-3) add auto increment support for unique table 
(#22036)
---
 .../Create/CREATE-TABLE.md                         |   4 +-
 .../Create/CREATE-TABLE.md                         |   2 +-
 .../java/org/apache/doris/common/ErrorCode.java    |   4 +-
 .../apache/doris/datasource/InternalCatalog.java   |   4 +-
 .../data/data_model_p0/unique/auto_inc_basic.csv   |   9 +
 .../unique/auto_inc_partial_update1.csv            |   5 +
 .../unique/auto_inc_partial_update2.csv            |   5 +
 .../unique/auto_inc_update_inplace.csv             |   9 +
 .../data_model_p0/unique/auto_inc_with_null.csv    |   9 +
 .../unique/test_unique_table_auto_inc.out          | 140 ++++++++++
 .../aggregate/test_aggregate_table.groovy          |   2 +-
 .../data_model_p0/unique/test_unique_table.groovy  |  16 --
 .../unique/test_unique_table_auto_inc.groovy       | 289 +++++++++++++++++++++
 13 files changed, 474 insertions(+), 24 deletions(-)

diff --git 
a/docs/en/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
 
b/docs/en/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
index 04f0100b26..1d24ceb27e 100644
--- 
a/docs/en/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
+++ 
b/docs/en/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
@@ -117,9 +117,9 @@ Column definition list:
 
     * `AUTO_INCREMENT`(only available in master branch)
 
-        To indicate if the column is a auto-increment column. Auto-increment 
column can be used to generate a unique identity for new row. If no values are 
assgined for auto-increment column when inserting, Doris will generate sequence 
numbers automatically. You can also assign the auto-increment column with NULL 
literal to indicate Doris to generate sequence numbers. It should be noted 
that, for performance reasons, BE will cache some values of auto-increment 
columns in memory. Therefor [...]
+        To indicate if the column is an auto-increment column. An auto-increment 
column can be used to generate a unique identity for new rows. If no values are 
assigned for the auto-increment column when inserting, Doris will generate sequence 
numbers automatically. You can also assign the auto-increment column with the NULL 
literal to indicate that Doris should generate sequence numbers. It should be noted 
that, for performance reasons, BE will cache some values of the auto-increment 
column in memory. Therefore [...]
         A table can have at most one auto-increment 
column. The auto-increment column should be BIGINT type and be NOT NULL.
-        Currently, only table of duplicate model supports auto-increment 
column.
+        Both Duplicate model tables and Unique model tables support 
auto-increment columns.
 
     * `default_value`
 
diff --git 
a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
 
b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
index a0648af94d..d26b62f493 100644
--- 
a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
+++ 
b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md
@@ -110,7 +110,7 @@ distribution_desc
             
         
是否为自增列,自增列可以用来为新插入的行生成一个唯一标识。在插入表数据时如果没有指定自增列的值,则会自动生成一个合法的值。当自增列被显式地插入NULL时,其值也会被替换为生成的合法值。需要注意的是,出于性能考虑,BE会在内存中缓存部分自增列的值,所以自增列自动生成的值只能保证单调性和唯一性,无法保证严格的连续性。
         一张表中至多有一个列是自增列,自增列必须是BIGINT类型,且必须为NOT NULL。
-        目前只有duplicate模型支持自增列。
+        Duplicate模型表和Unique模型表均支持自增列。
 
   * `default_value`
         列默认值,当导入数据未指定该列的值时,系统将赋予该列default_value。
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/ErrorCode.java 
b/fe/fe-core/src/main/java/org/apache/doris/common/ErrorCode.java
index 04d7fbf7de..5a4806b12e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/ErrorCode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/ErrorCode.java
@@ -1203,8 +1203,8 @@ public enum ErrorCode {
     ERR_AUTO_INCREMENT_COLUMN_NOT_BIGINT_TYPE(5095, new byte[]{'4', '2', '0', 
'0', '0'},
             "the auto increment must be BIGINT type."),
 
-    ERR_AUTO_INCREMENT_COLUMN_NOT_INT_DUPLICATE_TABLE(5096, new byte[]{'4', 
'2', '0', '0', '0'},
-            "the auto increment is only supported in duplicate table.");
+    ERR_AUTO_INCREMENT_COLUMN_IN_AGGREGATE_TABLE(5096, new byte[]{'4', '2', 
'0', '0', '0'},
+            "the auto increment is only supported in duplicate table and 
unique table.");
 
     // This is error code
     private final int code;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
index 696dc37d59..8422758089 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
@@ -2692,8 +2692,8 @@ public class InternalCatalog implements 
CatalogIf<Database> {
                 }
             }
         }
-        if (encounterAutoIncColumn && type != KeysType.DUP_KEYS) {
-            
ErrorReport.reportDdlException(ErrorCode.ERR_AUTO_INCREMENT_COLUMN_NOT_INT_DUPLICATE_TABLE);
+        if (encounterAutoIncColumn && type == KeysType.AGG_KEYS) {
+            
ErrorReport.reportDdlException(ErrorCode.ERR_AUTO_INCREMENT_COLUMN_IN_AGGREGATE_TABLE);
         }
     }
 
diff --git a/regression-test/data/data_model_p0/unique/auto_inc_basic.csv 
b/regression-test/data/data_model_p0/unique/auto_inc_basic.csv
new file mode 100644
index 0000000000..d482056470
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/auto_inc_basic.csv
@@ -0,0 +1,9 @@
+Bob, 100
+Alice, 200
+Tom, 300
+Test, 400
+Carter, 500
+Smith, 600
+Beata, 700
+Doris, 800
+Nereids, 900
\ No newline at end of file
diff --git 
a/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv 
b/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
new file mode 100644
index 0000000000..da1cfb4175
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
@@ -0,0 +1,5 @@
+0, 123
+2, 323
+4, 523
+6, 723
+8, 923
\ No newline at end of file
diff --git 
a/regression-test/data/data_model_p0/unique/auto_inc_partial_update2.csv 
b/regression-test/data/data_model_p0/unique/auto_inc_partial_update2.csv
new file mode 100644
index 0000000000..1f9554bd4a
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/auto_inc_partial_update2.csv
@@ -0,0 +1,5 @@
+Bob, 9990
+Tom, 9992
+Carter, 9994
+Beata, 9996
+Nereids, 9998
\ No newline at end of file
diff --git 
a/regression-test/data/data_model_p0/unique/auto_inc_update_inplace.csv 
b/regression-test/data/data_model_p0/unique/auto_inc_update_inplace.csv
new file mode 100644
index 0000000000..8ac7970434
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/auto_inc_update_inplace.csv
@@ -0,0 +1,9 @@
+null, Bob, 100
+null, Alice, 200
+null, Tom, 300
+null, Test, 400
+0, Carter, 500
+1, Smith, 600
+2, Beata, 700
+3, Doris, 800
+4, Nereids, 900
\ No newline at end of file
diff --git a/regression-test/data/data_model_p0/unique/auto_inc_with_null.csv 
b/regression-test/data/data_model_p0/unique/auto_inc_with_null.csv
new file mode 100644
index 0000000000..14390ca3cf
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/auto_inc_with_null.csv
@@ -0,0 +1,9 @@
+null, Bob, 100
+null, Alice, 200
+null, Tom, 300
+null, Test, 400
+4, Carter, 500
+5, Smith, 600
+6, Beata, 700
+7, Doris, 800
+8, Nereids, 900
\ No newline at end of file
diff --git 
a/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out 
b/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
new file mode 100644
index 0000000000..6409ff1958
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
@@ -0,0 +1,140 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !sql --
+0      Bob     100
+1      Alice   200
+2      Tom     300
+3      Test    400
+4      Carter  500
+5      Smith   600
+6      Beata   700
+7      Doris   800
+8      Nereids 900
+
+-- !sql --
+0      Bob     123
+1      Alice   200
+2      Tom     323
+3      Test    400
+4      Carter  523
+5      Smith   600
+6      Beata   700
+7      Doris   800
+8      Nereids 900
+
+-- !sql --
+0      Bob     100
+1      Alice   200
+2      Tom     300
+3      Test    400
+4      Carter  500
+5      Smith   600
+6      Beata   700
+7      Doris   800
+8      Nereids 900
+
+-- !sql --
+1      Alice   200
+3      Test    400
+5      Smith   600
+6      Beata   700
+7      Doris   800
+8      Nereids 900
+1230   Bob     100
+1232   Tom     300
+1234   Carter  500
+
+-- !sql --
+0       Bob    100
+1       Alice  200
+2       Tom    300
+3       Test   400
+4       Carter 500
+5       Smith  600
+6       Beata  700
+7       Doris  800
+8       Nereids        900
+
+-- !sql --
+0      Bob     123
+1       Alice  200
+2      Tom     323
+3       Test   400
+4      Carter  523
+5       Smith  600
+6       Beata  700
+7       Doris  800
+8       Nereids        900
+
+-- !update_inplace --
+0       Carter 500
+1       Smith  600
+2       Beata  700
+3       Doris  800
+4       Nereids        900
+
+-- !partial_update_key --
+0      Bob     100
+1      Alice   200
+2      Tom     300
+3      Test    400
+4      Carter  500
+5      Smith   600
+6      Beata   700
+7      Doris   800
+8      Nereids 900
+
+-- !partial_update_key --
+0      Bob     123
+1      Alice   200
+2      Tom     323
+3      Test    400
+4      Carter  523
+5      Smith   600
+6      Beata   723
+7      Doris   800
+8      Nereids 923
+
+-- !partial_update_value --
+Bob    100     0
+Alice  200     1
+Tom    300     2
+Test   400     3
+Carter 500     4
+Smith  600     5
+Beata  700     6
+Doris  800     7
+Nereids        900     8
+
+-- !partial_update_value --
+Alice  200     1
+Test   400     3
+Smith  600     5
+Doris  800     7
+Bob    100     9990
+Tom    300     9992
+Carter 500     9994
+Beata  700     9996
+Nereids        900     9998
+
+-- !partial_update_value --
+Bob    100     0
+Alice  200     1
+Tom    300     2
+Test   400     3
+Carter 500     4
+Smith  600     5
+Beata  700     6
+Doris  800     7
+Nereids        900     8
+
+-- !partial_update_value --
+Bob    9990    0
+Alice  200     1
+Tom    9992    2
+Test   400     3
+Carter 9994    4
+Smith  600     5
+Beata  9996    6
+Doris  800     7
+Nereids        9998    8
+
diff --git 
a/regression-test/suites/data_model_p0/aggregate/test_aggregate_table.groovy 
b/regression-test/suites/data_model_p0/aggregate/test_aggregate_table.groovy
index 463b548bb5..f2de06bd7d 100644
--- a/regression-test/suites/data_model_p0/aggregate/test_aggregate_table.groovy
+++ b/regression-test/suites/data_model_p0/aggregate/test_aggregate_table.groovy
@@ -115,7 +115,7 @@ suite("test_aggregate_table") {
             AGGREGATE KEY(k)
             DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = 
"1");
         """
-        exception "the auto increment is only supported in duplicate table."
+        exception "the auto increment is only supported in duplicate table and 
unique table."
     }
     // sql "drop database ${dbName}"
 }
diff --git 
a/regression-test/suites/data_model_p0/unique/test_unique_table.groovy 
b/regression-test/suites/data_model_p0/unique/test_unique_table.groovy
index f3fdc71bde..eb9bd2b57d 100644
--- a/regression-test/suites/data_model_p0/unique/test_unique_table.groovy
+++ b/regression-test/suites/data_model_p0/unique/test_unique_table.groovy
@@ -42,20 +42,4 @@ suite("test_unique_table") {
     order_qt_select_uniq_table "select * from ${tbName}"
     qt_desc_uniq_table "desc ${tbName}"
     sql "DROP TABLE ${tbName}"
-
-    def table_auto_inc = "test_aggregate_tab_with_auto_inc_col"
-    sql "drop table if exists ${table_auto_inc}"
-    test {
-        sql """
-            CREATE TABLE IF NOT EXISTS ${table_auto_inc} (
-                k BIGINT NOT NULL AUTO_INCREMENT,
-                int_value int,
-                char_value char(10),
-                date_value date
-            )
-            UNIQUE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = 
"1");
-        """
-        exception "the auto increment is only supported in duplicate table."
-    }
 }
diff --git 
a/regression-test/suites/data_model_p0/unique/test_unique_table_auto_inc.groovy 
b/regression-test/suites/data_model_p0/unique/test_unique_table_auto_inc.groovy
new file mode 100644
index 0000000000..cdfac6d00c
--- /dev/null
+++ 
b/regression-test/suites/data_model_p0/unique/test_unique_table_auto_inc.groovy
@@ -0,0 +1,289 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_unique_table_auto_inc") {
+    
+    // auto-increment column is key
+    def table1 = "test_unique_tab_auto_inc_col_basic_key"
+    sql "drop table if exists ${table1}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table1}` (
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`id`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`id`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table1}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+
+        file 'auto_inc_basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_sql "select * from ${table1};"
+    sql """ insert into ${table1} values(0, "Bob", 123), (2, "Tom", 323), (4, 
"Carter", 523);"""
+    qt_sql "select * from ${table1} order by id"
+    sql "drop table if exists ${table1};"
+
+    // auto-increment column is value
+    def table2 = "test_unique_tab_auto_inc_col_basic_value"
+    sql "drop table if exists ${table2}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table2}` (
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分",
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`name`, `value`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`name`, `value`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table2}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+
+        file 'auto_inc_basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_sql "select id, name, value from ${table2} order by id;"
+    sql """ insert into ${table2} values("Bob", 100, 1230), ("Tom", 300, 
1232), ("Carter", 500, 1234);"""
+    qt_sql "select id, name, value from ${table2} order by id;"
+    sql "drop table if exists ${table2};"
+
+    // auto inc key with null values
+    def table3 = "test_unique_tab_auto_inc_col_key_with_null"
+    sql "drop table if exists ${table3}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table3}` (
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`id`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`id`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table3}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'id, name, value'
+
+        file 'auto_inc_with_null.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_sql "select * from ${table3};"
+    sql """ insert into ${table3} values(0, "Bob", 123), (2, "Tom", 323), (4, 
"Carter", 523);"""
+    qt_sql "select * from ${table3} order by id"
+    sql "drop table if exists ${table3};"
+
+    // directly update rows in one batch
+    def table4 = "test_unique_tab_auto_inc_col_key_with_null"
+    sql "drop table if exists ${table4}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table4}` (
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`id`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`id`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table4}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'id, name, value'
+
+        file 'auto_inc_update_inplace.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_update_inplace "select * from ${table4};"
+    sql "drop table if exists ${table4};"
+
+    // test for partial update, auto inc col is key
+    def table5 = "test_unique_tab_auto_inc_col_key_partial_update"
+    sql "drop table if exists ${table5}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table5}` (
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`id`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`id`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table5}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+
+        file 'auto_inc_basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_partial_update_key "select * from ${table5} order by id;"
+
+    streamLoad {
+        table "${table5}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'id, value'
+        set 'partial_columns', 'true'
+
+        file 'auto_inc_partial_update1.csv'
+        time 10000
+    }
+    qt_partial_update_key "select * from ${table5} order by id;"
+    sql "drop table if exists ${table5};"
+
+    // test for partial update, auto inc col is value, update auto inc col
+    def table6 = "test_unique_tab_auto_inc_col_value_partial_update"
+    sql "drop table if exists ${table6}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table6}` (
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分",
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`name`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`name`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table6}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+
+        file 'auto_inc_basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_partial_update_value "select * from ${table6} order by id;"
+
+    streamLoad {
+        table "${table6}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, id'
+        set 'partial_columns', 'true'
+
+        file 'auto_inc_partial_update2.csv'
+        time 10000
+    }
+    qt_partial_update_value "select * from ${table6} order by id;"
+    sql "drop table if exists ${table6};"
+
+    // test for partial update, auto inc col is value, update other col
+    def table7 = "test_unique_tab_auto_inc_col_value_partial_update"
+    sql "drop table if exists ${table7}"
+    sql """
+        CREATE TABLE IF NOT EXISTS `${table7}` (
+          `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+          `value` int(11) NOT NULL COMMENT "用户得分",
+          `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID"
+        ) ENGINE=OLAP
+        UNIQUE KEY(`name`)
+        COMMENT "OLAP"
+        DISTRIBUTED BY HASH(`name`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "in_memory" = "false",
+        "storage_format" = "V2",
+        "enable_unique_key_merge_on_write" = "true"
+        )
+    """
+    streamLoad {
+        table "${table7}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+
+        file 'auto_inc_basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_partial_update_value "select * from ${table7} order by id;"
+
+    streamLoad {
+        table "${table7}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'name, value'
+        set 'partial_columns', 'true'
+
+        file 'auto_inc_partial_update2.csv'
+        time 10000
+    }
+    qt_partial_update_value "select * from ${table7} order by id;"
+    sql "drop table if exists ${table7};"
+}
+


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org

Reply via email to