This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.0 by this push:
     new 91371eaa156 [branch-2.0-pick] "[Fix](merge-on-write) Fix some bugs about sequence column #24915" (#25757)
91371eaa156 is described below

commit 91371eaa156369c92e65bfb39713cfc11b974795
Author: bobhan1 <bh2444151...@outlook.com>
AuthorDate: Tue Oct 24 11:36:44 2023 +0800

    [branch-2.0-pick] "[Fix](merge-on-write) Fix some bugs about sequence column #24915" (#25757)
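
    A minimal SQL sketch of the behavior this fix enforces (table and column
    names are illustrative, not taken from the patch): on a merge-on-write
    Unique table whose sequence column is mapped to a value column, a row
    update via INSERT that omits the mapped column is rejected unless that
    column's default value is CURRENT_TIMESTAMP.

        -- illustrative MoW Unique table with a mapped sequence column
        CREATE TABLE t (
            id int NOT NULL,
            score int NOT NULL,
            update_time date NULL
        ) UNIQUE KEY(id) DISTRIBUTED BY HASH(id) BUCKETS 1
        PROPERTIES (
            "replication_num" = "1",
            "enable_unique_key_merge_on_write" = "true",
            "function_column.sequence_col" = "update_time"
        );

        -- rejected: the sequence column is omitted and its default is not CURRENT_TIMESTAMP
        -- ERROR: Table t has sequence column, need to specify the sequence column
        INSERT INTO t (id, score) VALUES (1, 100);

        -- accepted: the sequence column is specified explicitly
        INSERT INTO t (id, score, update_time) VALUES (1, 100, '2023-01-01');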
---
 .../update-delete/sequence-column-manual.md        |   2 +-
 .../update-delete/sequence-column-manual.md        |   2 +-
 .../apache/doris/alter/SchemaChangeHandler.java    |   5 +
 .../apache/doris/analysis/NativeInsertStmt.java    |  19 ++++
 .../doris/nereids/rules/analysis/BindSink.java     |  34 +++++-
 .../doris/planner/external/LoadScanProvider.java   |  14 ++-
 .../insert_into_table/partial_update_seq_col.out}  |   9 +-
 ...t_partial_update_insert_light_schema_change.out |   4 -
 .../test_partial_update_insert_schema_change.out   |   4 -
 ....out => test_partial_update_insert_seq_col.out} |   9 +-
 .../partial_update/test_partial_update_seq_col.out |   5 +
 .../suites/nereids_p0/delete/delete_cte.groovy     |   1 -
 .../suites/nereids_p0/delete/delete_using.groovy   |   1 -
 .../insert_into_table/complex_insert.groovy        |   3 +-
 .../partial_update_seq_col.groovy                  | 115 +++++++++++++++++++++
 ...artial_update_insert_light_schema_change.groovy |  20 +---
 ...test_partial_update_insert_schema_change.groovy |  20 +---
 .../test_partial_update_insert_seq_col.groovy      | 114 ++++++++++++++++++++
 .../test_partial_update_seq_col.groovy             |  31 ++++++
 19 files changed, 354 insertions(+), 58 deletions(-)

diff --git a/docs/en/docs/data-operate/update-delete/sequence-column-manual.md b/docs/en/docs/data-operate/update-delete/sequence-column-manual.md
index 3517d0d9b94..937b71ffe99 100644
--- a/docs/en/docs/data-operate/update-delete/sequence-column-manual.md
+++ b/docs/en/docs/data-operate/update-delete/sequence-column-manual.md
@@ -249,7 +249,7 @@ MySQL [test]> select * from test_table;
 At this point, you can replace the original data in the table. To sum up, the sequence column will be compared among all the batches, the largest value of the same key will be imported into Doris table.
 
 ## Note
-1. To prevent misuse, in load tasks like StreamLoad/BrokerLoad, user must specify the sequence column; otherwise, user will receive the following error message:
+1. To prevent misuse, in load tasks like StreamLoad/BrokerLoad and in row updates via insert statements, users must explicitly specify the sequence column (unless the sequence column's default value is CURRENT_TIMESTAMP); otherwise, users will receive the following error message:
 ```
 Table test_tbl has a sequence column, need to specify the sequence column
 ```
diff --git a/docs/zh-CN/docs/data-operate/update-delete/sequence-column-manual.md b/docs/zh-CN/docs/data-operate/update-delete/sequence-column-manual.md
index 709a52efdbb..6b85f333601 100644
--- a/docs/zh-CN/docs/data-operate/update-delete/sequence-column-manual.md
+++ b/docs/zh-CN/docs/data-operate/update-delete/sequence-column-manual.md
@@ -269,7 +269,7 @@ MySQL [test]> select * from test_table;
 此时就可以替换表中原有的数据。综上,在导入过程中,会比较所有批次的sequence列值,选择值最大的记录导入Doris表中。
 
 ## 注意
-1. 为防止误用,在StreamLoad/BrokerLoad等导入任务中,必须要指定sequence列,不然会收到以下报错信息:
+1. 为防止误用,在StreamLoad/BrokerLoad等导入任务以及行更新insert语句中,用户必须显式指定sequence列(除非sequence列的默认值为CURRENT_TIMESTAMP),不然会收到以下报错信息:
 ```
 Table test_tbl has sequence column, need to specify the sequence column
 ```
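
A minimal sketch of the CURRENT_TIMESTAMP exception mentioned in the note above (table and column names are illustrative, not taken from the patch): when the mapped sequence column defaults to CURRENT_TIMESTAMP, a row update that omits it is accepted and the sequence value is filled from that default.

    -- illustrative table; the sequence column defaults to CURRENT_TIMESTAMP
    CREATE TABLE t2 (
        id int NOT NULL,
        score int NOT NULL,
        update_time DATETIMEV2 NULL DEFAULT CURRENT_TIMESTAMP
    ) UNIQUE KEY(id) DISTRIBUTED BY HASH(id) BUCKETS 1
    PROPERTIES (
        "replication_num" = "1",
        "enable_unique_key_merge_on_write" = "true",
        "function_column.sequence_col" = "update_time"
    );

    -- accepted even though update_time is not listed
    INSERT INTO t2 (id, score) VALUES (1, 100), (2, 200);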
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
index 9b5a232c96b..3aa8e7888e4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
@@ -323,6 +323,11 @@ public class SchemaChangeHandler extends AlterHandler {
                 throw new DdlException("Can not drop key column in Unique data model table");
             }
 
+            if (olapTable.hasSequenceCol() && dropColName.equalsIgnoreCase(olapTable.getSequenceMapCol())) {
+                throw new DdlException("Can not drop sequence mapping column[" + dropColName
+                        + "] in Unique data model table[" + olapTable.getName() + "]");
+            }
+
         } else if (KeysType.AGG_KEYS == olapTable.getKeysType()) {
             if (null == targetIndexName) {
                 // drop column in base table
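
With the SchemaChangeHandler change above, dropping the column that backs the sequence column is now rejected up front. A hedged SQL sketch (table and column names are illustrative, not taken from the patch):

    -- assuming table t was created with "function_column.sequence_col" = "update_time"
    ALTER TABLE t DROP COLUMN update_time;
    -- ERROR: Can not drop sequence mapping column[update_time] in Unique data model table[t]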
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java
index 06d8a55187f..99e6cb8084a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java
@@ -18,6 +18,7 @@
 package org.apache.doris.analysis;
 
 import org.apache.doris.alter.SchemaChangeHandler;
+import org.apache.doris.analysis.ColumnDef.DefaultValue;
 import org.apache.doris.catalog.BrokerTable;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Database;
@@ -72,6 +73,7 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -389,6 +391,23 @@ public class NativeInsertStmt extends InsertStmt {
                     targetPartitionIds.add(part.getId());
                 }
             }
+
+            if (olapTable.hasSequenceCol() && olapTable.getSequenceMapCol() != null && targetColumnNames != null) {
+                Optional<String> foundCol = targetColumnNames.stream()
+                            .filter(c -> c.equalsIgnoreCase(olapTable.getSequenceMapCol())).findAny();
+                Optional<Column> seqCol = olapTable.getFullSchema().stream()
+                                .filter(col -> col.getName().equals(olapTable.getSequenceMapCol()))
+                                .findFirst();
+                if (seqCol.isPresent() && !foundCol.isPresent() && !isPartialUpdate && !isFromDeleteOrUpdateStmt
+                        && !analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate()) {
+                    if (seqCol.get().getDefaultValue() == null
+                                    || !seqCol.get().getDefaultValue().equals(DefaultValue.CURRENT_TIMESTAMP)) {
+                        throw new AnalysisException("Table " + olapTable.getName()
+                                + " has sequence column, need to specify the sequence column");
+                    }
+                }
+            }
+
             if (isPartialUpdate && olapTable.hasSequenceCol() && olapTable.getSequenceMapCol() != null
                     && partialUpdateCols.contains(olapTable.getSequenceMapCol())) {
                 partialUpdateCols.add(Column.SEQUENCE_COL);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
index 2454c635e13..094d6e38d28 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
@@ -17,6 +17,7 @@
 
 package org.apache.doris.nereids.rules.analysis;
 
+import org.apache.doris.analysis.ColumnDef.DefaultValue;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Database;
@@ -54,6 +55,7 @@ import com.google.common.collect.Maps;
 
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 /**
@@ -100,6 +102,25 @@ public class BindSink implements AnalysisRuleFactory {
                                         "insert into cols should be corresponding to the query output");
                             }
 
+                            try {
+                                if (table.hasSequenceCol() && table.getSequenceMapCol() != null
+                                            && !sink.getColNames().isEmpty() && !boundSink.isPartialUpdate()) {
+                                    Column seqCol = table.getFullSchema().stream()
+                                                    .filter(col -> col.getName().equals(table.getSequenceMapCol()))
+                                                    .findFirst().get();
+                                    Optional<String> foundCol = sink.getColNames().stream()
+                                                    .filter(col -> col.equals(table.getSequenceMapCol()))
+                                                    .findFirst();
+                                    if (!foundCol.isPresent() && (seqCol.getDefaultValue() == null
+                                            || !seqCol.getDefaultValue().equals(DefaultValue.CURRENT_TIMESTAMP))) {
+                                        throw new AnalysisException("Table " + table.getName()
+                                            + " has sequence column, need to specify the sequence column");
+                                    }
+                                }
+                            } catch (Exception e) {
+                                throw new AnalysisException(e.getMessage(), e.getCause());
+                            }
+
                             Map<Column, NamedExpression> columnToChildOutput = Maps.newHashMap();
                             for (int i = 0; i < boundSink.getCols().size(); ++i) {
                                 columnToChildOutput.put(boundSink.getCols().get(i), child.getOutput().get(i));
@@ -137,10 +158,17 @@ public class BindSink implements AnalysisRuleFactory {
                                         if (table.hasSequenceCol()
                                                 && column.getName().equals(Column.SEQUENCE_COL)
                                                 && table.getSequenceMapCol() != null) {
-                                            Column seqCol = table.getBaseSchema(true).stream()
+                                            Optional<Column> seqCol = table.getFullSchema().stream()
                                                     .filter(col -> col.getName().equals(table.getSequenceMapCol()))
-                                                    .findFirst().get();
-                                            columnToOutput.put(column.getName(), columnToOutput.get(seqCol.getName()));
+                                                    .findFirst();
+                                            if (!seqCol.isPresent()) {
+                                                throw new AnalysisException("sequence column is not contained in"
+                                                        + " target table " + table.getName());
+                                            }
+                                            if (columnToOutput.get(seqCol.get().getName()) != null) {
+                                                columnToOutput.put(column.getName(),
+                                                        columnToOutput.get(seqCol.get().getName()));
+                                            }
                                         } else if (sink.isPartialUpdate()) {
                                             // If the current load is a partial update, the values of unmentioned
                                             // columns will be filled in SegmentWriter. And the output of sink node
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/LoadScanProvider.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/LoadScanProvider.java
index 52bb119d7a7..872739313ab 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/LoadScanProvider.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/LoadScanProvider.java
@@ -18,6 +18,7 @@
 package org.apache.doris.planner.external;
 
 import org.apache.doris.analysis.Analyzer;
+import org.apache.doris.analysis.ColumnDef.DefaultValue;
 import org.apache.doris.analysis.ImportColumnDesc;
 import org.apache.doris.analysis.IntLiteral;
 import org.apache.doris.analysis.SlotRef;
@@ -188,7 +189,8 @@ public class LoadScanProvider {
         // add columnExpr for sequence column
         TableIf targetTable = getTargetTable();
         if (targetTable instanceof OlapTable && ((OlapTable) targetTable).hasSequenceCol()) {
-            String sequenceCol = ((OlapTable) targetTable).getSequenceMapCol();
+            OlapTable olapTable = (OlapTable) targetTable;
+            String sequenceCol = olapTable.getSequenceMapCol();
             if (sequenceCol != null) {
                 String finalSequenceCol = sequenceCol;
                 Optional<ImportColumnDesc> foundCol = columnDescs.descs.stream()
@@ -199,8 +201,14 @@ public class LoadScanProvider {
                     columnDescs.descs.add(new ImportColumnDesc(Column.SEQUENCE_COL,
                             new SlotRef(null, sequenceCol)));
                 } else if (!fileGroupInfo.isPartialUpdate()) {
-                    throw new UserException("Table " + targetTable.getName()
-                            + " has sequence column, need to specify the sequence column");
+                    Column seqCol = olapTable.getFullSchema().stream()
+                                    .filter(col -> col.getName().equals(olapTable.getSequenceMapCol()))
+                                    .findFirst().get();
+                    if (seqCol.getDefaultValue() == null
+                                    || !seqCol.getDefaultValue().equals(DefaultValue.CURRENT_TIMESTAMP)) {
+                        throw new UserException("Table " + olapTable.getName()
+                                + " has sequence column, need to specify the sequence column");
+                    }
                 }
             } else {
                 sequenceCol = context.fileGroup.getSequenceCol();
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out b/regression-test/data/nereids_p0/insert_into_table/partial_update_seq_col.out
similarity index 82%
copy from regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
copy to regression-test/data/nereids_p0/insert_into_table/partial_update_seq_col.out
index 2e108324765..01b4341d54b 100644
--- a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
+++ b/regression-test/data/nereids_p0/insert_into_table/partial_update_seq_col.out
@@ -10,9 +10,14 @@
 -- !partial_update_with_seq --
 1      doris   200     123     1       2023-01-01
 2      doris2  2600    223     1       2023-07-20
-3      unknown 1500    \N      4321    2022-07-20
+3      unknown 2500    \N      4321    2022-07-18
 
 -- !partial_update_with_seq_hidden_columns --
 1      doris   200     123     1       2023-01-01      0       3       2023-01-01
 2      doris2  2600    223     1       2023-07-20      0       4       2023-07-20
-3      unknown 1500    \N      4321    2022-07-20      0       4       2022-07-20
+3      unknown 2500    \N      4321    2022-07-18      0       4       2022-07-18
+
+-- !sql --
+1      200
+2      400
+
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.out
index 81cd9a99ef3..d7e0e49e58b 100644
--- a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.out
+++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.out
@@ -18,10 +18,6 @@
 1      10      10      10
 2      20      20      20
 
--- !delete_seq_col_2 --
-1      111     10
-2      222     20
-
 -- !update_value_col_1 --
 1      0       0       0       0       0       0       0       0       0
 
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.out
index 81cd9a99ef3..d7e0e49e58b 100644
--- a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.out
+++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.out
@@ -18,10 +18,6 @@
 1      10      10      10
 2      20      20      20
 
--- !delete_seq_col_2 --
-1      111     10
-2      222     20
-
 -- !update_value_col_1 --
 1      0       0       0       0       0       0       0       0       0
 
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.out
similarity index 82%
copy from regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
copy to regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.out
index 2e108324765..01b4341d54b 100644
--- a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
+++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.out
@@ -10,9 +10,14 @@
 -- !partial_update_with_seq --
 1      doris   200     123     1       2023-01-01
 2      doris2  2600    223     1       2023-07-20
-3      unknown 1500    \N      4321    2022-07-20
+3      unknown 2500    \N      4321    2022-07-18
 
 -- !partial_update_with_seq_hidden_columns --
 1      doris   200     123     1       2023-01-01      0       3       2023-01-01
 2      doris2  2600    223     1       2023-07-20      0       4       2023-07-20
-3      unknown 1500    \N      4321    2022-07-20      0       4       2022-07-20
+3      unknown 2500    \N      4321    2022-07-18      0       4       2022-07-18
+
+-- !sql --
+1      200
+2      400
+
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
index 2e108324765..a789f18216e 100644
--- a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
+++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_seq_col.out
@@ -16,3 +16,8 @@
 1      doris   200     123     1       2023-01-01      0       3       2023-01-01
 2      doris2  2600    223     1       2023-07-20      0       4       2023-07-20
 3      unknown 1500    \N      4321    2022-07-20      0       4       2022-07-20
+
+-- !sql --
+1      200
+2      400
+
diff --git a/regression-test/suites/nereids_p0/delete/delete_cte.groovy b/regression-test/suites/nereids_p0/delete/delete_cte.groovy
index bebbf035491..1fc61b7ca5e 100644
--- a/regression-test/suites/nereids_p0/delete/delete_cte.groovy
+++ b/regression-test/suites/nereids_p0/delete/delete_cte.groovy
@@ -33,7 +33,6 @@ suite('nereids_delete_cte') {
         distributed by hash(id, id1)
         properties(
             "replication_num"="1",
-            "function_column.sequence_col" = "c4",
             "enable_unique_key_merge_on_write" = "true"
         );
     """
diff --git a/regression-test/suites/nereids_p0/delete/delete_using.groovy b/regression-test/suites/nereids_p0/delete/delete_using.groovy
index de50ea87679..bc5a29430c5 100644
--- a/regression-test/suites/nereids_p0/delete/delete_using.groovy
+++ b/regression-test/suites/nereids_p0/delete/delete_using.groovy
@@ -29,7 +29,6 @@ suite('nereids_delete_using') {
         distributed by hash(id, id1)
         properties(
             'replication_num'='1',
-            "function_column.sequence_col" = "c4",
             "enable_unique_key_merge_on_write" = "true"
         );
     '''
diff --git a/regression-test/suites/nereids_p0/insert_into_table/complex_insert.groovy b/regression-test/suites/nereids_p0/insert_into_table/complex_insert.groovy
index cf6168c71a2..91160335063 100644
--- a/regression-test/suites/nereids_p0/insert_into_table/complex_insert.groovy
+++ b/regression-test/suites/nereids_p0/insert_into_table/complex_insert.groovy
@@ -125,8 +125,7 @@ suite('complex_insert') {
         ) unique key (id, id1)
         distributed by hash(id, id1) buckets 13
         properties(
-            'replication_num'='1',
-            "function_column.sequence_col" = "c4"
+            'replication_num'='1'
         );
     '''
 
diff --git a/regression-test/suites/nereids_p0/insert_into_table/partial_update_seq_col.groovy b/regression-test/suites/nereids_p0/insert_into_table/partial_update_seq_col.groovy
new file mode 100644
index 00000000000..7d4190852f6
--- /dev/null
+++ b/regression-test/suites/nereids_p0/insert_into_table/partial_update_seq_col.groovy
@@ -0,0 +1,115 @@
+
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("nereids_partial_update_native_insert_seq_col", "p0") {
+    sql "set enable_nereids_dml=true;"
+    sql "set experimental_enable_nereids_planner=true;"
+    sql "set enable_fallback_to_original_planner=false;"
+    sql "sync;"
+
+    def tableName = "nereids_partial_update_native_insert_seq_col"
+    sql """ DROP TABLE IF EXISTS ${tableName} """
+    sql """
+            CREATE TABLE ${tableName} (
+                `id` int(11) NOT NULL COMMENT "用户 ID",
+                `name` varchar(65533) DEFAULT "unknown" COMMENT "用户姓名",
+                `score` int(11) NOT NULL COMMENT "用户得分",
+                `test` int(11) NULL COMMENT "null test",
+                `dft` int(11) DEFAULT "4321",
+                `update_time` date NULL)
+            UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES(
+                "replication_num" = "1",
+                "enable_unique_key_merge_on_write" = "true",
+                "function_column.sequence_col" = "update_time"
+            )
+    """
+    sql """ insert into ${tableName} values
+            (2, "doris2", 2000, 223, 1, '2023-01-01'),
+            (1, "doris", 1000, 123, 1, '2023-01-01');"""
+    sql "sync"
+
+    qt_select_default """ select * from ${tableName} order by id;"""
+
+    // set enable_unique_key_partial_update=false, it's a row update
+    // the input data does not contain the sequence mapping column and the sequence mapping
+    // column's default value is not CURRENT_TIMESTAMP, so the load will fail
+    test {
+        sql "insert into ${tableName}(id,score) values(2,400),(1,200);"
+        exception "Table nereids_partial_update_native_insert_seq_col has sequence column, need to specify the sequence column"
+    }
+
+    // set enable_unique_key_partial_update=true, should succeed
+    // we don't provide the sequence column in the input data, so the updated rows
+    // should keep their original sequence column values.
+    sql "set enable_unique_key_partial_update=true;"
+    sql "sync;"
+    sql "insert into ${tableName}(id,score) values(2,400),(1,200);"
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+
+    qt_partial_update_without_seq """ select * from ${tableName} order by id;"""
+
+    // provide the sequence column this time, should update according to the
+    // given sequence values
+    sql "set enable_unique_key_partial_update=true;"
+    sql "set enable_insert_strict = false;"
+    sql "sync;"
+    sql """insert into ${tableName}(id,score,update_time) values
+                (2,2500,"2023-07-19"),
+                (2,2600,"2023-07-20"),
+                (1,1300,"2022-07-19"),
+                (3,1500,"2022-07-20"),
+                (3,2500,"2022-07-18");"""
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+
+    qt_partial_update_with_seq """ select * from ${tableName} order by id;"""
+
+    sql "SET show_hidden_columns=true"
+    sql "sync"
+
+    qt_partial_update_with_seq_hidden_columns """select * from ${tableName} order by id;"""
+
+    sql """ DROP TABLE IF EXISTS ${tableName} """
+
+
+    def tableName2 = "nereids_partial_update_native_insert_seq_col2"
+    sql """ DROP TABLE IF EXISTS ${tableName2} """
+    sql """
+            CREATE TABLE ${tableName2} (
+                `id` int(11) NOT NULL COMMENT "用户 ID",
+                `score` int(11) NOT NULL COMMENT "用户得分",
+                `update_time` DATETIMEV2 NULL DEFAULT CURRENT_TIMESTAMP)
+            UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES(
+                "replication_num" = "1",
+                "enable_unique_key_merge_on_write" = "true",
+                "function_column.sequence_col" = "update_time"
+            )""" 
+    
+    // don't set enable_unique_key_partial_update, it's a row update
+    // the input data does not contain the sequence mapping column but the sequence mapping
+    // column's default value is CURRENT_TIMESTAMP, so the load will succeed
+    sql "SET show_hidden_columns=false"
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+    sql "insert into ${tableName2}(id,score) values(2,400),(1,200);"
+    qt_sql """ select id,score from ${tableName2} order by id;"""
+    sql """ DROP TABLE IF EXISTS ${tableName2}; """
+}
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.groovy
index fe8a471696f..e93f7fa46be 100644
--- a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.groovy
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_light_schema_change.groovy
@@ -150,24 +150,10 @@ suite("test_partial_update_insert_light_schema_change", "p0") {
     qt_delete_seq_col_1 "select * from ${tableName} order by k;"
 
     // schema change
-    sql " ALTER table ${tableName} DROP COLUMN c;"
-    try_times=100
-    while(true){
-        def res = sql " SHOW ALTER TABLE COLUMN WHERE TableName = '${tableName}' ORDER BY CreateTime DESC LIMIT 1 "
-        Thread.sleep(1200)
-        if(res[0][9].toString() == "FINISHED"){
-            break;
-        }
-        assert(try_times>0)
-        try_times--
+    test {
+        sql " ALTER table ${tableName} DROP COLUMN c;"
+        exception "Can not drop sequence mapping column[c] in Unique data 
model table[${tableName}]"
     }
-    sql "sync"
-
-    sql "set enable_unique_key_partial_update=true;"
-    sql "insert into ${tableName}(k,v1) values(2,222),(1,111);"
-    sql "set enable_unique_key_partial_update=false;"
-    sql "sync"
-    qt_delete_seq_col_2 "select * from ${tableName} order by k;" 
 
     // test update value column
     tableName = "test_partial_update_insert_light_schema_change_update_column"
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.groovy
index 1229fcf4d3e..61f07c8e10f 100644
--- a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.groovy
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_schema_change.groovy
@@ -150,24 +150,10 @@ suite("test_partial_update_insert_schema_change", "p0") {
     qt_delete_seq_col_1 "select * from ${tableName} order by k;"
 
     // schema change
-    sql " ALTER table ${tableName} DROP COLUMN c;"
-    try_times=100
-    while(true){
-        def res = sql " SHOW ALTER TABLE COLUMN WHERE TableName = '${tableName}' ORDER BY CreateTime DESC LIMIT 1 "
-        Thread.sleep(1200)
-        if(res[0][9].toString() == "FINISHED"){
-            break;
-        }
-        assert(try_times>0)
-        try_times--
+    test {
+        sql " ALTER table ${tableName} DROP COLUMN c;"
+        exception "Can not drop sequence mapping column[c] in Unique data 
model table[${tableName}]"
     }
-    sql "sync"
-
-    sql "set enable_unique_key_partial_update=true;"
-    sql "insert into ${tableName}(k,v1) values(2,222),(1,111);"
-    sql "set enable_unique_key_partial_update=false;"
-    sql "sync"
-    qt_delete_seq_col_2 "select * from ${tableName} order by k;" 
 
     // test update value column
     tableName = "test_partial_update_insert_schema_change_update_column"
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.groovy
new file mode 100644
index 00000000000..0f80940ec33
--- /dev/null
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_insert_seq_col.groovy
@@ -0,0 +1,114 @@
+
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_partial_update_native_insert_seq_col_old_planner", "p0") {
+    sql "set enable_nereids_dml=false;"
+    sql "set experimental_enable_nereids_planner=false;"
+    sql "set enable_fallback_to_original_planner=true;"
+    sql "sync;"
+
+    def tableName = "test_partial_update_native_insert_seq_col_old_planner"
+    sql """ DROP TABLE IF EXISTS ${tableName} """
+    sql """
+            CREATE TABLE ${tableName} (
+                `id` int(11) NOT NULL COMMENT "用户 ID",
+                `name` varchar(65533) DEFAULT "unknown" COMMENT "用户姓名",
+                `score` int(11) NOT NULL COMMENT "用户得分",
+                `test` int(11) NULL COMMENT "null test",
+                `dft` int(11) DEFAULT "4321",
+                `update_time` date NULL)
+            UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES(
+                "replication_num" = "1",
+                "enable_unique_key_merge_on_write" = "true",
+                "function_column.sequence_col" = "update_time"
+            )
+    """
+    sql """ insert into ${tableName} values
+            (2, "doris2", 2000, 223, 1, '2023-01-01'),
+            (1, "doris", 1000, 123, 1, '2023-01-01');"""
+    sql "sync"
+
+    qt_select_default """ select * from ${tableName} order by id;"""
+
+    // don't set enable_unique_key_partial_update, it's a row update
+    // the input data does not contain the sequence mapping column, so the load will fail
+    test {
+        sql "insert into ${tableName}(id,score) values(2,400),(1,200);"
+        exception "Table test_partial_update_native_insert_seq_col_old_planner 
has sequence column, need to specify the sequence column"
+    }
+
+    // set enable_unique_key_partial_update=true, should succeed
+    // we don't provide the sequence column in the input data, so the updated rows
+    // should keep their original sequence column values.
+    sql "set enable_unique_key_partial_update=true;"
+    sql "sync;"
+    sql "insert into ${tableName}(id,score) values(2,400),(1,200);"
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+
+    qt_partial_update_without_seq """ select * from ${tableName} order by id;"""
+
+    // provide the sequence column this time, should update according to the
+    // given sequence values
+    sql "set enable_unique_key_partial_update=true;"
+    sql "set enable_insert_strict = false;"
+    sql "sync;"
+    sql """insert into ${tableName}(id,score,update_time) values
+                (2,2500,"2023-07-19"),
+                (2,2600,"2023-07-20"),
+                (1,1300,"2022-07-19"),
+                (3,1500,"2022-07-20"),
+                (3,2500,"2022-07-18");"""
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+
+    qt_partial_update_with_seq """ select * from ${tableName} order by id;"""
+
+    sql "SET show_hidden_columns=true"
+    sql "sync"
+
+    qt_partial_update_with_seq_hidden_columns """select * from ${tableName} order by id;"""
+
+    sql """ DROP TABLE IF EXISTS ${tableName} """
+
+
+    def tableName2 = "nereids_partial_update_native_insert_seq_col2"
+    sql """ DROP TABLE IF EXISTS ${tableName2} """
+    sql """
+            CREATE TABLE ${tableName2} (
+                `id` int(11) NOT NULL COMMENT "用户 ID",
+                `score` int(11) NOT NULL COMMENT "用户得分",
+                `update_time` DATETIMEV2 NULL DEFAULT CURRENT_TIMESTAMP)
+            UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES(
+                "replication_num" = "1",
+                "enable_unique_key_merge_on_write" = "true",
+                "function_column.sequence_col" = "update_time"
+            )""" 
+    
+    // don't set enable_unique_key_partial_update, it's a row update
+    // the input data does not contain the sequence mapping column but the sequence mapping
+    // column's default value is CURRENT_TIMESTAMP, so the load will succeed
+    sql "SET show_hidden_columns=false"
+    sql "set enable_unique_key_partial_update=false;"
+    sql "sync;"
+    sql "insert into ${tableName2}(id,score) values(2,400),(1,200);"
+    qt_sql """ select id,score from ${tableName2} order by id;"""
+    sql """ DROP TABLE IF EXISTS ${tableName2}; """
+}
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_seq_col.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_seq_col.groovy
index 06a066fc0a9..4a99f2a7700 100644
--- a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_seq_col.groovy
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_seq_col.groovy
@@ -124,4 +124,35 @@ suite("test_primary_key_partial_update_seq_col", "p0") {
 
     // drop drop
     sql """ DROP TABLE IF EXISTS ${tableName} """
+
+
+    def tableName2 = "nereids_partial_update_native_insert_seq_col2"
+    sql """ DROP TABLE IF EXISTS ${tableName2} """
+    sql """
+            CREATE TABLE ${tableName2} (
+                `id` int(11) NOT NULL COMMENT "用户 ID",
+                `score` int(11) NOT NULL COMMENT "用户得分",
+                `update_time` DATETIMEV2 NULL DEFAULT CURRENT_TIMESTAMP)
+            UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES(
+                "replication_num" = "1",
+                "enable_unique_key_merge_on_write" = "true",
+                "function_column.sequence_col" = "update_time"
+            )""" 
+    
+    // don't set partial update header, it's a row update streamload
+    // the input data does not contain the sequence mapping column but the sequence mapping
+    // column's default value is CURRENT_TIMESTAMP, so the load will succeed
+    streamLoad {
+        table "${tableName2}"
+
+        set 'column_separator', ','
+        set 'format', 'csv'
+        set 'columns', 'id,score'
+
+        file 'basic.csv'
+        time 10000 // limit inflight 10s
+    }
+    qt_sql """ select id,score from ${tableName2} order by id;"""
+    sql """ DROP TABLE IF EXISTS ${tableName2}; """
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org

