This is an automated email from the ASF dual-hosted git repository.

morrysnow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 90c5461ad2 [fix](Nereids) let dml work well (#24748)
90c5461ad2 is described below

commit 90c5461ad2aefc6ac7cf29030bcfcf3c733c3595
Author: morrySnow <[email protected]>
AuthorDate: Tue Sep 26 21:08:24 2023 +0800

    [fix](Nereids) let dml work well (#24748)
    
    Co-authored-by: sohardforaname <[email protected]>
    
    TODO:
    1. support agg_state type
    2. support implicit cast literal exception
    3. use nereids to execute dml for these regression cases:
    
    - test_agg_state_nereids (for TODO 1)
    - test_array_insert_overflow (for TODO 2)
    - nereids_p0/json_p0/test_json_load_and_function (for TODO 2)
    - nereids_p0/json_p0/test_json_unique_load_and_function (for TODO 2)
    - nereids_p0/jsonb_p0/test_jsonb_load_and_function (for TODO 2)
    - nereids_p0/jsonb_p0/test_jsonb_unique_load_and_function (for TODO 2)
    - json_p0/test_json_load_and_function (for TODO 2)
    - json_p0/test_json_unique_load_and_function (for TODO 2)
    - jsonb_p0/test_jsonb_load_and_function (for TODO 2)
    - jsonb_p0/test_jsonb_unique_load_and_function (for TODO 2)
    - test_multi_partition_key (for TODO 2)
---
 .../glue/translator/PhysicalPlanTranslator.java    |   3 +-
 .../doris/nereids/rules/analysis/BindSink.java     | 319 ++++++++++++---------
 .../nereids/rules/expression/check/CheckCast.java  |  11 +-
 .../trees/expressions/literal/JsonLiteral.java     |  11 +-
 .../trees/plans/commands/CreateTableCommand.java   |   3 +-
 .../plans/commands/InsertIntoTableCommand.java     |  18 +-
 .../trees/plans/commands/UpdateCommand.java        |   8 +-
 .../trees/plans/logical/LogicalOlapTableSink.java  |   4 +-
 .../trees/plans/logical/LogicalSetOperation.java   |  52 +++-
 .../plans/physical/PhysicalOlapTableSink.java      |   4 +-
 .../apache/doris/nereids/types/StructField.java    |   4 +
 .../java/org/apache/doris/qe/SessionVariable.java  |   2 +-
 .../correctness/test_time_diff_microseconds.out    |   4 +-
 .../array_functions/test_array_with_scale_type.out |  52 ++--
 .../cast_function/test_cast_with_scale_type.out    |   8 +-
 .../struct_functions/test_struct_functions.out     | Bin 1376 -> 1379 bytes
 .../nereids/test_agg_state_nereids.groovy          |   6 +-
 .../insert_p0/test_array_insert_overflow.groovy    |   4 +
 .../json_p0/test_json_load_and_function.groovy     |   3 +
 .../test_json_load_unique_key_and_function.groovy  |   4 +
 .../jsonb_p0/test_jsonb_load_and_function.groovy   |   4 +
 .../test_jsonb_load_unique_key_and_function.groovy |   3 +
 .../nereids_function_p0/scalar_function/J.groovy   |   4 +
 .../nereids_p0/join/test_left_join_null.groovy     |   3 +
 .../json_p0/test_json_load_and_function.groovy     |   4 +
 .../test_json_load_unique_key_and_function.groovy  |   4 +
 .../jsonb_p0/test_jsonb_load_and_function.groovy   |   4 +
 .../test_jsonb_load_unique_key_and_function.groovy |   4 +
 .../test_multi_column_partition.groovy             |   4 +
 .../query_p0/join/test_left_join_null.groovy       |   3 +
 30 files changed, 353 insertions(+), 204 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
index b80b622190..5bbb7b2697 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
@@ -358,7 +358,7 @@ public class PhysicalPlanTranslator extends 
DefaultPlanVisitor<PlanFragment, Pla
         HashSet<String> partialUpdateCols = new HashSet<>();
         boolean isPartialUpdate = olapTableSink.isPartialUpdate();
         if (isPartialUpdate) {
-            OlapTable olapTable = (OlapTable) olapTableSink.getTargetTable();
+            OlapTable olapTable = olapTableSink.getTargetTable();
             if (!olapTable.getEnableUniqueKeyMergeOnWrite()) {
                 throw new AnalysisException("Partial update is only allowed in"
                         + "unique table with merge-on-write enabled.");
@@ -395,6 +395,7 @@ public class PhysicalPlanTranslator extends 
DefaultPlanVisitor<PlanFragment, Pla
             slotDesc.setType(column.getType());
             slotDesc.setColumn(column);
             slotDesc.setIsNullable(column.isAllowNull());
+            slotDesc.setAutoInc(column.isAutoInc());
         }
         OlapTableSink sink = new OlapTableSink(
                 olapTableSink.getTargetTable(),
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
index b8400caaeb..03aa3f700e 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java
@@ -35,8 +35,10 @@ import org.apache.doris.nereids.rules.RuleType;
 import org.apache.doris.nereids.rules.expression.ExpressionRewriteContext;
 import org.apache.doris.nereids.rules.expression.rules.FunctionBinder;
 import org.apache.doris.nereids.trees.expressions.Alias;
+import org.apache.doris.nereids.trees.expressions.Cast;
 import org.apache.doris.nereids.trees.expressions.Expression;
 import org.apache.doris.nereids.trees.expressions.NamedExpression;
+import org.apache.doris.nereids.trees.expressions.functions.scalar.Substring;
 import org.apache.doris.nereids.trees.expressions.literal.Literal;
 import org.apache.doris.nereids.trees.expressions.literal.NullLiteral;
 import 
org.apache.doris.nereids.trees.expressions.visitor.DefaultExpressionRewriter;
@@ -45,6 +47,8 @@ import 
org.apache.doris.nereids.trees.plans.logical.LogicalOlapTableSink;
 import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
 import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
 import org.apache.doris.nereids.types.DataType;
+import org.apache.doris.nereids.types.StringType;
+import org.apache.doris.nereids.types.coercion.CharacterType;
 import org.apache.doris.nereids.util.RelationUtil;
 import org.apache.doris.nereids.util.TypeCoercionUtils;
 import org.apache.doris.qe.ConnectContext;
@@ -56,6 +60,7 @@ import com.google.common.collect.Maps;
 
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 /**
@@ -66,162 +71,161 @@ public class BindSink implements AnalysisRuleFactory {
     @Override
     public List<Rule> buildRules() {
         return ImmutableList.of(
-                RuleType.BINDING_INSERT_TARGET_TABLE.build(
-                        unboundOlapTableSink().thenApply(ctx -> {
-                            UnboundOlapTableSink<?> sink = ctx.root;
-                            Pair<Database, OlapTable> pair = 
bind(ctx.cascadesContext, sink);
-                            Database database = pair.first;
-                            OlapTable table = pair.second;
+                
RuleType.BINDING_INSERT_TARGET_TABLE.build(unboundOlapTableSink().thenApply(ctx 
-> {
+                    UnboundOlapTableSink<?> sink = ctx.root;
+                    Pair<Database, OlapTable> pair = bind(ctx.cascadesContext, 
sink);
+                    Database database = pair.first;
+                    OlapTable table = pair.second;
 
-                            LogicalPlan child = ((LogicalPlan) sink.child());
+                    LogicalPlan child = ((LogicalPlan) sink.child());
+                    boolean isNeedSequenceCol = child.getOutput().stream()
+                            .anyMatch(slot -> 
slot.getName().equals(Column.SEQUENCE_COL));
 
-                            LogicalOlapTableSink<?> boundSink = new 
LogicalOlapTableSink<>(
-                                    database,
-                                    table,
-                                    bindTargetColumns(table, 
sink.getColNames()),
-                                    bindPartitionIds(table, 
sink.getPartitions()),
-                                    child.getOutput().stream()
-                                            .map(NamedExpression.class::cast)
-                                            
.collect(ImmutableList.toImmutableList()),
-                                    sink.isPartialUpdate(),
-                                    sink.isFromNativeInsertStmt(),
-                                    sink.child());
+                    LogicalOlapTableSink<?> boundSink = new 
LogicalOlapTableSink<>(
+                            database,
+                            table,
+                            bindTargetColumns(table, sink.getColNames(), 
isNeedSequenceCol),
+                            bindPartitionIds(table, sink.getPartitions()),
+                            child.getOutput().stream()
+                                    .map(NamedExpression.class::cast)
+                                    .collect(ImmutableList.toImmutableList()),
+                            sink.isPartialUpdate(),
+                            sink.isFromNativeInsertStmt(),
+                            sink.child());
 
-                            // we need to insert all the columns of the target 
table
-                            // although some columns are not mentions.
-                            // so we add a projects to supply the default 
value.
+                    // we need to insert all the columns of the target table
+                    // although some columns are not mentions.
+                    // so we add a projects to supply the default value.
 
-                            if (boundSink.getCols().size() != 
child.getOutput().size()) {
-                                throw new AnalysisException(
-                                        "insert into cols should be 
corresponding to the query output");
-                            }
+                    if (boundSink.getCols().size() != 
child.getOutput().size()) {
+                        throw new AnalysisException("insert into cols should 
be corresponding to the query output");
+                    }
 
-                            Map<Column, NamedExpression> columnToChildOutput = 
Maps.newHashMap();
-                            for (int i = 0; i < boundSink.getCols().size(); 
++i) {
-                                
columnToChildOutput.put(boundSink.getCols().get(i), child.getOutput().get(i));
-                            }
+                    Map<Column, NamedExpression> columnToChildOutput = 
Maps.newHashMap();
+                    for (int i = 0; i < boundSink.getCols().size(); ++i) {
+                        columnToChildOutput.put(boundSink.getCols().get(i), 
child.getOutput().get(i));
+                    }
 
-                            Map<String, NamedExpression> columnToOutput = 
Maps.newLinkedHashMap();
-                            NereidsParser expressionParser = new 
NereidsParser();
+                    Map<String, NamedExpression> columnToOutput = 
Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+                    NereidsParser expressionParser = new NereidsParser();
 
-                            // this is a trick way to avoid legacy planner's 
slotRef toSql output include label.
-                            // see more in 
org.apache.doris.analysis.SlotRef.toSqlImpl
-                            if (ConnectContext.get() != null) {
-                                
ConnectContext.get().getState().setIsQuery(true);
+                    // generate slots not mentioned in sql, mv slots and 
shaded slots.
+                    for (Column column : 
boundSink.getTargetTable().getFullSchema()) {
+                        if (column.isMaterializedViewColumn()) {
+                            List<SlotRef> refs = column.getRefColumns();
+                            // now we have to replace the column to slots.
+                            Preconditions.checkArgument(refs != null,
+                                    "mv column %s 's ref column cannot be 
null", column);
+                            Expression parsedExpression = 
expressionParser.parseExpression(
+                                    column.getDefineExpr().toSqlWithoutTbl());
+                            Expression boundSlotExpression = 
SlotReplacer.INSTANCE
+                                    .replace(parsedExpression, columnToOutput);
+                            // the boundSlotExpression is an expression whose 
slots are bound but function
+                            // may not be bound, we have to bind it again.
+                            // for example: to_bitmap.
+                            Expression boundExpression = 
FunctionBinder.INSTANCE.rewrite(
+                                    boundSlotExpression, new 
ExpressionRewriteContext(ctx.cascadesContext));
+                            if (boundExpression instanceof Alias) {
+                                boundExpression = ((Alias) 
boundExpression).child();
                             }
-                            try {
-                                // generate slots not mentioned in sql, mv 
slots and shaded slots.
-                                for (Column column : 
boundSink.getTargetTable().getFullSchema()) {
-                                    if (column.isMaterializedViewColumn()) {
-                                        List<SlotRef> refs = 
column.getRefColumns();
-                                        // now we have to replace the column 
to slots.
-                                        Preconditions.checkArgument(refs != 
null,
-                                                "mv column's ref column cannot 
be null");
-                                        Expression parsedExpression = 
expressionParser.parseExpression(
-                                                
column.getDefineExpr().toSql());
-                                        Expression boundSlotExpression = 
SlotReplacer.INSTANCE
-                                                .replace(parsedExpression, 
columnToOutput);
-                                        // the boundSlotExpression is an 
expression whose slots are bound but function
-                                        // may not be bound, we have to bind 
it again.
-                                        // for example: to_bitmap.
-                                        Expression boundExpression = 
FunctionBinder.INSTANCE.rewrite(
-                                                boundSlotExpression, new 
ExpressionRewriteContext(ctx.cascadesContext));
-
-                                        NamedExpression slot = boundExpression 
instanceof NamedExpression
-                                                ? ((NamedExpression) 
boundExpression)
-                                                : new Alias(boundExpression);
-
-                                        columnToOutput.put(column.getName(), 
slot);
-                                    } else if 
(columnToChildOutput.containsKey(column)) {
-                                        columnToOutput.put(column.getName(), 
columnToChildOutput.get(column));
+                            NamedExpression slot = new Alias(boundExpression, 
column.getDefineExpr().toSqlWithoutTbl());
+                            columnToOutput.put(column.getName(), slot);
+                        } else if (columnToChildOutput.containsKey(column)) {
+                            columnToOutput.put(column.getName(), 
columnToChildOutput.get(column));
+                        } else {
+                            if (table.hasSequenceCol()
+                                    && 
column.getName().equals(Column.SEQUENCE_COL)
+                                    && table.getSequenceMapCol() != null) {
+                                Optional<Column> seqCol = 
table.getFullSchema().stream()
+                                        .filter(col -> 
col.getName().equals(table.getSequenceMapCol()))
+                                        .findFirst();
+                                if (!seqCol.isPresent()) {
+                                    throw new AnalysisException("sequence 
column is not contained in"
+                                            + " target table " + 
table.getName());
+                                }
+                                columnToOutput.put(column.getName(), 
columnToOutput.get(seqCol.get().getName()));
+                            } else if (sink.isPartialUpdate()) {
+                                // If the current load is a partial update, 
the values of unmentioned
+                                // columns will be filled in SegmentWriter. 
And the output of sink node
+                                // should not contain these unmentioned 
columns, so we just skip them.
+                                continue;
+                            } else if (column.getDefaultValue() == null) {
+                                // Otherwise, the unmentioned columns should 
be filled with default values
+                                // or null values
+                                columnToOutput.put(column.getName(), new Alias(
+                                        new 
NullLiteral(DataType.fromCatalogType(column.getType())),
+                                        column.getName()
+                                ));
+                            } else {
+                                try {
+                                    // it comes from the original planner, if 
default value expression is
+                                    // null, we use the literal string of the 
default value, or it may be
+                                    // default value function, like 
CURRENT_TIMESTAMP.
+                                    if (column.getDefaultValueExpr() == null) {
+                                        columnToOutput.put(column.getName(),
+                                                new 
Alias(Literal.of(column.getDefaultValue())
+                                                        
.checkedCastTo(DataType.fromCatalogType(column.getType())),
+                                                        column.getName()));
                                     } else {
-                                        if (table.hasSequenceCol()
-                                                && 
column.getName().equals(Column.SEQUENCE_COL)
-                                                && table.getSequenceMapCol() 
!= null) {
-                                            Column seqCol = 
table.getFullSchema().stream()
-                                                    .filter(col -> 
col.getName().equals(table.getSequenceMapCol()))
-                                                    .findFirst().get();
-                                            
columnToOutput.put(column.getName(), columnToOutput.get(seqCol.getName()));
-                                        } else if (sink.isPartialUpdate()) {
-                                            // If the current load is a 
partial update, the values of unmentioned
-                                            // columns will be filled in 
SegmentWriter. And the output of sink node
-                                            // should not contain these 
unmentioned columns, so we just skip them.
-                                            continue;
-                                        } else if (column.getDefaultValue() == 
null) {
-                                            // Otherwise, the unmentioned 
columns should be filled with default values
-                                            // or null values
-                                            
columnToOutput.put(column.getName(), new Alias(
-                                                    new 
NullLiteral(DataType.fromCatalogType(column.getType())),
-                                                    column.getName()
-                                            ));
-                                        } else {
-                                            try {
-                                                // it comes from the original 
planner, if default value expression is
-                                                // null, we use the literal 
string of the default value, or it may be
-                                                // default value function, 
like CURRENT_TIMESTAMP.
-                                                if 
(column.getDefaultValueExpr() == null) {
-                                                    
columnToOutput.put(column.getName(),
-                                                            new 
Alias(Literal.of(column.getDefaultValue())
-                                                                    
.checkedCastTo(
-                                                                            
DataType.fromCatalogType(column.getType())),
-                                                                    
column.getName()));
-                                                } else {
-                                                    Expression 
defualtValueExpression = FunctionBinder.INSTANCE.rewrite(
-                                                            new 
NereidsParser().parseExpression(
-                                                                    
column.getDefaultValueExpr().toSql()),
-                                                            new 
ExpressionRewriteContext(ctx.cascadesContext));
-                                                    NamedExpression slot =
-                                                            
defualtValueExpression instanceof NamedExpression
-                                                                    ? 
((NamedExpression) defualtValueExpression)
-                                                                    : new 
Alias(defualtValueExpression);
-
-                                                    
columnToOutput.put(column.getName(), slot);
-                                                }
-                                            } catch (Exception e) {
-                                                throw new 
AnalysisException(e.getMessage(), e.getCause());
-                                            }
+                                        Expression defualtValueExpression = 
FunctionBinder.INSTANCE.rewrite(
+                                                new 
NereidsParser().parseExpression(
+                                                        
column.getDefaultValueExpr().toSqlWithoutTbl()),
+                                                new 
ExpressionRewriteContext(ctx.cascadesContext));
+                                        if (defualtValueExpression instanceof 
Alias) {
+                                            defualtValueExpression = ((Alias) 
defualtValueExpression).child();
                                         }
+                                        columnToOutput.put(column.getName(),
+                                                new 
Alias(defualtValueExpression, column.getName()));
                                     }
-                                }
-                            } finally {
-                                if (ConnectContext.get() != null) {
-                                    // this is a trick way to avoid legacy 
planner's slotRef toSql output include label
-                                    // set back to original value.
-                                    
ConnectContext.get().getState().setIsQuery(false);
+                                } catch (Exception e) {
+                                    throw new 
AnalysisException(e.getMessage(), e.getCause());
                                 }
                             }
-                            List<NamedExpression> fullOutputExprs = 
ImmutableList.copyOf(columnToOutput.values());
+                        }
+                    }
+                    List<NamedExpression> fullOutputExprs = 
ImmutableList.copyOf(columnToOutput.values());
 
-                            LogicalProject<?> fullOutputProject = new 
LogicalProject<>(fullOutputExprs,
-                                    boundSink.child());
+                    LogicalProject<?> fullOutputProject = new 
LogicalProject<>(fullOutputExprs, boundSink.child());
 
-                            // add cast project
-                            List<NamedExpression> castExprs = 
Lists.newArrayList();
-                            for (int i = 0; i < table.getFullSchema().size(); 
++i) {
-                                Column col = table.getFullSchema().get(i);
-                                NamedExpression expr = (NamedExpression) 
columnToOutput.get(col.getName());
-                                if (expr == null) {
-                                    // If `expr` is null, it means that the 
current load is a partial update
-                                    // and `col` should not be contained in 
the output of the sink node so
-                                    // we skip it.
-                                    continue;
-                                }
-                                Expression castExpr = 
TypeCoercionUtils.castIfNotSameType(
-                                        expr,
-                                        
DataType.fromCatalogType(col.getType()));
-                                if (castExpr instanceof NamedExpression) {
-                                    castExprs.add(((NamedExpression) 
castExpr));
-                                } else {
-                                    castExprs.add(new Alias(castExpr));
-                                }
-                            }
-                            if (!castExprs.equals(fullOutputExprs)) {
-                                fullOutputProject = new 
LogicalProject<Plan>(castExprs, fullOutputProject);
+                    // add cast project
+                    List<NamedExpression> castExprs = Lists.newArrayList();
+                    for (int i = 0; i < table.getFullSchema().size(); ++i) {
+                        Column col = table.getFullSchema().get(i);
+                        NamedExpression expr = 
columnToOutput.get(col.getName());
+                        if (expr == null) {
+                            // If `expr` is null, it means that the current 
load is a partial update
+                            // and `col` should not be contained in the output 
of the sink node so
+                            // we skip it.
+                            continue;
+                        }
+                        maybeFallbackCastUnsupportedType(expr, 
ctx.connectContext);
+                        DataType inputType = expr.getDataType();
+                        DataType targetType = 
DataType.fromCatalogType(table.getFullSchema().get(i).getType());
+                        Expression castExpr = expr;
+                        if (isSourceAndTargetStringLikeType(inputType, 
targetType)) {
+                            int sourceLength = ((CharacterType) 
inputType).getLen();
+                            int targetLength = ((CharacterType) 
targetType).getLen();
+                            if (sourceLength >= targetLength && targetLength 
>= 0) {
+                                castExpr = new Substring(castExpr, 
Literal.of(1), Literal.of(targetLength));
+                            } else if (targetType.isStringType()) {
+                                castExpr = new Cast(castExpr, 
StringType.INSTANCE);
                             }
+                        } else {
+                            castExpr = 
TypeCoercionUtils.castIfNotSameType(castExpr, targetType);
+                        }
+                        if (castExpr instanceof NamedExpression) {
+                            castExprs.add(((NamedExpression) castExpr));
+                        } else {
+                            castExprs.add(new Alias(castExpr));
+                        }
+                    }
+                    if (!castExprs.equals(fullOutputExprs)) {
+                        fullOutputProject = new 
LogicalProject<Plan>(castExprs, fullOutputProject);
+                    }
 
-                            return 
boundSink.withChildAndUpdateOutput(fullOutputProject);
+                    return 
boundSink.withChildAndUpdateOutput(fullOutputProject);
 
-                        })),
+                })),
                 RuleType.BINDING_INSERT_FILE.build(
                         logicalFileSink().when(s -> 
s.getOutputExprs().isEmpty())
                                 .then(fileSink -> fileSink.withOutputExprs(
@@ -238,6 +242,11 @@ public class BindSink implements AnalysisRuleFactory {
         Pair<DatabaseIf, TableIf> pair = 
RelationUtil.getDbAndTable(tableQualifier,
                 cascadesContext.getConnectContext().getEnv());
         if (!(pair.second instanceof OlapTable)) {
+            try {
+                
cascadesContext.getConnectContext().getSessionVariable().enableFallbackToOriginalPlannerOnce();
+            } catch (Exception e) {
+                throw new AnalysisException("fall back failed");
+            }
             throw new AnalysisException("the target table of insert into is 
not an OLAP table");
         }
         return Pair.of(((Database) pair.first), (OlapTable) pair.second);
@@ -256,11 +265,12 @@ public class BindSink implements AnalysisRuleFactory {
                 }).collect(Collectors.toList());
     }
 
-    private List<Column> bindTargetColumns(OlapTable table, List<String> 
colsName) {
+    private List<Column> bindTargetColumns(OlapTable table, List<String> 
colsName, boolean isNeedSequenceCol) {
+        // if the table set sequence column in stream load phase, the sequence 
map column is null, we query it.
         return colsName.isEmpty()
-                ? table.getFullSchema().stream().filter(column -> 
column.isVisible()
-                        && !column.isMaterializedViewColumn())
-                .collect(Collectors.toList())
+                ? table.getFullSchema().stream()
+                .filter(c -> validColumn(c, isNeedSequenceCol))
+                .collect(ImmutableList.toImmutableList())
                 : colsName.stream().map(cn -> {
                     Column column = table.getColumn(cn);
                     if (column == null) {
@@ -268,7 +278,27 @@ public class BindSink implements AnalysisRuleFactory {
                                 cn, table.getName()));
                     }
                     return column;
-                }).collect(Collectors.toList());
+                }).collect(ImmutableList.toImmutableList());
+    }
+
+    private void maybeFallbackCastUnsupportedType(Expression expression, 
ConnectContext ctx) {
+        if (expression.getDataType().isMapType()) {
+            try {
+                ctx.getSessionVariable().enableFallbackToOriginalPlannerOnce();
+            } catch (Exception e) {
+                throw new AnalysisException("failed to try to fall back to 
original planner");
+            }
+            throw new AnalysisException("failed to cast type when binding 
sink, type is: " + expression.getDataType());
+        }
+    }
+
+    private boolean isSourceAndTargetStringLikeType(DataType input, DataType 
target) {
+        return input.isStringLikeType() && target.isStringLikeType();
+    }
+
+    private boolean validColumn(Column column, boolean isNeedSequenceCol) {
+        return (column.isVisible() || (isNeedSequenceCol && 
column.isSequenceColumn()))
+                && !column.isMaterializedViewColumn();
     }
 
     private static class SlotReplacer extends 
DefaultExpressionRewriter<Map<String, NamedExpression>> {
@@ -280,6 +310,9 @@ public class BindSink implements AnalysisRuleFactory {
 
         @Override
         public Expression visitUnboundSlot(UnboundSlot unboundSlot, 
Map<String, NamedExpression> replaceMap) {
+            if (!replaceMap.containsKey(unboundSlot.getName())) {
+                throw new AnalysisException("cannot find column from target 
table " + unboundSlot.getNameParts());
+            }
             return replaceMap.get(unboundSlot.getName());
         }
     }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/check/CheckCast.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/check/CheckCast.java
index fe25bb8e7b..af475b42c5 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/check/CheckCast.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/check/CheckCast.java
@@ -51,6 +51,12 @@ public class CheckCast extends AbstractExpressionRewriteRule 
{
     }
 
     private boolean check(DataType originalType, DataType targetType) {
+        if (originalType.isNullType()) {
+            return true;
+        }
+        if (originalType.equals(targetType)) {
+            return true;
+        }
         if (originalType instanceof ArrayType && targetType instanceof 
ArrayType) {
             return check(((ArrayType) originalType).getItemType(), 
((ArrayType) targetType).getItemType());
         } else if (originalType instanceof MapType && targetType instanceof 
MapType) {
@@ -63,7 +69,10 @@ public class CheckCast extends AbstractExpressionRewriteRule 
{
                 return false;
             }
             for (int i = 0; i < targetFields.size(); i++) {
-                if (!targetFields.get(i).equals(originalFields.get(i))) {
+                if (originalFields.get(i).isNullable() != 
targetFields.get(i).isNullable()) {
+                    return false;
+                }
+                if (!check(originalFields.get(i).getDataType(), 
targetFields.get(i).getDataType())) {
                     return false;
                 }
             }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/JsonLiteral.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/JsonLiteral.java
index 779be0ca54..3897239179 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/JsonLiteral.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/JsonLiteral.java
@@ -32,7 +32,6 @@ public class JsonLiteral extends Literal {
 
     private static final ObjectMapper MAPPER = new ObjectMapper();
 
-    private final JsonNode jsonNode;
     private final String value;
 
     /**
@@ -40,15 +39,17 @@ public class JsonLiteral extends Literal {
      */
     public JsonLiteral(String value) {
         super(JsonType.INSTANCE);
+        JsonNode jsonNode;
         try {
             jsonNode = MAPPER.readTree(value);
         } catch (JsonProcessingException e) {
             throw new AnalysisException("Invalid jsonb literal: '" + value + 
"'. because " + e.getMessage());
         }
-        if (jsonNode.isMissingNode()) {
+        if (jsonNode == null || jsonNode.isMissingNode()) {
             throw new AnalysisException("Invalid jsonb literal: ''");
+        } else {
+            this.value = jsonNode.toString();
         }
-        this.value = jsonNode.toString();
     }
 
     @Override
@@ -57,8 +58,8 @@ public class JsonLiteral extends Literal {
     }
 
     @Override
-    public JsonNode getValue() {
-        return jsonNode;
+    public String getValue() {
+        return value;
     }
 
     @Override
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateTableCommand.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateTableCommand.java
index 6f5cf65667..43e3b0ce64 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateTableCommand.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateTableCommand.java
@@ -107,7 +107,8 @@ public class CreateTableCommand extends Command implements 
ForwardWithSync {
             } else if (i == 0 && dataType.isStringType()) {
                 dataType = 
VarcharType.createVarcharType(ScalarType.MAX_VARCHAR_LENGTH);
             }
-            columnsOfQuery.add(new ColumnDefinition(s.getName(), dataType, 
s.nullable()));
+            // if the column is an expression, we set it to nullable; 
otherwise we follow the nullability of the slot.
+            columnsOfQuery.add(new ColumnDefinition(s.getName(), dataType, 
!s.isColumnFromTable() || s.nullable()));
         }
         createTableInfo.validateCreateTableAsSelect(columnsOfQuery.build(), 
ctx);
         CreateTableStmt createTableStmt = 
createTableInfo.translateToLegacyStmt();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java
index f87b79308c..780ba178f0 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java
@@ -81,8 +81,8 @@ public class InsertIntoTableCommand extends Command 
implements ForwardWithSync,
 
     private final LogicalPlan logicalQuery;
     private final Optional<String> labelName;
+    private final boolean isOverwrite;
     private NereidsPlanner planner;
-    private boolean isOverwrite;
     private boolean isTxnBegin = false;
 
     /**
@@ -129,10 +129,10 @@ public class InsertIntoTableCommand extends Command 
implements ForwardWithSync,
         }
         String label = this.labelName.orElse(String.format("label_%x_%x", 
ctx.queryId().hi, ctx.queryId().lo));
 
-        Optional<TreeNode> plan = ((Set<TreeNode>) planner.getPhysicalPlan()
-                .collect(node -> node instanceof 
PhysicalOlapTableSink)).stream().findAny();
+        Optional<TreeNode<?>> plan = (planner.getPhysicalPlan()
+                .<Set<TreeNode<?>>>collect(node -> node instanceof 
PhysicalOlapTableSink)).stream().findAny();
         Preconditions.checkArgument(plan.isPresent(), "insert into command 
must contain OlapTableSinkNode");
-        PhysicalOlapTableSink<?> physicalOlapTableSink = 
((PhysicalOlapTableSink) plan.get());
+        PhysicalOlapTableSink<?> physicalOlapTableSink = 
((PhysicalOlapTableSink<?>) plan.get());
 
         if (isOverwrite) {
             dealOverwrite(ctx, executor, physicalOlapTableSink);
@@ -189,14 +189,15 @@ public class InsertIntoTableCommand extends Command 
implements ForwardWithSync,
      * @param ctx ctx
      * @param executor executor
      * @param physicalOlapTableSink physicalOlapTableSink
+     *
      * @throws Exception Exception
      */
-    public void dealOverwrite(ConnectContext ctx, StmtExecutor executor, 
PhysicalOlapTableSink<?> physicalOlapTableSink)
-            throws Exception {
+    public void dealOverwrite(ConnectContext ctx, StmtExecutor executor,
+            PhysicalOlapTableSink<?> physicalOlapTableSink) throws Exception {
         OlapTable targetTable = physicalOlapTableSink.getTargetTable();
         TableName tableName = new 
TableName(InternalCatalog.INTERNAL_CATALOG_NAME, 
targetTable.getQualifiedDbName(),
                 targetTable.getName());
-        List partitionNames = ((UnboundOlapTableSink) 
logicalQuery).getPartitions();
+        List<String> partitionNames = ((UnboundOlapTableSink<?>) 
logicalQuery).getPartitions();
         if (CollectionUtils.isEmpty(partitionNames)) {
             partitionNames = 
Lists.newArrayList(targetTable.getPartitionNames());
         }
@@ -243,12 +244,11 @@ public class InsertIntoTableCommand extends Command 
implements ForwardWithSync,
      * @param executor executor
      * @param tempPartitionNames tempPartitionNames
      * @param tableName tableName
-     * @throws Exception Exception
      */
     private boolean insertInto(ConnectContext ctx, StmtExecutor executor, 
List<String> tempPartitionNames,
             TableName tableName) {
         try {
-            UnboundOlapTableSink sink = (UnboundOlapTableSink) logicalQuery;
+            UnboundOlapTableSink<?> sink = (UnboundOlapTableSink<?>) 
logicalQuery;
             UnboundOlapTableSink<?> copySink = new UnboundOlapTableSink<>(
                     sink.getNameParts(),
                     sink.getColNames(),
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateCommand.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateCommand.java
index 0a0b3fd9a0..742b2ea801 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateCommand.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateCommand.java
@@ -105,7 +105,8 @@ public class UpdateCommand extends Command implements 
ForwardWithSync, Explainab
         List<NamedExpression> selectItems = Lists.newArrayList();
         String tableName = tableAlias != null ? tableAlias : 
targetTable.getName();
         for (Column column : targetTable.getFullSchema()) {
-            if (!column.isVisible()) {
+            // if it sets the sequence column in the stream load phase, the 
sequence map column is null, so we query it.
+            if (!column.isVisible() && !column.isSequenceColumn()) {
                 continue;
             }
             if (colNameToExpression.containsKey(column.getName())) {
@@ -123,9 +124,12 @@ public class UpdateCommand extends Command implements 
ForwardWithSync, Explainab
             logicalQuery = ((LogicalPlan) 
cte.get().withChildren(logicalQuery));
         }
 
+        boolean isPartialUpdate = targetTable.getEnableUniqueKeyMergeOnWrite()
+                && selectItems.size() < targetTable.getColumns().size();
+
         // make UnboundTableSink
         return new UnboundOlapTableSink<>(nameParts, ImmutableList.of(), 
ImmutableList.of(),
-                ImmutableList.of(), logicalQuery);
+                ImmutableList.of(), isPartialUpdate, logicalQuery);
     }
 
     private void checkTable(ConnectContext ctx) throws AnalysisException {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOlapTableSink.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOlapTableSink.java
index d7d890dda6..a3641057a7 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOlapTableSink.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOlapTableSink.java
@@ -138,8 +138,8 @@ public class LogicalOlapTableSink<CHILD_TYPE extends Plan> 
extends LogicalSink<C
     public String toString() {
         return Utils.toSqlString("LogicalOlapTableSink[" + id.asInt() + "]",
                 "outputExprs", outputExprs,
-                "database", database,
-                "targetTable", targetTable,
+                "database", database.getFullName(),
+                "targetTable", targetTable.getName(),
                 "cols", cols,
                 "partitionIds", partitionIds,
                 "isPartialUpdate", isPartialUpdate,
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java
index 41ea6d09ef..d19b939876 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java
@@ -18,6 +18,7 @@
 package org.apache.doris.nereids.trees.plans.logical;
 
 import org.apache.doris.catalog.Type;
+import org.apache.doris.nereids.exceptions.AnalysisException;
 import org.apache.doris.nereids.memo.GroupExpression;
 import org.apache.doris.nereids.properties.LogicalProperties;
 import org.apache.doris.nereids.trees.expressions.Alias;
@@ -30,7 +31,11 @@ import org.apache.doris.nereids.trees.plans.Plan;
 import org.apache.doris.nereids.trees.plans.PlanType;
 import org.apache.doris.nereids.trees.plans.algebra.SetOperation;
 import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor;
+import org.apache.doris.nereids.types.ArrayType;
 import org.apache.doris.nereids.types.DataType;
+import org.apache.doris.nereids.types.MapType;
+import org.apache.doris.nereids.types.StructField;
+import org.apache.doris.nereids.types.StructType;
 import org.apache.doris.nereids.util.TypeCoercionUtils;
 
 import com.google.common.base.Preconditions;
@@ -137,10 +142,7 @@ public abstract class LogicalSetOperation extends 
AbstractLogicalPlan implements
         for (int i = 0; i < child(0).getOutput().size(); ++i) {
             Slot left = child(0).getOutput().get(i);
             Slot right = child(1).getOutput().get(i);
-            DataType compatibleType = 
DataType.fromCatalogType(Type.getAssignmentCompatibleType(
-                    left.getDataType().toCatalogDataType(),
-                    right.getDataType().toCatalogDataType(),
-                    false));
+            DataType compatibleType = 
getAssignmentCompatibleType(left.getDataType(), right.getDataType());
             Expression newLeft = TypeCoercionUtils.castIfNotSameType(left, 
compatibleType);
             Expression newRight = TypeCoercionUtils.castIfNotSameType(right, 
compatibleType);
             if (newLeft instanceof Cast) {
@@ -211,4 +213,46 @@ public abstract class LogicalSetOperation extends 
AbstractLogicalPlan implements
     public int getArity() {
         return children.size();
     }
+
+    private DataType getAssignmentCompatibleType(DataType left, DataType 
right) {
+        if (left.isNullType()) {
+            return right;
+        }
+        if (right.isNullType()) {
+            return left;
+        }
+        if (left.equals(right)) {
+            return left;
+        }
+        if (left instanceof ArrayType && right instanceof ArrayType) {
+            return ArrayType.of(getAssignmentCompatibleType(
+                    ((ArrayType) left).getItemType(), ((ArrayType) 
right).getItemType()));
+        }
+        if (left instanceof MapType && right instanceof MapType) {
+            return MapType.of(
+                    getAssignmentCompatibleType(((MapType) left).getKeyType(), 
((MapType) right).getKeyType()),
+                    getAssignmentCompatibleType(((MapType) 
left).getValueType(), ((MapType) right).getValueType()));
+        }
+        if (left instanceof StructType && right instanceof StructType) {
+            List<StructField> leftFields = ((StructType) left).getFields();
+            List<StructField> rightFields = ((StructType) right).getFields();
+            if (leftFields.size() != rightFields.size()) {
+                throw new AnalysisException(
+                        "could not get common type for two different struct 
type " + left + ", " + right);
+            }
+            ImmutableList.Builder<StructField> commonFields = 
ImmutableList.builder();
+            for (int i = 0; i < leftFields.size(); i++) {
+                boolean nullable = leftFields.get(i).isNullable() || 
rightFields.get(i).isNullable();
+                DataType commonType = getAssignmentCompatibleType(
+                        leftFields.get(i).getDataType(), 
rightFields.get(i).getDataType());
+                StructField commonField = 
leftFields.get(i).withDataTypeAndNulalble(commonType, nullable);
+                commonFields.add(commonField);
+            }
+            return new StructType(commonFields.build());
+        }
+        return DataType.fromCatalogType(Type.getAssignmentCompatibleType(
+                left.toCatalogDataType(),
+                right.toCatalogDataType(),
+                false));
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java
index 3c0a7177fc..093c87281e 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java
@@ -157,8 +157,8 @@ public class PhysicalOlapTableSink<CHILD_TYPE extends Plan> 
extends PhysicalSink
     public String toString() {
         return Utils.toSqlString("LogicalOlapTableSink[" + id.asInt() + "]",
                 "outputExprs", outputExprs,
-                "database", database,
-                "targetTable", targetTable,
+                "database", database.getFullName(),
+                "targetTable", targetTable.getName(),
                 "cols", cols,
                 "partitionIds", partitionIds,
                 "singleReplicaLoad", singleReplicaLoad,
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/types/StructField.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/types/StructField.java
index 135866738f..fed07259be 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/types/StructField.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/types/StructField.java
@@ -71,6 +71,10 @@ public class StructField {
         return new StructField(name, dataType, nullable, comment);
     }
 
+    public StructField withDataTypeAndNulalble(DataType dataType, boolean 
nullable) {
+        return new StructField(name, dataType, nullable, comment);
+    }
+
     public org.apache.doris.catalog.StructField toCatalogDataType() {
         return new org.apache.doris.catalog.StructField(
                 name, dataType.toCatalogDataType(), comment, nullable);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java 
b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
index 52a68b9e04..8a138aeda4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
@@ -689,7 +689,7 @@ public class SessionVariable implements Serializable, 
Writable {
     public boolean extractWideRangeExpr = true;
 
     @VariableMgr.VarAttr(name = ENABLE_NEREIDS_DML, needForward = true)
-    public boolean enableNereidsDML = false;
+    public boolean enableNereidsDML = true;
 
     @VariableMgr.VarAttr(name = ENABLE_STRICT_CONSISTENCY_DML, needForward = 
true)
     public boolean enableStrictConsistencyDml = false;
diff --git a/regression-test/data/correctness/test_time_diff_microseconds.out 
b/regression-test/data/correctness/test_time_diff_microseconds.out
index dbeeb067f2..cf0f6e93ab 100644
--- a/regression-test/data/correctness/test_time_diff_microseconds.out
+++ b/regression-test/data/correctness/test_time_diff_microseconds.out
@@ -1,7 +1,7 @@
 -- This file is automatically generated. You should know what you did if you 
want to edit this
 -- !select1 --
 67:19:00.0000
-24:00:00.5140
+24:00:00.5150
 -437:00:00.7683
 
 -- !select2 --
@@ -15,7 +15,7 @@
 
 -- !select5 --
 67:19:00.0000
-24:00:00.5140
+24:00:00.5150
 -437:00:00.7683
 
 -- !select6 --
diff --git 
a/regression-test/data/query_p0/sql_functions/array_functions/test_array_with_scale_type.out
 
b/regression-test/data/query_p0/sql_functions/array_functions/test_array_with_scale_type.out
index c20f9977c3..70feafa910 100644
--- 
a/regression-test/data/query_p0/sql_functions/array_functions/test_array_with_scale_type.out
+++ 
b/regression-test/data/query_p0/sql_functions/array_functions/test_array_with_scale_type.out
@@ -6,8 +6,8 @@
 2022-12-02T22:23:25
 
 -- !select --
-2022-12-01T22:23:24.999
-2022-12-02T22:23:24.999
+2022-12-01T22:23:25
+2022-12-02T22:23:25
 
 -- !select --
 2022-12-02T22:23:24.999999
@@ -16,8 +16,8 @@
 2022-12-02T22:23:25
 
 -- !select --
-2022-12-01T23:23:24.999
-2022-12-02T23:23:24.999
+2022-12-01T23:23:25
+2022-12-02T23:23:25
 
 -- !select --
 23
@@ -76,8 +76,8 @@
 [24.990, 25.990]
 
 -- !select --
-[2022-12-01 22:23:24.999]
-[2022-12-02 22:23:24.999]
+[2022-12-01 22:23:25.000]
+[2022-12-02 22:23:25.000]
 
 -- !select --
 [2022-12-02 22:23:25.000, 2022-12-02 22:23:23.998]
@@ -91,8 +91,8 @@
 []
 
 -- !select --
-[2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999]
-[2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999]
+[2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]
+[2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]
 
 -- !select --
 \N
@@ -115,44 +115,44 @@
 [2022-12-02 22:23:24.999, 2022-12-02 22:23:23.997]
 
 -- !select --
-[2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999]
-[2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999]
+[2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]
+[2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]
 
 -- !select --
-[2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999, 2022-12-02 22:23:24.999, 
2022-12-02 22:23:23.997]
-[2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999, 2022-12-02 22:23:24.999, 
2022-12-02 22:23:23.997]
+[2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000, 2022-12-02 22:23:24.999, 
2022-12-02 22:23:23.997]
+[2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000, 2022-12-02 22:23:24.999, 
2022-12-02 22:23:23.997]
 
 -- !select --
 [22.679, 33.679, 22.679, 33.679, 22.679, 33.679]
 [23.679, 34.679, 23.679, 34.679, 23.679, 34.679]
 
 -- !select --
-[{22.679, 22.679, 2022-12-01 22:23:24.999, 22.679}, {33.679, 33.679, 
2022-12-01 23:23:24.999, 33.679}]
-[{23.679, 23.679, 2022-12-02 22:23:24.999, 23.679}, {34.679, 34.679, 
2022-12-02 23:23:24.999, 34.679}]
+[{22.679, 22.679, 2022-12-01 22:23:25.000, 22.679}, {33.679, 33.679, 
2022-12-01 23:23:25.000, 33.679}]
+[{23.679, 23.679, 2022-12-02 22:23:25.000, 23.679}, {34.679, 34.679, 
2022-12-02 23:23:25.000, 34.679}]
 
 -- !select --
 [{2022-12-02 22:23:24.999}, {2022-12-02 22:23:23.997}]
 [{2022-12-02 22:23:24.999}, {2022-12-02 22:23:23.997}]
 
 -- !select --
-[{2022-12-01 22:23:24.999}, {2022-12-01 23:23:24.999}]
-[{2022-12-02 22:23:24.999}, {2022-12-02 23:23:24.999}]
+[{2022-12-01 22:23:25.000}, {2022-12-01 23:23:25.000}]
+[{2022-12-02 22:23:25.000}, {2022-12-02 23:23:25.000}]
 
 -- !select --
-[{2022-12-01 22:23:24.999, 2022-12-02 22:23:24.999}, {2022-12-01 23:23:24.999, 
2022-12-02 22:23:23.997}]
-[{2022-12-02 22:23:24.999, 2022-12-02 22:23:24.999}, {2022-12-02 23:23:24.999, 
2022-12-02 22:23:23.997}]
+[{2022-12-01 22:23:25.000, 2022-12-02 22:23:24.999}, {2022-12-01 23:23:25.000, 
2022-12-02 22:23:23.997}]
+[{2022-12-02 22:23:25.000, 2022-12-02 22:23:24.999}, {2022-12-02 23:23:25.000, 
2022-12-02 22:23:23.997}]
 
 -- !select --
 [2022-12-02 22:23:23.997, 2022-12-02 22:23:24.999]
 [2022-12-02 22:23:23.997, 2022-12-02 22:23:24.999]
 
 -- !select --
-[2023-03-08 23:23:23.997, 2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999]
-[2023-03-08 23:23:23.997, 2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999]
+[2023-03-08 23:23:23.997, 2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]
+[2023-03-08 23:23:23.997, 2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]
 
 -- !select --
-2022-12-01T22:23:24.999        [2022-12-01 22:23:24.999, 2022-12-01 
23:23:24.999]      [2022-12-01 22:23:24.999, 2022-12-01 22:23:24.999, 
2022-12-01 23:23:24.999]
-2022-12-02T22:23:24.999        [2022-12-02 22:23:24.999, 2022-12-02 
23:23:24.999]      [2022-12-02 22:23:24.999, 2022-12-02 22:23:24.999, 
2022-12-02 23:23:24.999]
+2022-12-01T22:23:25    [2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]      
[2022-12-01 22:23:25.000, 2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]
+2022-12-02T22:23:25    [2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]      
[2022-12-02 22:23:25.000, 2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]
 
 -- !select --
 [25.990, 22.679, 33.679]
@@ -167,12 +167,12 @@
 [2022-12-02 22:23:24.999, 2022-12-02 22:23:23.997]
 
 -- !select --
-[2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999, 2023-03-08 23:23:23.997]
-[2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999, 2023-03-08 23:23:23.997]
+[2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000, 2023-03-08 23:23:23.997]
+[2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000, 2023-03-08 23:23:23.997]
 
 -- !select --
-2022-12-01T22:23:24.999        [2022-12-01 22:23:24.999, 2022-12-01 
23:23:24.999]      [2022-12-01 22:23:24.999, 2022-12-01 23:23:24.999, 
2022-12-01 22:23:24.999]
-2022-12-02T22:23:24.999        [2022-12-02 22:23:24.999, 2022-12-02 
23:23:24.999]      [2022-12-02 22:23:24.999, 2022-12-02 23:23:24.999, 
2022-12-02 22:23:24.999]
+2022-12-01T22:23:25    [2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000]      
[2022-12-01 22:23:25.000, 2022-12-01 23:23:25.000, 2022-12-01 22:23:25.000]
+2022-12-02T22:23:25    [2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000]      
[2022-12-02 22:23:25.000, 2022-12-02 23:23:25.000, 2022-12-02 22:23:25.000]
 
 -- !select --
 [22.679, 33.679, 25.990]
diff --git 
a/regression-test/data/query_p0/sql_functions/cast_function/test_cast_with_scale_type.out
 
b/regression-test/data/query_p0/sql_functions/cast_function/test_cast_with_scale_type.out
index 876b45e501..2fed8d279e 100644
--- 
a/regression-test/data/query_p0/sql_functions/cast_function/test_cast_with_scale_type.out
+++ 
b/regression-test/data/query_p0/sql_functions/cast_function/test_cast_with_scale_type.out
@@ -1,11 +1,11 @@
 -- This file is automatically generated. You should know what you did if you 
want to edit this
 -- !select1 --
-1      2022-12-01T22:23:24.999 2022-12-01 22:23:24.999999
-2      2022-12-02T22:23:24.999 2022-12-02 22:23:24.999999
+1      2022-12-01T22:23:25     2022-12-01 22:23:24.999999
+2      2022-12-02T22:23:25     2022-12-02 22:23:24.999999
 
 -- !select2 --
-1      2022-12-01T22:23:24.999 2022-12-01T22:23:25
-2      2022-12-02T22:23:24.999 2022-12-02T22:23:25
+1      2022-12-01T22:23:25     2022-12-01T22:23:25
+2      2022-12-02T22:23:25     2022-12-02T22:23:25
 
 -- !select3 --
 2022-12-02T22:23:25    2022-12-02T22:23:24
diff --git 
a/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions.out
 
b/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions.out
index ffce5c3e19..2d09d7a2af 100644
Binary files 
a/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions.out
 and 
b/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions.out
 differ
diff --git 
a/regression-test/suites/datatype_p0/agg_state/nereids/test_agg_state_nereids.groovy
 
b/regression-test/suites/datatype_p0/agg_state/nereids/test_agg_state_nereids.groovy
index b31070b01c..c7a0a6d748 100644
--- 
a/regression-test/suites/datatype_p0/agg_state/nereids/test_agg_state_nereids.groovy
+++ 
b/regression-test/suites/datatype_p0/agg_state/nereids/test_agg_state_nereids.groovy
@@ -55,15 +55,19 @@ suite("test_agg_state_nereids") {
             properties("replication_num" = "1");
         """
 
+    sql 'set enable_fallback_to_original_planner=true'
     sql "insert into a_table select 1,max_by_state(1,3);"
     sql "insert into a_table select 1,max_by_state(2,2);"
     sql "insert into a_table select 1,max_by_state(3,1);"
+    sql 'set enable_fallback_to_original_planner=false'
 
     qt_length1 """select k1,length(k2) from a_table order by k1;"""
     qt_group1 """select k1,max_by_merge(k2) from a_table group by k1 order by 
k1;"""
     qt_merge1 """select max_by_merge(k2) from a_table;"""
-    
+
+    sql 'set enable_fallback_to_original_planner=true'
     sql "insert into a_table select k1+1, max_by_state(k2,k1+10) from d_table;"
+    sql 'set enable_fallback_to_original_planner=false'
 
     qt_length2 """select k1,length(k2) from a_table order by k1;"""
     qt_group2 """select k1,max_by_merge(k2) from a_table group by k1 order by 
k1;"""
diff --git a/regression-test/suites/insert_p0/test_array_insert_overflow.groovy 
b/regression-test/suites/insert_p0/test_array_insert_overflow.groovy
index 62c11494cb..68fa156ab1 100644
--- a/regression-test/suites/insert_p0/test_array_insert_overflow.groovy
+++ b/regression-test/suites/insert_p0/test_array_insert_overflow.groovy
@@ -16,6 +16,10 @@
 // under the License.
 
 suite("test_array_insert_overflow") {
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     def testTable = "test_array_insert_overflow"
 
     sql """
diff --git a/regression-test/suites/json_p0/test_json_load_and_function.groovy 
b/regression-test/suites/json_p0/test_json_load_and_function.groovy
index 6f72833072..0d8b980446 100644
--- a/regression-test/suites/json_p0/test_json_load_and_function.groovy
+++ b/regression-test/suites/json_p0/test_json_load_and_function.groovy
@@ -19,6 +19,9 @@ import org.codehaus.groovy.runtime.IOGroovyMethods
 
 suite("test_json_load_and_function", "p0") {
 
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_json"
     def dataFile = "test_json.csv"
diff --git 
a/regression-test/suites/json_p0/test_json_load_unique_key_and_function.groovy 
b/regression-test/suites/json_p0/test_json_load_unique_key_and_function.groovy
index 4b990bc4f7..7d1dad4940 100644
--- 
a/regression-test/suites/json_p0/test_json_load_unique_key_and_function.groovy
+++ 
b/regression-test/suites/json_p0/test_json_load_unique_key_and_function.groovy
@@ -16,6 +16,10 @@
 // under the License.
 
 suite("test_json_unique_load_and_function", "p0") {
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_json_unique"
     def dataFile = "test_json_unique_key.csv"
diff --git 
a/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy 
b/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy
index 46b64813e0..1347726d2a 100644
--- a/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy
+++ b/regression-test/suites/jsonb_p0/test_jsonb_load_and_function.groovy
@@ -18,6 +18,10 @@
 import org.codehaus.groovy.runtime.IOGroovyMethods
 
 suite("test_jsonb_load_and_function", "p0") {
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_jsonb"
     def dataFile = "test_jsonb.csv"
diff --git 
a/regression-test/suites/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
 
b/regression-test/suites/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
index 3f53721d4b..398e0f06ed 100644
--- 
a/regression-test/suites/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
+++ 
b/regression-test/suites/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
@@ -16,6 +16,9 @@
 // under the License.
 
 suite("test_jsonb_unique_load_and_function", "p0") {
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
     // define a sql table
     def testTable = "tbl_test_jsonb_unique"
     def dataFile = "test_jsonb_unique_key.csv"
diff --git 
a/regression-test/suites/nereids_function_p0/scalar_function/J.groovy 
b/regression-test/suites/nereids_function_p0/scalar_function/J.groovy
index 1f743333e7..939e70e5db 100644
--- a/regression-test/suites/nereids_function_p0/scalar_function/J.groovy
+++ b/regression-test/suites/nereids_function_p0/scalar_function/J.groovy
@@ -20,6 +20,10 @@ import org.codehaus.groovy.runtime.IOGroovyMethods
 suite("nereids_scalar_fn_J") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_jsonb"
     def dataFile = "test_jsonb.csv"
diff --git a/regression-test/suites/nereids_p0/join/test_left_join_null.groovy 
b/regression-test/suites/nereids_p0/join/test_left_join_null.groovy
index ba15a1f831..970974022d 100644
--- a/regression-test/suites/nereids_p0/join/test_left_join_null.groovy
+++ b/regression-test/suites/nereids_p0/join/test_left_join_null.groovy
@@ -21,6 +21,9 @@ suite("test_left_join_null", "query") {
 
     def tbName1 = "dept_emp"
     def tbName2 = "departments"
+    
+    sql "drop table if exists ${tbName1}"
+    sql "drop table if exists ${tbName2}"
 
     sql """
            CREATE TABLE IF NOT EXISTS ${tbName1} (
diff --git a/regression-test/suites/nereids_p0/json_p0/test_json_load_and_function.groovy b/regression-test/suites/nereids_p0/json_p0/test_json_load_and_function.groovy
index 14047a678b..e16c8a6e96 100644
--- a/regression-test/suites/nereids_p0/json_p0/test_json_load_and_function.groovy
+++ b/regression-test/suites/nereids_p0/json_p0/test_json_load_and_function.groovy
@@ -20,6 +20,10 @@ import org.codehaus.groovy.runtime.IOGroovyMethods
 suite("test_json_load_and_function", "p0") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_json"
     def dataFile = "test_json.csv"
diff --git a/regression-test/suites/nereids_p0/json_p0/test_json_load_unique_key_and_function.groovy b/regression-test/suites/nereids_p0/json_p0/test_json_load_unique_key_and_function.groovy
index 87916fddc2..4aafc86b56 100644
--- a/regression-test/suites/nereids_p0/json_p0/test_json_load_unique_key_and_function.groovy
+++ b/regression-test/suites/nereids_p0/json_p0/test_json_load_unique_key_and_function.groovy
@@ -18,6 +18,10 @@
 suite("test_json_unique_load_and_function", "p0") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_json_unique"
     def dataFile = "test_json_unique_key.csv"
diff --git a/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_and_function.groovy b/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_and_function.groovy
index 33258d6bec..685acae232 100644
--- a/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_and_function.groovy
+++ b/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_and_function.groovy
@@ -20,6 +20,10 @@ import org.codehaus.groovy.runtime.IOGroovyMethods
 suite("test_jsonb_load_and_function", "p0") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_jsonb"
     def dataFile = "test_jsonb.csv"
diff --git a/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy b/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
index 013f5d6fab..70b5655a1c 100644
--- a/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
+++ b/regression-test/suites/nereids_p0/jsonb_p0/test_jsonb_load_unique_key_and_function.groovy
@@ -18,6 +18,10 @@
 suite("test_jsonb_unique_load_and_function", "p0") {
     sql "SET enable_nereids_planner=true"
     sql "SET enable_fallback_to_original_planner=false"
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     // define a sql table
     def testTable = "tbl_test_jsonb_unique"
     def dataFile = "test_jsonb_unique_key.csv"
diff --git a/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy b/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy
index 8ef435cdf1..1b78fb30fe 100644
--- a/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy
+++ b/regression-test/suites/partition_p0/multi_partition/test_multi_column_partition.groovy
@@ -16,6 +16,10 @@
 // under the License.
 
 suite("test_multi_partition_key", "p0") {
+
+    // TODO: remove it after we add implicit cast check in Nereids
+    sql "set enable_nereids_dml=false"
+
     def random = new Random()
     sql "set enable_insert_strict=true"
     def createTable = { String tableName, String partitionInfo /* param */  ->
diff --git a/regression-test/suites/query_p0/join/test_left_join_null.groovy b/regression-test/suites/query_p0/join/test_left_join_null.groovy
index 42227b6485..71efbd8798 100644
--- a/regression-test/suites/query_p0/join/test_left_join_null.groovy
+++ b/regression-test/suites/query_p0/join/test_left_join_null.groovy
@@ -20,6 +20,9 @@ suite("test_left_join_null", "query") {
     def tbName1 = "dept_emp"
     def tbName2 = "departments"
 
+    sql "drop table if exists ${tbName1}"
+    sql "drop table if exists ${tbName2}"
+
     sql """
            CREATE TABLE IF NOT EXISTS ${tbName1} (
               `emp_no` int NOT NULL,


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to