This is an automated email from the ASF dual-hosted git repository.

englefly pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new e4786790134 [fix](nereids) deduplicate scope slots (#58683)
e4786790134 is described below

commit e47867901348cb99b46c48289cbe5c247e45b451
Author: minghong <[email protected]>
AuthorDate: Mon Dec 15 10:00:36 2025 +0800

    [fix](nereids) deduplicate scope slots (#58683)
    
    If the slots in the scope are not deduplicated, some slots will be
    bound twice, and the optimizer throws an exception because of ambiguity.
---
 .../nereids/rules/analysis/ExpressionAnalyzer.java |  4 ++
 .../nereids_p0/slot_bind/test_bind_slot.groovy     | 47 ++++++++++++++++++++++
 2 files changed, 51 insertions(+)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/ExpressionAnalyzer.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/ExpressionAnalyzer.java
index e3c948d0460..5dbd43801fb 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/ExpressionAnalyzer.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/ExpressionAnalyzer.java
@@ -290,7 +290,11 @@ public class ExpressionAnalyzer extends 
SubExprAnalyzer<ExpressionRewriteContext
         if (bindSlotInOuterScope && !foundInThisScope && 
outerScope.isPresent()) {
             boundedOpt = Optional.of(bindSlotByScope(unboundSlot, 
outerScope.get()));
         }
+        // it is expensive to deduplicate slots in the scope, so we 
deduplicate the bound slots here instead
         List<? extends Expression> bounded = boundedOpt.get();
+        if (bounded.size() > 1) {
+            bounded = bounded.stream().distinct().collect(Collectors.toList());
+        }
         switch (bounded.size()) {
             case 0:
                 String tableName = 
StringUtils.join(unboundSlot.getQualifier(), ".");
diff --git a/regression-test/suites/nereids_p0/slot_bind/test_bind_slot.groovy 
b/regression-test/suites/nereids_p0/slot_bind/test_bind_slot.groovy
new file mode 100644
index 00000000000..6d49c7fb49a
--- /dev/null
+++ b/regression-test/suites/nereids_p0/slot_bind/test_bind_slot.groovy
@@ -0,0 +1,47 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+
+suite("test_bind_slot") {
+    sql """
+      drop table if exists t; 
+       CREATE TABLE t (
+      `id` int COMMENT '',
+      `status` string COMMENT '',
+      `time_created` string COMMENT '',
+    )  properties("replication_num" = "1");
+
+  """
+  // if the scope is not deduplicated, time_created will be bound twice, causing 
a slot ambiguity error
+  sql """select
+          from_unixtime(time_created / 1000, 'yyyyMMdd') as date1,
+          status,
+          count(distinct id) as cnt
+          from
+            (
+              select
+                from_unixtime(time_created / 1001) as created_date,
+                time_created,
+                *
+              from
+                t
+            ) t1
+          group by
+          from_unixtime(time_created / 1000, 'yyyyMMdd'),
+          status;
+  """
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to