This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new f19e8450bff branch-3.1: [fix](count) fix wrong count push down logic 
(#56182) (#56482)
f19e8450bff is described below

commit f19e8450bff724b99aa68440012aad503d038846
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Sat Sep 27 19:25:45 2025 -0700

    branch-3.1: [fix](count) fix wrong count push down logic (#56182) (#56482)
    
    bp #56182
---
 .../apache/doris/datasource/hive/source/HiveScanNode.java    |  3 +++
 .../doris/datasource/iceberg/source/IcebergScanNode.java     |  4 ++++
 .../doris/datasource/paimon/source/PaimonScanNode.java       |  4 ++++
 gensrc/thrift/PlanNodes.thrift                               |  2 +-
 .../iceberg/test_iceberg_optimize_count.out                  | 12 ++++++++++++
 .../iceberg/test_iceberg_optimize_count.groovy               | 12 ++++++++----
 6 files changed, 32 insertions(+), 5 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
index 4a47f97f7f2..6a0fee01a54 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
@@ -468,10 +468,12 @@ public class HiveScanNode extends FileQueryScanNode {
                 }
                 transactionalHiveDesc.setDeleteDeltas(deleteDeltaDescs);
                 
tableFormatFileDesc.setTransactionalHiveParams(transactionalHiveDesc);
+                tableFormatFileDesc.setTableLevelRowCount(-1);
                 rangeDesc.setTableFormatParams(tableFormatFileDesc);
             } else {
                 TTableFormatFileDesc tableFormatFileDesc = new 
TTableFormatFileDesc();
                 
tableFormatFileDesc.setTableFormatType(TableFormatType.HIVE.value());
+                tableFormatFileDesc.setTableLevelRowCount(-1);
                 rangeDesc.setTableFormatParams(tableFormatFileDesc);
             }
         }
@@ -606,3 +608,4 @@ public class HiveScanNode extends FileQueryScanNode {
     }
 }
 
+
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
index 5fcbe1def75..d15675944e4 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
@@ -184,6 +184,9 @@ public class IcebergScanNode extends FileQueryScanNode {
         
tableFormatFileDesc.setTableFormatType(icebergSplit.getTableFormatType().value());
         if (tableLevelPushDownCount) {
             
tableFormatFileDesc.setTableLevelRowCount(icebergSplit.getTableLevelRowCount());
+        } else {
+            // MUST explicitly set to -1, to be distinct from valid row count 
>= 0
+            tableFormatFileDesc.setTableLevelRowCount(-1);
         }
         TIcebergFileDesc fileDesc = new TIcebergFileDesc();
         fileDesc.setFormatVersion(formatVersion);
@@ -670,3 +673,4 @@ public class IcebergScanNode extends FileQueryScanNode {
         return Optional.empty();
     }
 }
+
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
index 05a4a133fc0..9c3f60c9c2f 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
@@ -242,6 +242,9 @@ public class PaimonScanNode extends FileQueryScanNode {
         }
         if (paimonSplit.getRowCount().isPresent()) {
             
tableFormatFileDesc.setTableLevelRowCount(paimonSplit.getRowCount().get());
+        } else {
+            // MUST explicitly set to -1, to be distinct from valid row count 
>= 0
+            tableFormatFileDesc.setTableLevelRowCount(-1);
         }
         tableFormatFileDesc.setPaimonParams(fileDesc);
         Map<String, String> partitionValues = 
paimonSplit.getPaimonPartitionValues();
@@ -689,3 +692,4 @@ public class PaimonScanNode extends FileQueryScanNode {
         return baseTable;
     }
 }
+
diff --git a/gensrc/thrift/PlanNodes.thrift b/gensrc/thrift/PlanNodes.thrift
index 908c7d10530..eb2baf31780 100644
--- a/gensrc/thrift/PlanNodes.thrift
+++ b/gensrc/thrift/PlanNodes.thrift
@@ -411,7 +411,7 @@ struct TTableFormatFileDesc {
     6: optional TMaxComputeFileDesc max_compute_params
     7: optional TTrinoConnectorFileDesc trino_connector_params
     8: optional TLakeSoulFileDesc lakesoul_params
-    9: optional i64 table_level_row_count
+    9: optional i64 table_level_row_count = -1
 }
 
 // Deprecated, hive text table is a special format, not a serde type
diff --git 
a/regression-test/data/external_table_p0/iceberg/test_iceberg_optimize_count.out
 
b/regression-test/data/external_table_p0/iceberg/test_iceberg_optimize_count.out
index ec9129a00d2..20d03ad9c06 100644
--- 
a/regression-test/data/external_table_p0/iceberg/test_iceberg_optimize_count.out
+++ 
b/regression-test/data/external_table_p0/iceberg/test_iceberg_optimize_count.out
@@ -11,6 +11,18 @@
 -- !q04 --
 1000
 
+-- !q01 --
+1000
+
+-- !q02 --
+1000
+
+-- !q03 --
+1000
+
+-- !q04 --
+1000
+
 -- !q05 --
 1000
 
diff --git 
a/regression-test/suites/external_table_p0/iceberg/test_iceberg_optimize_count.groovy
 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_optimize_count.groovy
index bc3b006fb93..0f1f5535c05 100644
--- 
a/regression-test/suites/external_table_p0/iceberg/test_iceberg_optimize_count.groovy
+++ 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_optimize_count.groovy
@@ -50,10 +50,14 @@ suite("test_iceberg_optimize_count", 
"p0,external,doris,external_docker,external
         // use push down count
         sql """ set enable_count_push_down_for_external_table=true; """
 
-        qt_q01 """${sqlstr1}""" 
-        qt_q02 """${sqlstr2}""" 
-        qt_q03 """${sqlstr3}""" 
-        qt_q04 """${sqlstr4}""" 
+        for (String val: ["1K", "0"]) {
+            sql "set file_split_size=${val}"
+            qt_q01 """${sqlstr1}""" 
+            qt_q02 """${sqlstr2}""" 
+            qt_q03 """${sqlstr3}""" 
+            qt_q04 """${sqlstr4}""" 
+        }
+        sql "unset variable file_split_size;"
 
         // traditional mode
         sql """set num_files_in_batch_mode=100000"""


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to