This is an automated email from the ASF dual-hosted git repository.

liyang pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 00352e8c2d53307b4cf3f847fc6bbf2a869c4520
Author: haocheni <hao_...@qq.com>
AuthorDate: Fri Sep 22 10:29:48 2023 +0800

    KYLIN-5817 Query scanRows and bytes are incorrect
---
 .../src/main/scala/org/apache/spark/sql/hive/QueryMetricUtils.scala   | 4 ++--
 .../org/apache/spark/sql/execution/SparkQueryMetricUtilsSuite.scala   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/hive/QueryMetricUtils.scala b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/hive/QueryMetricUtils.scala
index 4beda86353..68fc7b9b19 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/hive/QueryMetricUtils.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/hive/QueryMetricUtils.scala
@@ -63,8 +63,8 @@ object QueryMetricUtils extends Logging {
           child => {
             if (child.isInstanceOf[SparkPlan]) {
              val result = collectAdaptiveSparkPlanExecMetrics(child, scanRow, scanBytes)
-              newScanRow = result._1
-              newScanBytes = result._2
+              newScanRow += result._1
+              newScanBytes += result._2
             } else {
              logTrace("Not sparkPlan in collectAdaptiveSparkPlanExecMetrics, child: " + child.getClass.getName)
             }
diff --git a/src/spark-project/sparder/src/test/scala/org/apache/spark/sql/execution/SparkQueryMetricUtilsSuite.scala b/src/spark-project/sparder/src/test/scala/org/apache/spark/sql/execution/SparkQueryMetricUtilsSuite.scala
index ede02eef60..f0eda5b817 100644
--- a/src/spark-project/sparder/src/test/scala/org/apache/spark/sql/execution/SparkQueryMetricUtilsSuite.scala
+++ b/src/spark-project/sparder/src/test/scala/org/apache/spark/sql/execution/SparkQueryMetricUtilsSuite.scala
@@ -217,8 +217,8 @@ class SparkQueryMetricUtilsSuite extends QueryTest with SharedSparkSession {
     assert(0 == collectScanMetrics._1.get(0))
     assert(0 == collectScanMetrics._2.get(0))
     val collectScanMetrics2 = QueryMetricUtils.collectAdaptiveSparkPlanExecMetrics(dataWritingCommandExec, 1, 1)
-    assert(1 == collectScanMetrics2._1)
-    assert(1 == collectScanMetrics2._2)
+    assert(2 == collectScanMetrics2._1)
+    assert(2 == collectScanMetrics2._2)
 
   }
 

Reply via email to