This is an automated email from the ASF dual-hosted git repository. xxyu pushed a commit to branch kylin-on-parquet-v2 in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push: new 0b4df4c KYLIN-4822 The metrics 'Total spark scan time' of query log is negative in some cases 0b4df4c is described below commit 0b4df4cf67b27c6f9a7a9e5c9006ddf528acc09b Author: Zhichao Zhang <441586...@qq.com> AuthorDate: Wed Nov 18 09:08:46 2020 +0800 KYLIN-4822 The metrics 'Total spark scan time' of query log is negative in some cases --- .../apache/spark/sql/hive/utils/QueryMetricUtils.scala | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/hive/utils/QueryMetricUtils.scala b/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/hive/utils/QueryMetricUtils.scala index d4f8c50..c928e84 100644 --- a/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/hive/utils/QueryMetricUtils.scala +++ b/kylin-spark-project/kylin-spark-common/src/main/scala/org/apache/spark/sql/hive/utils/QueryMetricUtils.scala @@ -42,11 +42,16 @@ object QueryMetricUtils extends Logging { // There is only 'numOutputRows' metric in HiveTableScanExec (exec.metrics.apply("numOutputRows").value, -1l, -1l, -1l, -1l) } - val scanRows = metrics.map(metric => java.lang.Long.valueOf(metric._1)).toList.asJava - val scanFiles = metrics.map(metrics => java.lang.Long.valueOf(metrics._2)).toList.asJava - val metadataTime = metrics.map(metrics => java.lang.Long.valueOf(metrics._3)).toList.asJava - val scanTime = metrics.map(metrics => java.lang.Long.valueOf(metrics._4)).toList.asJava - val scanBytes = metrics.map(metric => java.lang.Long.valueOf(metric._5)).toList.asJava + val scanRows = metrics.map(metric => java.lang.Long.valueOf(metric._1)) + .filter(_ >= 0L).toList.asJava + val scanFiles = metrics.map(metrics => java.lang.Long.valueOf(metrics._2)) + .filter(_ >= 0L).toList.asJava + val metadataTime = metrics.map(metrics => java.lang.Long.valueOf(metrics._3)) + .filter(_ >= 0L).toList.asJava + val scanTime = metrics.map(metrics => java.lang.Long.valueOf(metrics._4)) + .filter(_ >= 0L).toList.asJava + val scanBytes = metrics.map(metric => java.lang.Long.valueOf(metric._5)) + .filter(_ >= 0L).toList.asJava (scanRows, scanFiles, metadataTime, scanTime, scanBytes) } catch {