This is an automated email from the ASF dual-hosted git repository.

liyang pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8aaa87cbb3f6b9e247ced8ac34d72fb1365a39f1
Author: lixiang <447399...@qq.com>
AuthorDate: Fri Aug 11 11:48:18 2023 +0800

    KYLIN-5783 Round query result of decimal zero by config
---
 .../java/org/apache/kylin/common/KylinConfigBase.java    |  4 ++++
 .../org/apache/kylin/query/pushdown/SparkSqlClient.scala |  2 ++
 .../apache/kylin/query/pushdown/SparderTypeUtilTest.java | 16 ++++++++++++++++
 .../scala/org/apache/spark/sql/SparderTypeUtil.scala     | 11 ++++++++++-
 4 files changed, 32 insertions(+), 1 deletion(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index eddb15bffa..ff7bd4c7c3 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -4029,4 +4029,8 @@ public abstract class KylinConfigBase implements Serializable {
     public int getExpectedIndexSizeOptimized() {
         return Integer.parseInt(getOptional("kylin.index.expected-size-after-optimization", "0"));
     }
+
+    public boolean isRoundDecimalZero() {
+        return Boolean.parseBoolean(getOptional("kylin.query.round-decimal-zero", FALSE));
+    }
 }
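
[Note: the new option is read through getOptional, so leaving it unset keeps today's behaviour. A rough sketch of how the flag resolves, using a plain Map as a stand-in for Kylin's property store and assuming the FALSE constant is the string "false":

  // Hypothetical stand-in for KylinConfigBase.getOptional: a plain properties map.
  val props: Map[String, String] = Map.empty // e.g. nothing set in kylin.properties
  val roundDecimalZero: Boolean =
    java.lang.Boolean.parseBoolean(props.getOrElse("kylin.query.round-decimal-zero", "false"))
  // => false: zero results keep their scale (e.g. "0.000") unless the flag is explicitly enabled.]
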
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
index d2ba4fca42..a015f6d2c3 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.query.pushdown
 
+import java.math.BigDecimal
 import java.sql.Timestamp
 import java.util
 import java.util.concurrent.{Callable, Executors, TimeUnit, TimeoutException}
@@ -213,6 +214,7 @@ object SparkSqlClient {
     case value: immutable.Map[Any, Any] =>
      value.map(p => rawValueToString(p._1, true) + ":" + rawValueToString(p._2, true)).mkString("{", ",", "}")
     case value: Array[Byte] => new String(value)
+    case value: BigDecimal => SparderTypeUtil.adjustDecimal(value)
     case value: Any => value.toString
   }
 }
diff --git a/src/spark-project/sparder/src/test/java/org/apache/kylin/query/pushdown/SparderTypeUtilTest.java b/src/spark-project/sparder/src/test/java/org/apache/kylin/query/pushdown/SparderTypeUtilTest.java
index ea055a98ef..0626a96c3e 100644
--- a/src/spark-project/sparder/src/test/java/org/apache/kylin/query/pushdown/SparderTypeUtilTest.java
+++ b/src/spark-project/sparder/src/test/java/org/apache/kylin/query/pushdown/SparderTypeUtilTest.java
@@ -28,6 +28,8 @@ import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeSystem;
 import org.apache.calcite.sql.type.BasicSqlType;
 import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.TempMetadataBuilder;
 import org.apache.spark.sql.util.SparderTypeUtil;
 import org.junit.Assert;
 import org.junit.Test;
@@ -140,4 +142,18 @@ public class SparderTypeUtilTest {
         Assert.assertEquals(Long.class, convert_value.getClass());
         Assert.assertEquals(0L, convert_value);
     }
+
+    @Test
+    public void testBigDecimalRoundZero() {
+        String tempMetadataDir = TempMetadataBuilder.prepareLocalTempMetadata();
+        KylinConfig.setKylinConfigForLocalTest(tempMetadataDir);
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        Assert.assertEquals("0.000", convertToStringWithDecimalType(new BigDecimal("0.000"), 29, 3));
+        config.setProperty("kylin.query.round-decimal-zero", "true");
+        Assert.assertEquals("0", convertToStringWithDecimalType(new BigDecimal("0.000"), 29, 3));
+        Assert.assertEquals("0", convertToStringWithDecimalType(new BigDecimal("0.0000"), 29, 4));
+        Assert.assertEquals("0.001", convertToStringWithDecimalType(new BigDecimal("0.001"), 29, 3));
+
+    }
+
 }
diff --git a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
index 83499cda60..acce92b76a 100644
--- a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
+++ b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
@@ -252,6 +252,15 @@ object SparderTypeUtil extends Logging {
     ret
   }
 
+  def adjustDecimal(value: BigDecimal): String = {
+    // customized for Longfor
+    if (value.compareTo(BigDecimal.ZERO) == 0 && KylinConfig.getInstanceFromEnv.isRoundDecimalZero) {
+      BigDecimal.ZERO.toString
+    } else {
+      value.toString
+    }
+  }
+
   def convertToStringWithCalciteType(rawValue: Any, relType: RelDataType, wrapped: Boolean = false): String = {
     val formatStringValue = (value: String) => if (wrapped) StringHelper.doubleQuote(value) else value
     val formatArray = (value: String) => {
@@ -268,7 +277,7 @@ object SparderTypeUtil extends Logging {
     (rawValue, relType.getSqlTypeName) match {
       case (null, _) => null
       // types that matched
-      case (value: BigDecimal, SqlTypeName.DECIMAL) => value.toString
+      case (value: BigDecimal, SqlTypeName.DECIMAL) => adjustDecimal(value)
       case (value: Integer, SqlTypeName.INTEGER) => value.toString
       case (value: Byte, SqlTypeName.TINYINT) => value.toString
       case (value: Short, SqlTypeName.SMALLINT) => value.toString
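
[For readers skimming the diff, a self-contained Scala sketch of the zero-rounding rule that adjustDecimal introduces. The helper below takes the flag as a plain parameter, so its name and signature are illustrative only; the real code reads KylinConfig.getInstanceFromEnv.isRoundDecimalZero:

import java.math.BigDecimal

// Illustrative only: `roundDecimalZero` mirrors kylin.query.round-decimal-zero (default false).
object RoundDecimalZeroSketch {
  def renderDecimal(value: BigDecimal, roundDecimalZero: Boolean): String =
    if (roundDecimalZero && value.compareTo(BigDecimal.ZERO) == 0) BigDecimal.ZERO.toString
    else value.toString

  def main(args: Array[String]): Unit = {
    // Flag off: the scale is preserved, matching the previous behaviour.
    println(renderDecimal(new BigDecimal("0.000"), roundDecimalZero = false)) // 0.000
    // Flag on: any decimal equal to zero collapses to "0"; non-zero values are untouched.
    println(renderDecimal(new BigDecimal("0.000"), roundDecimalZero = true))  // 0
    println(renderDecimal(new BigDecimal("0.001"), roundDecimalZero = true))  // 0.001
  }
}

Note that compareTo, unlike equals, ignores scale, which is why both 0.000 and 0.0000 collapse to "0" in the test above.]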
