This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new 116cf5839b33 [SPARK-51941][SQL] Fix the precision problem for convertToCatalyst when the Input is BigDecimal between -1.0 and 1.0
116cf5839b33 is described below

commit 116cf5839b330319fbd406e1aa6f4f17cbed846c
Author: jackylee-ch <lijunq...@baidu.com>
AuthorDate: Tue Apr 29 20:19:07 2025 +0800

    [SPARK-51941][SQL] Fix the precision problem for convertToCatalyst when the Input is BigDecimal between -1.0 and 1.0
    
    ### What changes were proposed in this pull request?

    In [SPARK-20211](https://issues.apache.org/jira/browse/SPARK-20211), we fixed an exception thrown for BigDecimal values whose scale exceeds their precision. However, the CatalystTypeConverters.convertToCatalyst method was not updated accordingly, so users still hit the exception when converting such BigDecimal inputs (certain values between -1.0 and 1.0, e.g. 0.01, which has precision 1 but scale 2). Below is a minimal example that reproduces the problem.
    ```
    CatalystTypeConverters.convertToCatalyst(BigDecimal("0.01"))
    ```
    ```
    Decimal scale (2) cannot be greater than precision (1).
    org.apache.spark.sql.AnalysisException: Decimal scale (2) cannot be greater than precision (1).
        at org.apache.spark.sql.errors.DataTypeErrors$.decimalCannotGreaterThanPrecisionError(DataTypeErrors.scala:122)
        at org.apache.spark.sql.types.DecimalType.<init>(DecimalType.scala:46)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$.convertToCatalyst(CatalystTypeConverters.scala:578)
        at org.apache.spark.sql.catalyst.CatalystTypeConvertersSuite.$anonfun$new$18(CatalystTypeConvertersSuite.scala:159)
    ```
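
    The root cause is that for such inputs the number of significant digits (the precision) is smaller than the scale, and DecimalType rejects scale > precision. The fix widens the declared precision to Math.max(d.precision, d.scale), as the diff below shows. A minimal standalone sketch of the arithmetic (plain JVM BigDecimal, no Spark required; the object name is illustrative):
    ```
    import java.math.{BigDecimal => JavaBigDecimal}

    // Illustrative sketch: why DecimalType(d.precision, d.scale) failed.
    object PrecisionVsScale {
      def main(args: Array[String]): Unit = {
        val d = new JavaBigDecimal("0.01")
        // "0.01" has unscaled value 1, so precision = 1 while scale = 2;
        // DecimalType(1, 2) is rejected because scale > precision.
        println(s"precision = ${d.precision}, scale = ${d.scale}")
        // Widening precision to max(precision, scale) yields decimal(2,2),
        // which represents 0.01 exactly.
        val widened = Math.max(d.precision, d.scale)
        println(s"widened type: decimal($widened, ${d.scale})")
      }
    }
    ```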
    
    ### Does this PR introduce _any_ user-facing change?

    No.
    
    ### How was this patch tested?

    Newly added test.
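
    For reference, a sketch of the check the new test performs (mirrors the suite diff below; assumes it runs on the spark-catalyst test classpath, as the suite does):
    ```
    import org.apache.spark.sql.catalyst.CatalystTypeConverters
    import org.apache.spark.sql.types.Decimal

    // With the fix, both Scala and Java BigDecimal inputs convert cleanly
    // instead of throwing, yielding a Decimal carrying decimal(2,2).
    val expected = Decimal("0.01")
    assert(CatalystTypeConverters.convertToCatalyst(BigDecimal("0.01")) == expected)
    assert(CatalystTypeConverters.convertToCatalyst(new java.math.BigDecimal("0.01")) == expected)
    ```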
    
    ### Was this patch authored or co-authored using generative AI tooling?

    No.
    
    Closes #50747 from jackylee-ch/fix_convertToCatalyst_with_BigDecimal.
    
    Authored-by: jackylee-ch <lijunq...@baidu.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit 946d2d74ce0f007e5da04c887c0b17cf8b6fe405)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../org/apache/spark/sql/catalyst/CatalystTypeConverters.scala   | 6 ++++--
 .../apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala  | 9 +++++++++
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
index fab65251ed51..5f46c3bcaef6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
@@ -561,8 +561,10 @@ object CatalystTypeConverters {
     case t: Timestamp => TimestampConverter.toCatalyst(t)
     case i: Instant => InstantConverter.toCatalyst(i)
     case l: LocalDateTime => TimestampNTZConverter.toCatalyst(l)
-    case d: BigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
-    case d: JavaBigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
+    case d: BigDecimal =>
+      new DecimalConverter(DecimalType(Math.max(d.precision, d.scale), d.scale)).toCatalyst(d)
+    case d: JavaBigDecimal =>
+      new DecimalConverter(DecimalType(Math.max(d.precision, d.scale), d.scale)).toCatalyst(d)
     case seq: Seq[Any] => new GenericArrayData(seq.map(convertToCatalyst).toArray)
     case r: Row => InternalRow(r.toSeq.map(convertToCatalyst): _*)
     case arr: Array[Byte] => arr
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
index 7572843f44a1..d9a337a4b64b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
+import java.math.{BigDecimal => JavaBigDecimal}
 import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period}
 
 import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
@@ -155,6 +156,14 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
         "dataType" -> "decimal(10,0)"))
   }
 
+  test("SPARK-51941: convert BigDecimal to Decimal") {
+    val expected = Decimal("0.01")
+    val bigDecimal = BigDecimal("0.01")
+    assert(CatalystTypeConverters.convertToCatalyst(bigDecimal) === expected)
+    val javaBigDecimal = new JavaBigDecimal("0.01")
+    assert(CatalystTypeConverters.convertToCatalyst(javaBigDecimal) === expected)
+  }
+
   test("converting a wrong value to the string type") {
     checkError(
       exception = intercept[SparkIllegalArgumentException] {

