This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 946d2d74ce0f [SPARK-51941][SQL] Fix the precision problem for convertToCatalyst when the input is a BigDecimal between -1.0 and 1.0
946d2d74ce0f is described below

commit 946d2d74ce0f007e5da04c887c0b17cf8b6fe405
Author: jackylee-ch <lijunq...@baidu.com>
AuthorDate: Tue Apr 29 20:19:07 2025 +0800

    [SPARK-51941][SQL] Fix the precision problem for convertToCatalyst when the input is a BigDecimal between -1.0 and 1.0
    
    ### What changes were proposed in this pull request?
    In [SPARK-20211](https://issues.apache.org/jira/browse/SPARK-20211), we fixed an exception thrown during BigDecimal type conversion. However, the CatalystTypeConverters.convertToCatalyst method was not updated accordingly, so users still hit an exception when converting a BigDecimal whose scale exceeds its precision. Below is a minimal reproduction:
    ```
    CatalystTypeConverters.convertToCatalyst(BigDecimal("0.01"))
    ```
    ```
    Decimal scale (2) cannot be greater than precision (1).
    org.apache.spark.sql.AnalysisException: Decimal scale (2) cannot be greater than precision (1).
        at org.apache.spark.sql.errors.DataTypeErrors$.decimalCannotGreaterThanPrecisionError(DataTypeErrors.scala:122)
        at org.apache.spark.sql.types.DecimalType.<init>(DecimalType.scala:46)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$.convertToCatalyst(CatalystTypeConverters.scala:578)
        at org.apache.spark.sql.catalyst.CatalystTypeConvertersSuite.$anonfun$new$18(CatalystTypeConvertersSuite.scala:159)
    ```
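    
    The root cause: for a value like 0.01, BigDecimal reports precision 1 (its unscaled value, 1, has a single significant digit) but scale 2, and DecimalType requires scale <= precision. A minimal standalone sketch (plain Scala, not part of the patch) of the mismatch and of the widening the fix applies:
    ```
    // For 0.01 the unscaled value is 1, so precision (significant digits)
    // ends up smaller than scale (digits after the decimal point).
    val d = BigDecimal("0.01")
    println(d.precision)  // 1
    println(d.scale)      // 2
    // DecimalType(1, 2) violates the scale <= precision invariant and throws;
    // widening the precision restores it: decimal(2,2) holds 0.01 exactly.
    println(math.max(d.precision, d.scale))  // 2
    ```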
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Added a new unit test in CatalystTypeConvertersSuite.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #50747 from jackylee-ch/fix_convertToCatalyst_with_BigDecimal.
    
    Authored-by: jackylee-ch <lijunq...@baidu.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../org/apache/spark/sql/catalyst/CatalystTypeConverters.scala   | 6 ++++--
 .../apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala  | 9 +++++++++
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
index ef4308beafe8..bb6afb3b13fa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
@@ -575,8 +575,10 @@ object CatalystTypeConverters {
     case t: Timestamp => TimestampConverter.toCatalyst(t)
     case i: Instant => InstantConverter.toCatalyst(i)
     case l: LocalDateTime => TimestampNTZConverter.toCatalyst(l)
-    case d: BigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
-    case d: JavaBigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
+    case d: BigDecimal =>
+      new DecimalConverter(DecimalType(Math.max(d.precision, d.scale), d.scale)).toCatalyst(d)
+    case d: JavaBigDecimal =>
+      new DecimalConverter(DecimalType(Math.max(d.precision, d.scale), d.scale)).toCatalyst(d)
     case seq: Seq[Any] => new GenericArrayData(seq.map(convertToCatalyst).toArray)
     case r: Row => InternalRow(r.toSeq.map(convertToCatalyst): _*)
     case arr: Array[Byte] => arr
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
index 3457a9ced4e3..e4c48f7467f9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
+import java.math.{BigDecimal => JavaBigDecimal}
 import java.time.{Duration, Instant, LocalDate, LocalDateTime, LocalTime, Period}
 
 import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
@@ -155,6 +156,14 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
         "dataType" -> "decimal(10,0)"))
   }
 
+  test("SPARK-51941: convert BigDecimal to Decimal") {
+    val expected = Decimal("0.01")
+    val bigDecimal = BigDecimal("0.01")
+    assert(CatalystTypeConverters.convertToCatalyst(bigDecimal) === expected)
+    val javaBigDecimal = new JavaBigDecimal("0.01")
+    assert(CatalystTypeConverters.convertToCatalyst(javaBigDecimal) === expected)
+  }
+
   test("converting a wrong value to the string type") {
     checkError(
       exception = intercept[SparkIllegalArgumentException] {

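For reference, a hedged sketch of the post-fix behavior, mirroring the new test above (assumes a Spark build that contains this commit, with spark-catalyst on the classpath):

```
import java.math.{BigDecimal => JavaBigDecimal}

import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.types.Decimal

// Both conversion paths now build decimal(2,2) via max(precision, scale)
// instead of throwing "Decimal scale (2) cannot be greater than precision (1)".
assert(CatalystTypeConverters.convertToCatalyst(BigDecimal("0.01")) == Decimal("0.01"))
assert(CatalystTypeConverters.convertToCatalyst(new JavaBigDecimal("0.01")) == Decimal("0.01"))
```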

