This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 949de3416a8 [SPARK-45786][SQL][FOLLOWUP][TEST] Fix Decimal random number tests with ANSI enabled
949de3416a8 is described below

commit 949de3416a8ef5b7faa22149f5e07d8235237f40
Author: Kazuyuki Tanimura <[email protected]>
AuthorDate: Fri Nov 17 02:49:51 2023 -0800

    [SPARK-45786][SQL][FOLLOWUP][TEST] Fix Decimal random number tests with ANSI enabled
    
    ### What changes were proposed in this pull request?
    This follow-up PR fixes the test for SPARK-45786 that is failing in GHA with SPARK_ANSI_SQL_MODE=true
    
    ### Why are the changes needed?
    The issue discovered in https://github.com/apache/spark/pull/43678#discussion_r1395693417
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Test updated
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43853 from kazuyukitanimura/SPARK-45786-FollowUp.
    
    Authored-by: Kazuyuki Tanimura <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../catalyst/expressions/ArithmeticExpressionSuite.scala | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index 568dcd10d11..2dc7e82f772 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -308,27 +308,35 @@ class ArithmeticExpressionSuite extends SparkFunSuite 
with ExpressionEvalHelper
           val mulResult = Decimal(mulExact.setScale(mulType.scale, 
RoundingMode.HALF_UP))
           val mulExpected =
             if (mulResult.precision > DecimalType.MAX_PRECISION) null else 
mulResult
-          checkEvaluation(mulActual, mulExpected)
+          checkEvaluationOrException(mulActual, mulExpected)
 
           val divType = Divide(null, null).resultDecimalType(p1, s1, p2, s2)
           val divResult = Decimal(divExact.setScale(divType.scale, 
RoundingMode.HALF_UP))
           val divExpected =
             if (divResult.precision > DecimalType.MAX_PRECISION) null else 
divResult
-          checkEvaluation(divActual, divExpected)
+          checkEvaluationOrException(divActual, divExpected)
 
           val remType = Remainder(null, null).resultDecimalType(p1, s1, p2, s2)
           val remResult = Decimal(remExact.setScale(remType.scale, 
RoundingMode.HALF_UP))
           val remExpected =
             if (remResult.precision > DecimalType.MAX_PRECISION) null else 
remResult
-          checkEvaluation(remActual, remExpected)
+          checkEvaluationOrException(remActual, remExpected)
 
           val quotType = IntegralDivide(null, null).resultDecimalType(p1, s1, 
p2, s2)
           val quotResult = Decimal(quotExact.setScale(quotType.scale, 
RoundingMode.HALF_UP))
           val quotExpected =
             if (quotResult.precision > DecimalType.MAX_PRECISION) null else 
quotResult
-          checkEvaluation(quotActual, quotExpected.toLong)
+          checkEvaluationOrException(quotActual, quotExpected.toLong)
         }
       }
+
+      def checkEvaluationOrException(actual: BinaryArithmetic, expected: Any): 
Unit =
+        if (SQLConf.get.ansiEnabled && expected == null) {
+          checkExceptionInExpression[SparkArithmeticException](actual,
+            "NUMERIC_VALUE_OUT_OF_RANGE")
+        } else {
+          checkEvaluation(actual, expected)
+        }
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to