This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d9781d99ae87 [SPARK-50389][SQL][TESTS] 
Remove `sql-tests/results/try_arithmetic.sql.out.java21`
d9781d99ae87 is described below

commit d9781d99ae877d624d7a1897fce0781ff3fc3323
Author: yangjie01 <[email protected]>
AuthorDate: Thu Nov 21 21:03:44 2024 -0800

    [SPARK-50389][SQL][TESTS] 
Remove `sql-tests/results/try_arithmetic.sql.out.java21`
    
    ### What changes were proposed in this pull request?
    
    There are 8 Java 21-related golden files in Spark:
    
    ```
    
./sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/nonansi/try_arithmetic.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/nonansi/try_aggregates.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/try_aggregates.sql.out.java21
    
./sql/core/src/test/resources/sql-tests/results/datetime-formatting.sql.out.java21
    ```
    
    After SPARK-50313 was merged, the contents of 
`./sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21` 
and `./sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out` 
have become identical. We can verify by running `diff 
./sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21 
./sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out`. So 
this pr deletes this file.
    
    Actually, after SPARK-50313 the specialization for Java 21 has become 
`./sql/core/src/test/resources/sql-tests/results/nonansi/try_arithmetic.sql.out.java21`.
    
    ### Why are the changes needed?
    Delete an unnecessary golden file.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    - Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #48928 from LuciferYang/SPARK-50389.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../results/try_arithmetic.sql.out.java21          | 753 ---------------------
 1 file changed, 753 deletions(-)

diff --git 
a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21 
b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21
deleted file mode 100644
index acf6e70a50de..000000000000
--- 
a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out.java21
+++ /dev/null
@@ -1,753 +0,0 @@
--- Automatically generated by SQLQueryTestSuite
--- !query
-SELECT try_add(1, 1)
--- !query schema
-struct<try_add(1, 1):int>
--- !query output
-2
-
-
--- !query
-SELECT try_add(2147483647, 1)
--- !query schema
-struct<try_add(2147483647, 1):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_add(2147483647, decimal(1))
--- !query schema
-struct<try_add(2147483647, 1):decimal(11,0)>
--- !query output
-2147483648
-
-
--- !query
-SELECT try_add(2147483647, "1")
--- !query schema
-struct<try_add(2147483647, 1):bigint>
--- !query output
-2147483648
-
-
--- !query
-SELECT try_add(-2147483648, -1)
--- !query schema
-struct<try_add(-2147483648, -1):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_add(9223372036854775807L, 1)
--- !query schema
-struct<try_add(9223372036854775807, 1):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_add(-9223372036854775808L, -1)
--- !query schema
-struct<try_add(-9223372036854775808, -1):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_add(1, (2147483647 + 1))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 20,
-    "stopIndex" : 33,
-    "fragment" : "2147483647 + 1"
-  } ]
-}
-
-
--- !query
-SELECT try_add(1L, (9223372036854775807L + 1L))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 21,
-    "stopIndex" : 45,
-    "fragment" : "9223372036854775807L + 1L"
-  } ]
-}
-
-
--- !query
-SELECT try_add(1, 1.0 / 0.0)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "DIVIDE_BY_ZERO",
-  "sqlState" : "22012",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 19,
-    "stopIndex" : 27,
-    "fragment" : "1.0 / 0.0"
-  } ]
-}
-
-
--- !query
-SELECT try_add(date'2021-01-01', 1)
--- !query schema
-struct<try_add(DATE '2021-01-01', 1):date>
--- !query output
-2021-01-02
-
-
--- !query
-SELECT try_add(1, date'2021-01-01')
--- !query schema
-struct<try_add(1, DATE '2021-01-01'):date>
--- !query output
-2021-01-02
-
-
--- !query
-SELECT try_add(date'2021-01-01', interval 2 year)
--- !query schema
-struct<try_add(DATE '2021-01-01', INTERVAL '2' YEAR):date>
--- !query output
-2023-01-01
-
-
--- !query
-SELECT try_add(date'2021-01-01', interval 2 second)
--- !query schema
-struct<try_add(DATE '2021-01-01', INTERVAL '02' SECOND):timestamp>
--- !query output
-2021-01-01 00:00:02
-
-
--- !query
-SELECT try_add(interval 2 year, date'2021-01-01')
--- !query schema
-struct<try_add(INTERVAL '2' YEAR, DATE '2021-01-01'):date>
--- !query output
-2023-01-01
-
-
--- !query
-SELECT try_add(interval 2 second, date'2021-01-01')
--- !query schema
-struct<try_add(INTERVAL '02' SECOND, DATE '2021-01-01'):timestamp>
--- !query output
-2021-01-01 00:00:02
-
-
--- !query
-SELECT try_add(timestamp_ltz'2021-01-01 00:00:00', interval 2 year)
--- !query schema
-struct<try_add(TIMESTAMP '2021-01-01 00:00:00', INTERVAL '2' YEAR):timestamp>
--- !query output
-2023-01-01 00:00:00
-
-
--- !query
-SELECT try_add(timestamp_ntz'2021-01-01 00:00:00', interval 2 second)
--- !query schema
-struct<try_add(TIMESTAMP_NTZ '2021-01-01 00:00:00', INTERVAL '02' 
SECOND):timestamp_ntz>
--- !query output
-2021-01-01 00:00:02
-
-
--- !query
-SELECT try_add(interval 2 year, timestamp_ltz'2021-01-01 00:00:00')
--- !query schema
-struct<try_add(INTERVAL '2' YEAR, TIMESTAMP '2021-01-01 00:00:00'):timestamp>
--- !query output
-2023-01-01 00:00:00
-
-
--- !query
-SELECT try_add(interval 2 second, timestamp_ntz'2021-01-01 00:00:00')
--- !query schema
-struct<try_add(INTERVAL '02' SECOND, TIMESTAMP_NTZ '2021-01-01 
00:00:00'):timestamp_ntz>
--- !query output
-2021-01-01 00:00:02
-
-
--- !query
-SELECT try_add(interval 2 year, interval 2 year)
--- !query schema
-struct<try_add(INTERVAL '2' YEAR, INTERVAL '2' YEAR):interval year>
--- !query output
-4-0
-
-
--- !query
-SELECT try_add(interval 2 second, interval 2 second)
--- !query schema
-struct<try_add(INTERVAL '02' SECOND, INTERVAL '02' SECOND):interval second>
--- !query output
-0 00:00:04.000000000
-
-
--- !query
-SELECT try_add(interval 2 year, interval 2 second)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.ExtendedAnalysisException
-{
-  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
-  "sqlState" : "42K09",
-  "messageParameters" : {
-    "inputSql" : "\"INTERVAL '2' YEAR\"",
-    "inputType" : "\"INTERVAL YEAR\"",
-    "paramIndex" : "first",
-    "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"",
-    "sqlExpr" : "\"INTERVAL '2' YEAR + INTERVAL '02' SECOND\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 8,
-    "stopIndex" : 50,
-    "fragment" : "try_add(interval 2 year, interval 2 second)"
-  } ]
-}
-
-
--- !query
-SELECT try_add(interval 2147483647 month, interval 2 month)
--- !query schema
-struct<try_add(INTERVAL '2147483647' MONTH, INTERVAL '2' MONTH):interval month>
--- !query output
-NULL
-
-
--- !query
-SELECT try_add(interval 106751991 day, interval 3 day)
--- !query schema
-struct<try_add(INTERVAL '106751991' DAY, INTERVAL '3' DAY):interval day>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(1, 0.5)
--- !query schema
-struct<try_divide(1, 0.5):decimal(8,6)>
--- !query output
-2.000000
-
-
--- !query
-SELECT try_divide(1, 0)
--- !query schema
-struct<try_divide(1, 0):double>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(0, 0)
--- !query schema
-struct<try_divide(0, 0):double>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(1, (2147483647 + 1))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 23,
-    "stopIndex" : 36,
-    "fragment" : "2147483647 + 1"
-  } ]
-}
-
-
--- !query
-SELECT try_divide(1L, (9223372036854775807L + 1L))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 24,
-    "stopIndex" : 48,
-    "fragment" : "9223372036854775807L + 1L"
-  } ]
-}
-
-
--- !query
-SELECT try_divide(1, 1.0 / 0.0)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "DIVIDE_BY_ZERO",
-  "sqlState" : "22012",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 22,
-    "stopIndex" : 30,
-    "fragment" : "1.0 / 0.0"
-  } ]
-}
-
-
--- !query
-SELECT try_divide(1, decimal(0))
--- !query schema
-struct<try_divide(1, 0):decimal(12,11)>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(1, "0")
--- !query schema
-struct<try_divide(1, 0):double>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(interval 2 year, 2)
--- !query schema
-struct<try_divide(INTERVAL '2' YEAR, 2):interval year to month>
--- !query output
-1-0
-
-
--- !query
-SELECT try_divide(interval 2 second, 2)
--- !query schema
-struct<try_divide(INTERVAL '02' SECOND, 2):interval day to second>
--- !query output
-0 00:00:01.000000000
-
-
--- !query
-SELECT try_divide(interval 2 year, 0)
--- !query schema
-struct<try_divide(INTERVAL '2' YEAR, 0):interval year to month>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(interval 2 second, 0)
--- !query schema
-struct<try_divide(INTERVAL '02' SECOND, 0):interval day to second>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(interval 2147483647 month, 0.5)
--- !query schema
-struct<try_divide(INTERVAL '2147483647' MONTH, 0.5):interval year to month>
--- !query output
-NULL
-
-
--- !query
-SELECT try_divide(interval 106751991 day, 0.5)
--- !query schema
-struct<try_divide(INTERVAL '106751991' DAY, 0.5):interval day to second>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(1, 1)
--- !query schema
-struct<try_subtract(1, 1):int>
--- !query output
-0
-
-
--- !query
-SELECT try_subtract(2147483647, -1)
--- !query schema
-struct<try_subtract(2147483647, -1):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(2147483647, decimal(-1))
--- !query schema
-struct<try_subtract(2147483647, -1):decimal(11,0)>
--- !query output
-2147483648
-
-
--- !query
-SELECT try_subtract(2147483647, "-1")
--- !query schema
-struct<try_subtract(2147483647, -1):bigint>
--- !query output
-2147483648
-
-
--- !query
-SELECT try_subtract(-2147483648, 1)
--- !query schema
-struct<try_subtract(-2147483648, 1):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(9223372036854775807L, -1)
--- !query schema
-struct<try_subtract(9223372036854775807, -1):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(-9223372036854775808L, 1)
--- !query schema
-struct<try_subtract(-9223372036854775808, 1):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(1, (2147483647 + 1))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 25,
-    "stopIndex" : 38,
-    "fragment" : "2147483647 + 1"
-  } ]
-}
-
-
--- !query
-SELECT try_subtract(1L, (9223372036854775807L + 1L))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 26,
-    "stopIndex" : 50,
-    "fragment" : "9223372036854775807L + 1L"
-  } ]
-}
-
-
--- !query
-SELECT try_subtract(1, 1.0 / 0.0)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "DIVIDE_BY_ZERO",
-  "sqlState" : "22012",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 24,
-    "stopIndex" : 32,
-    "fragment" : "1.0 / 0.0"
-  } ]
-}
-
-
--- !query
-SELECT try_subtract(interval 2 year, interval 3 year)
--- !query schema
-struct<try_subtract(INTERVAL '2' YEAR, INTERVAL '3' YEAR):interval year>
--- !query output
--1-0
-
-
--- !query
-SELECT try_subtract(interval 3 second, interval 2 second)
--- !query schema
-struct<try_subtract(INTERVAL '03' SECOND, INTERVAL '02' SECOND):interval 
second>
--- !query output
-0 00:00:01.000000000
-
-
--- !query
-SELECT try_subtract(interval 2147483647 month, interval -2 month)
--- !query schema
-struct<try_subtract(INTERVAL '2147483647' MONTH, INTERVAL '-2' MONTH):interval 
month>
--- !query output
-NULL
-
-
--- !query
-SELECT try_subtract(interval 106751991 day, interval -3 day)
--- !query schema
-struct<try_subtract(INTERVAL '106751991' DAY, INTERVAL '-3' DAY):interval day>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(2, 3)
--- !query schema
-struct<try_multiply(2, 3):int>
--- !query output
-6
-
-
--- !query
-SELECT try_multiply(2147483647, -2)
--- !query schema
-struct<try_multiply(2147483647, -2):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(2147483647, decimal(-2))
--- !query schema
-struct<try_multiply(2147483647, -2):decimal(21,0)>
--- !query output
--4294967294
-
-
--- !query
-SELECT try_multiply(2147483647, "-2")
--- !query schema
-struct<try_multiply(2147483647, -2):bigint>
--- !query output
--4294967294
-
-
--- !query
-SELECT try_multiply(-2147483648, 2)
--- !query schema
-struct<try_multiply(-2147483648, 2):int>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(9223372036854775807L, 2)
--- !query schema
-struct<try_multiply(9223372036854775807, 2):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(-9223372036854775808L, -2)
--- !query schema
-struct<try_multiply(-9223372036854775808, -2):bigint>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(1, (2147483647 + 1))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "integer overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 25,
-    "stopIndex" : 38,
-    "fragment" : "2147483647 + 1"
-  } ]
-}
-
-
--- !query
-SELECT try_multiply(1L, (9223372036854775807L + 1L))
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "ARITHMETIC_OVERFLOW",
-  "sqlState" : "22003",
-  "messageParameters" : {
-    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
-    "config" : "\"spark.sql.ansi.enabled\"",
-    "message" : "long overflow"
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 26,
-    "stopIndex" : 50,
-    "fragment" : "9223372036854775807L + 1L"
-  } ]
-}
-
-
--- !query
-SELECT try_multiply(1, 1.0 / 0.0)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.SparkArithmeticException
-{
-  "errorClass" : "DIVIDE_BY_ZERO",
-  "sqlState" : "22012",
-  "messageParameters" : {
-    "config" : "\"spark.sql.ansi.enabled\""
-  },
-  "queryContext" : [ {
-    "objectType" : "",
-    "objectName" : "",
-    "startIndex" : 24,
-    "stopIndex" : 32,
-    "fragment" : "1.0 / 0.0"
-  } ]
-}
-
-
--- !query
-SELECT try_multiply(interval 2 year, 2)
--- !query schema
-struct<try_multiply(INTERVAL '2' YEAR, 2):interval year to month>
--- !query output
-4-0
-
-
--- !query
-SELECT try_multiply(interval 2 second, 2)
--- !query schema
-struct<try_multiply(INTERVAL '02' SECOND, 2):interval day to second>
--- !query output
-0 00:00:04.000000000
-
-
--- !query
-SELECT try_multiply(interval 2 year, 0)
--- !query schema
-struct<try_multiply(INTERVAL '2' YEAR, 0):interval year to month>
--- !query output
-0-0
-
-
--- !query
-SELECT try_multiply(interval 2 second, 0)
--- !query schema
-struct<try_multiply(INTERVAL '02' SECOND, 0):interval day to second>
--- !query output
-0 00:00:00.000000000
-
-
--- !query
-SELECT try_multiply(interval 2147483647 month, 2)
--- !query schema
-struct<try_multiply(INTERVAL '2147483647' MONTH, 2):interval year to month>
--- !query output
-NULL
-
-
--- !query
-SELECT try_multiply(interval 106751991 day, 2)
--- !query schema
-struct<try_multiply(INTERVAL '106751991' DAY, 2):interval day to second>
--- !query output
-NULL


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to