This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4321ade7ee02 [SPARK-52715][SQL] Pretty SQL string of TIME - DAY-TIME
INTERVAL
4321ade7ee02 is described below
commit 4321ade7ee029ec4dcc8c1bef05a96f4cefac9b3
Author: Max Gekk <[email protected]>
AuthorDate: Wed Jul 9 07:38:13 2025 +0200
[SPARK-52715][SQL] Pretty SQL string of TIME - DAY-TIME INTERVAL
### What changes were proposed in this pull request?
In the PR, I propose to change the SQL representation of TIME and DAY-TIME
INTERVAL subtraction by wrapping it in `DatetimeSub`.
### Why are the changes needed?
To improve user experience with Spark SQL. Before the changes, subtraction
looked like:
```sql
TIME '12:30:00' + (- INTERVAL '12:29:59.000001' HOUR TO SECOND)
```
, and after the changes `+-` is replaced by just `-`:
```sql
TIME '12:30:00' - INTERVAL '12:29:59.000001' HOUR TO SECOND
```
### Does this PR introduce _any_ user-facing change?
No. The TIME data type hasn't been released yet.
### How was this patch tested?
By running the affected tests:
```
$ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z
time.sql"
```
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #51403 from MaxGekk/time-nice-subtract.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala | 2 +-
.../src/test/resources/sql-tests/analyzer-results/time.sql.out | 8 ++++----
sql/core/src/test/resources/sql-tests/results/time.sql.out | 8 ++++----
3 files changed, 9 insertions(+), 9 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
index ab6e4ea2b8b4..5c5cf8e303cd 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
@@ -123,7 +123,7 @@ object BinaryArithmeticWithDatetimeResolver {
)
)
case (_: TimeType, _: DayTimeIntervalType) =>
- TimeAddInterval(l, UnaryMinus(r, mode == EvalMode.ANSI))
+ DatetimeSub(l, r, TimeAddInterval(l, UnaryMinus(r, mode ==
EvalMode.ANSI)))
case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
Cast(DatetimeSub(l, r,
TimestampAddInterval(l, UnaryMinus(r, mode == EvalMode.ANSI))),
l.dataType)
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
index 68c78e617518..f35f342e22be 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
@@ -476,28 +476,28 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
-- !query
SELECT TIME'12:30' - INTERVAL '12:29:59.000001' HOUR TO SECOND
-- !query analysis
-Project [12:30:00 + -INTERVAL '12:29:59.000001' HOUR TO SECOND AS TIME
'12:30:00' + (- INTERVAL '12:29:59.000001' HOUR TO SECOND)#x]
+Project [12:30:00 - INTERVAL '12:29:59.000001' HOUR TO SECOND AS TIME
'12:30:00' - INTERVAL '12:29:59.000001' HOUR TO SECOND#x]
+- OneRowRelation
-- !query
SELECT '23:59:59.999999' :: TIME - INTERVAL '23:59:59.999999' HOUR TO SECOND
-- !query analysis
-Project [cast(23:59:59.999999 as time(6)) + -INTERVAL '23:59:59.999999' HOUR
TO SECOND AS CAST(23:59:59.999999 AS TIME(6)) + (- INTERVAL '23:59:59.999999'
HOUR TO SECOND)#x]
+Project [cast(23:59:59.999999 as time(6)) - INTERVAL '23:59:59.999999' HOUR TO
SECOND AS CAST(23:59:59.999999 AS TIME(6)) - INTERVAL '23:59:59.999999' HOUR TO
SECOND#x]
+- OneRowRelation
-- !query
SELECT '00:00:00.0001' :: TIME(4) - INTERVAL '0 00:00:00.0001' DAY TO SECOND
-- !query analysis
-Project [cast(00:00:00.0001 as time(4)) + -INTERVAL '0 00:00:00.0001' DAY TO
SECOND AS CAST(00:00:00.0001 AS TIME(4)) + (- INTERVAL '0 00:00:00.0001' DAY TO
SECOND)#x]
+Project [cast(00:00:00.0001 as time(4)) - INTERVAL '0 00:00:00.0001' DAY TO
SECOND AS CAST(00:00:00.0001 AS TIME(4)) - INTERVAL '0 00:00:00.0001' DAY TO
SECOND#x]
+- OneRowRelation
-- !query
SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR
-- !query analysis
-Project [cast(08:30 as time(0)) + -INTERVAL '06' HOUR AS CAST(08:30 AS
TIME(0)) + (- INTERVAL '06' HOUR)#x]
+Project [cast(08:30 as time(0)) - INTERVAL '06' HOUR AS CAST(08:30 AS TIME(0))
- INTERVAL '06' HOUR#x]
+- OneRowRelation
diff --git a/sql/core/src/test/resources/sql-tests/results/time.sql.out
b/sql/core/src/test/resources/sql-tests/results/time.sql.out
index 34abd9799729..cc532033eba8 100644
--- a/sql/core/src/test/resources/sql-tests/results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/time.sql.out
@@ -583,7 +583,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
-- !query
SELECT TIME'12:30' - INTERVAL '12:29:59.000001' HOUR TO SECOND
-- !query schema
-struct<TIME '12:30:00' + (- INTERVAL '12:29:59.000001' HOUR TO SECOND):time(6)>
+struct<TIME '12:30:00' - INTERVAL '12:29:59.000001' HOUR TO SECOND:time(6)>
-- !query output
00:00:00.999999
@@ -591,7 +591,7 @@ struct<TIME '12:30:00' + (- INTERVAL '12:29:59.000001' HOUR
TO SECOND):time(6)>
-- !query
SELECT '23:59:59.999999' :: TIME - INTERVAL '23:59:59.999999' HOUR TO SECOND
-- !query schema
-struct<CAST(23:59:59.999999 AS TIME(6)) + (- INTERVAL '23:59:59.999999' HOUR
TO SECOND):time(6)>
+struct<CAST(23:59:59.999999 AS TIME(6)) - INTERVAL '23:59:59.999999' HOUR TO
SECOND:time(6)>
-- !query output
00:00:00
@@ -599,7 +599,7 @@ struct<CAST(23:59:59.999999 AS TIME(6)) + (- INTERVAL
'23:59:59.999999' HOUR TO
-- !query
SELECT '00:00:00.0001' :: TIME(4) - INTERVAL '0 00:00:00.0001' DAY TO SECOND
-- !query schema
-struct<CAST(00:00:00.0001 AS TIME(4)) + (- INTERVAL '0 00:00:00.0001' DAY TO
SECOND):time(6)>
+struct<CAST(00:00:00.0001 AS TIME(4)) - INTERVAL '0 00:00:00.0001' DAY TO
SECOND:time(6)>
-- !query output
00:00:00
@@ -607,7 +607,7 @@ struct<CAST(00:00:00.0001 AS TIME(4)) + (- INTERVAL '0
00:00:00.0001' DAY TO SEC
-- !query
SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR
-- !query schema
-struct<CAST(08:30 AS TIME(0)) + (- INTERVAL '06' HOUR):time(0)>
+struct<CAST(08:30 AS TIME(0)) - INTERVAL '06' HOUR:time(0)>
-- !query output
02:30:00
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]