This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 12da3c5aa648 [SPARK-52596][SQL] Try make TIMESTAMP_NTZ from DATE and TIME
12da3c5aa648 is described below
commit 12da3c5aa64802a2a39c846629ea49aa3465f746
Author: Max Gekk <[email protected]>
AuthorDate: Mon Jun 30 07:22:12 2025 -0700
[SPARK-52596][SQL] Try make TIMESTAMP_NTZ from DATE and TIME
### What changes were proposed in this pull request?
In the PR, I propose to extend the `try_make_timestamp_ntz` function to accept date and time fields.
#### Syntax
```sql
try_make_timestamp_ntz(date[, time])
```
#### Arguments
- date: A date expression
- time: A time expression
#### Returns
A TIMESTAMP_NTZ.
#### Examples
```sql
> SELECT try_make_timestamp_ntz(DATE'2014-12-28', TIME'6:30:45.887');
2014-12-28 06:30:45.887
```
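NULL arguments propagate to a NULL result instead of raising an error, as exercised by the new tests added to `timestamp-ntz.sql` in this PR:
```sql
> SELECT try_make_timestamp_ntz(DATE'0001-01-01', NULL);
NULL
> SELECT try_make_timestamp_ntz(NULL, TIME'23:59:59.00001');
NULL
```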
### Why are the changes needed?
Users will be able to create a timestamp without time zone by combining a
time and a date.
### Does this PR introduce _any_ user-facing change?
No, it just extends the existing API.
### How was this patch tested?
By running the affected test suites:
```
$ build/sbt "test:testOnly *ExpressionInfoSuite"
$ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z
timestamp-ntz.sql"
```
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #51303 from MaxGekk/try_make_timestamp_ntz.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../catalyst/expressions/datetimeExpressions.scala | 14 +++-
.../analyzer-results/timestamp-ntz.sql.out | 67 +++++++++++++++++++
.../resources/sql-tests/inputs/timestamp-ntz.sql | 6 ++
.../sql-tests/results/timestamp-ntz.sql.out | 76 ++++++++++++++++++++++
4 files changed, 161 insertions(+), 2 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 048019877565..2f937f5fbf46 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2637,7 +2637,11 @@ object MakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
// scalastyle:off line.size.limit
@ExpressionDescription(
- usage = "_FUNC_(year, month, day, hour, min, sec) - Try to create local
date-time from year, month, day, hour, min, sec fields. The function returns
NULL on invalid inputs.",
+ usage = """
+ _FUNC_(year, month, day, hour, min, sec) - Try to create local date-time
from year, month, day, hour, min, sec fields. The function returns NULL on
invalid inputs.
+
+ _FUNC_(date, time) - Create a local date-time from date and time fields.
+ """,
arguments = """
Arguments:
* year - the year to represent, from 1 to 9999
@@ -2648,6 +2652,8 @@ object MakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
* sec - the second-of-minute and its micro-fraction to represent, from
0 to 60. If the sec argument equals to 60, the seconds field is set
to 0 and 1 minute is added to the final timestamp.
+ * date - a date to represent, from 0001-01-01 to 9999-12-31
+ * time - a local time to represent, from 00:00:00 to 23:59:59.999999
""",
examples = """
Examples:
@@ -2659,6 +2665,8 @@ object MakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
NULL
> SELECT _FUNC_(2024, 13, 22, 15, 30, 0);
NULL
+ > SELECT _FUNC_(DATE'2014-12-28', TIME'6:30:45.887');
+ 2014-12-28 06:30:45.887
""",
group = "datetime_funcs",
since = "4.0.0")
@@ -2666,7 +2674,9 @@ object MakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
object TryMakeTimestampNTZExpressionBuilder extends ExpressionBuilder {
override def build(funcName: String, expressions: Seq[Expression]): Expression = {
val numArgs = expressions.length
- if (numArgs == 6) {
+ if (numArgs == 2) {
+ TryEval(MakeTimestampNTZ(expressions(0), expressions(1)))
+ } else if (numArgs == 6) {
MakeTimestamp(
expressions(0),
expressions(1),
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
index 7fded2f15628..877a7439ade3 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp-ntz.sql.out
@@ -131,6 +131,73 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
}
+-- !query
+SELECT try_make_timestamp_ntz(make_date(2025, 6, 27), make_time(16, 08, 45.123456))
+-- !query analysis
+Project [try_make_timestamp_ntz(make_timestamp_ntz(make_date(2025, 6, 27, true), make_time(16, 8, cast(45.123456 as decimal(16,6))))) AS try_make_timestamp_ntz(make_timestamp_ntz(make_date(2025, 6, 27), make_time(16, 8, 45.123456)))#x]
++- OneRowRelation
+
+
+-- !query
+SELECT try_make_timestamp_ntz(NULL, TIME'23:59:59.00001')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+SELECT try_make_timestamp_ntz(DATE'0001-01-01', NULL)
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+SELECT try_make_timestamp_ntz('2018-11-17 13:33:33', TIME'0:0:0')
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"2018-11-17 13:33:33\"",
+ "inputType" : "\"STRING\"",
+ "paramIndex" : "first",
+ "requiredType" : "\"DATE\"",
+ "sqlExpr" : "\"make_timestamp_ntz(2018-11-17 13:33:33, TIME '00:00:00')\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 65,
+ "fragment" : "try_make_timestamp_ntz('2018-11-17 13:33:33', TIME'0:0:0')"
+ } ]
+}
+
+
+-- !query
+SELECT try_make_timestamp_ntz(DATE'2025-06-20', 10D)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"10.0\"",
+ "inputType" : "\"DOUBLE\"",
+ "paramIndex" : "second",
+ "requiredType" : "\"TIME\"",
+ "sqlExpr" : "\"make_timestamp_ntz(DATE '2025-06-20', 10.0)\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 52,
+ "fragment" : "try_make_timestamp_ntz(DATE'2025-06-20', 10D)"
+ } ]
+}
+
+
-- !query
SELECT convert_timezone('Europe/Moscow', 'America/Los_Angeles', timestamp_ntz'2022-01-01 00:00:00')
-- !query analysis
diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp-ntz.sql b/sql/core/src/test/resources/sql-tests/inputs/timestamp-ntz.sql
index bfbe551747a6..0f9902cc4fee 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/timestamp-ntz.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp-ntz.sql
@@ -21,6 +21,12 @@ SELECT make_timestamp_ntz(DATE'1970-01-01', NULL);
SELECT make_timestamp_ntz(timestamp_ntz'2018-11-17 13:33:33', TIME'0:0:0');
SELECT make_timestamp_ntz(DATE'2025-06-20', '0:0:0');
+SELECT try_make_timestamp_ntz(make_date(2025, 6, 27), make_time(16, 08, 45.123456));
+SELECT try_make_timestamp_ntz(NULL, TIME'23:59:59.00001');
+SELECT try_make_timestamp_ntz(DATE'0001-01-01', NULL);
+SELECT try_make_timestamp_ntz('2018-11-17 13:33:33', TIME'0:0:0');
+SELECT try_make_timestamp_ntz(DATE'2025-06-20', 10D);
+
SELECT convert_timezone('Europe/Moscow', 'America/Los_Angeles', timestamp_ntz'2022-01-01 00:00:00');
SELECT convert_timezone('Europe/Brussels', timestamp_ntz'2022-03-23 00:00:00');
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
index ad829bfe1f5d..aff9b5abffd3 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
@@ -155,6 +155,82 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
}
+-- !query
+SELECT try_make_timestamp_ntz(make_date(2025, 6, 27), make_time(16, 08, 45.123456))
+-- !query schema
+struct<try_make_timestamp_ntz(make_timestamp_ntz(make_date(2025, 6, 27), make_time(16, 8, 45.123456))):timestamp_ntz>
+-- !query output
+2025-06-27 16:08:45.123456
+
+
+-- !query
+SELECT try_make_timestamp_ntz(NULL, TIME'23:59:59.00001')
+-- !query schema
+struct<try_make_timestamp_ntz(make_timestamp_ntz(NULL, TIME '23:59:59.00001')):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+SELECT try_make_timestamp_ntz(DATE'0001-01-01', NULL)
+-- !query schema
+struct<try_make_timestamp_ntz(make_timestamp_ntz(DATE '0001-01-01', NULL)):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+SELECT try_make_timestamp_ntz('2018-11-17 13:33:33', TIME'0:0:0')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"2018-11-17 13:33:33\"",
+ "inputType" : "\"STRING\"",
+ "paramIndex" : "first",
+ "requiredType" : "\"DATE\"",
+ "sqlExpr" : "\"make_timestamp_ntz(2018-11-17 13:33:33, TIME '00:00:00')\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 65,
+ "fragment" : "try_make_timestamp_ntz('2018-11-17 13:33:33', TIME'0:0:0')"
+ } ]
+}
+
+
+-- !query
+SELECT try_make_timestamp_ntz(DATE'2025-06-20', 10D)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"10.0\"",
+ "inputType" : "\"DOUBLE\"",
+ "paramIndex" : "second",
+ "requiredType" : "\"TIME\"",
+ "sqlExpr" : "\"make_timestamp_ntz(DATE '2025-06-20', 10.0)\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 52,
+ "fragment" : "try_make_timestamp_ntz(DATE'2025-06-20', 10D)"
+ } ]
+}
+
+
-- !query
SELECT convert_timezone('Europe/Moscow', 'America/Los_Angeles', timestamp_ntz'2022-01-01 00:00:00')
-- !query schema