This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 62becf4750e0 [SPARK-52693][SQL] Support +/- ANSI day-time intervals to/from TIME
62becf4750e0 is described below

commit 62becf4750e0b517d9beae68577d21cf5dd7b2b2
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue Jul 8 08:30:31 2025 +0200

    [SPARK-52693][SQL] Support +/- ANSI day-time intervals to/from TIME

    ### What changes were proposed in this pull request?
    In the PR, I propose to support the `+` and `-` operators over TIME and DAY-TIME INTERVAL.

    #### Syntax
    ```
    exprA + exprB, exprB + exprA
    exprA - exprB
    ```
    where
    - **exprA** - an expression of the TIME data type of any valid precision [0, 6].
    - **exprB** - an expression of the DAY-TIME INTERVAL type with any start and end fields: `SECOND`, `MINUTE`, `HOUR`, `DAY`.

    #### Returns
    The result of the TIME(n) data type, or raises the error `DATETIME_OVERFLOW` if the result is out of the valid range `[00:00, 24:00)`. If the result is valid, its precision `n` is the maximum of the input time precision `m` and the day-time interval precision `i`: `n = max(m, i)`, where `i` = 6 for the end interval field `SECOND` and 0 for the other fields `MINUTE`, `HOUR`, `DAY`.

    ### Why are the changes needed?
    To conform to the ANSI SQL standard:
    <img width="867" alt="Screenshot 2025-07-07 at 09 41 49" src="https://github.com/user-attachments/assets/808a3bad-70a6-4c28-b23d-83e8399bd0e9" />

    ### Does this PR introduce _any_ user-facing change?
    No. The TIME data type hasn't been released yet.

    ### How was this patch tested?
    By running new tests and affected test suites:
    ```
    $ build/sbt "test:testOnly *DateTimeUtilsSuite"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z time.sql"
    ```

    ### Was this patch authored or co-authored using generative AI tooling?
    No.

    Closes #51383 from MaxGekk/time-add-interval.
Authored-by: Max Gekk <max.g...@gmail.com>
Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/util/DateTimeConstants.java |   1 +
 .../apache/spark/sql/types/AbstractDataType.scala  |   2 +-
 .../BinaryArithmeticWithDatetimeResolver.scala     |   6 ++
 .../sql/catalyst/expressions/timeExpressions.scala |  45 ++++++++-
 .../spark/sql/catalyst/util/DateTimeUtils.scala    |  27 +++++
 .../spark/sql/errors/QueryExecutionErrors.scala    |  17 +++-
 .../expressions/TimeExpressionsSuite.scala         |  33 +++++-
 .../sql/catalyst/util/DateTimeUtilsSuite.scala     |  46 ++++++++-
 .../sql-tests/analyzer-results/time.sql.out        | 101 +++++++++++++++++++
 .../src/test/resources/sql-tests/inputs/time.sql   |  10 ++
 .../test/resources/sql-tests/results/time.sql.out  | 112 +++++++++++++++++++++
 11 files changed, 394 insertions(+), 6 deletions(-)
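To illustrate the new behavior, a minimal sketch (not part of the commit; the queries and expected results are taken from the golden files in this patch, and an active `SparkSession` named `spark` on a build containing this change is assumed):

```
// Illustrative sketch only; queries and results come from the golden files below.
// TIME(4) plus an interval ending in SECOND: the result precision is max(4, 6) = 6.
spark.sql(
  "SELECT '12:43:33.1234' :: TIME(4) + INTERVAL '01:04:05.56' HOUR TO SECOND"
).show(truncate = false)
// prints 13:47:38.6834 (data type time(6), per the golden file)

// TIME(0) minus an interval ending in HOUR: the result precision is max(0, 0) = 0.
spark.sql("SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR").show(truncate = false)
// prints 02:30:00 (data type time(0))
```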
diff --git a/common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/DateTimeConstants.java b/common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/DateTimeConstants.java
index 0ae238564d59..d52207ad860c 100644
--- a/common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/DateTimeConstants.java
+++ b/common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/DateTimeConstants.java
@@ -45,4 +45,5 @@ public class DateTimeConstants {
   public static final long NANOS_PER_MICROS = 1000L;
   public static final long NANOS_PER_MILLIS = MICROS_PER_MILLIS * NANOS_PER_MICROS;
   public static final long NANOS_PER_SECOND = MILLIS_PER_SECOND * NANOS_PER_MILLIS;
+  public static final long NANOS_PER_DAY = MICROS_PER_DAY * NANOS_PER_MICROS;
 }
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index e2f29d6cdc36..a87482e88139 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -181,7 +181,7 @@ private[spark] object AnsiIntervalType extends AbstractDataType {
  */
 private[sql] abstract class AnyTimeType extends DatetimeType
 
-private[spark] object AnyTimeType extends AbstractDataType {
+private[spark] object AnyTimeType extends AbstractDataType with Serializable {
   override private[sql] def simpleString: String = "time"
 
   override private[sql] def acceptsType(other: DataType): Boolean =
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
index 7302cfcf969b..ab6e4ea2b8b4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
@@ -39,6 +39,7 @@ import org.apache.spark.sql.catalyst.expressions.{
   Subtract,
   SubtractDates,
   SubtractTimestamps,
+  TimeAddInterval,
   TimestampAddInterval,
   TimestampAddYMInterval,
   UnaryMinus
@@ -53,6 +54,7 @@ import org.apache.spark.sql.types.{
   StringType,
   TimestampNTZType,
   TimestampType,
+  TimeType,
   YearMonthIntervalType
 }
 import org.apache.spark.sql.types.DayTimeIntervalType.DAY
@@ -80,6 +82,8 @@ object BinaryArithmeticWithDatetimeResolver {
         a.copy(right = Cast(a.right, a.left.dataType))
       case (DateType, CalendarIntervalType) =>
         DateAddInterval(l, r, ansiEnabled = mode == EvalMode.ANSI)
+      case (_: TimeType, _: DayTimeIntervalType) => TimeAddInterval(l, r)
+      case (_: DayTimeIntervalType, _: TimeType) => TimeAddInterval(r, l)
       case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
         Cast(TimestampAddInterval(l, r), l.dataType)
       case (CalendarIntervalType, DateType) =>
@@ -118,6 +122,8 @@ object BinaryArithmeticWithDatetimeResolver {
             ansiEnabled = mode == EvalMode.ANSI
           )
         )
+      case (_: TimeType, _: DayTimeIntervalType) =>
+        TimeAddInterval(l, UnaryMinus(r, mode == EvalMode.ANSI))
       case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
         Cast(DatetimeSub(l, r, TimestampAddInterval(l, UnaryMinus(r, mode == EvalMode.ANSI))),
           l.dataType)
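The two hunks above make `+` commutative over TIME and DAY-TIME INTERVAL and rewrite `time - interval` as `time + (-interval)`. A self-contained toy model of that mapping (simplified, hypothetical names; not the production classes):

```
// Toy model of the rewrite in BinaryArithmeticWithDatetimeResolver above; illustrative only.
sealed trait Expr
case class TimeExpr(name: String) extends Expr
case class IntervalExpr(name: String) extends Expr
case class TimeAddIntervalExpr(time: Expr, interval: Expr) extends Expr
case class NegateExpr(child: Expr) extends Expr

def resolvePlus(l: Expr, r: Expr): Expr = (l, r) match {
  case (t: TimeExpr, i: IntervalExpr) => TimeAddIntervalExpr(t, i)
  case (i: IntervalExpr, t: TimeExpr) => TimeAddIntervalExpr(t, i) // '+' commutes
  case _ => sys.error("unsupported operand types")
}

def resolveMinus(l: Expr, r: Expr): Expr = (l, r) match {
  // Only time - interval is supported; it becomes time + (-interval).
  case (t: TimeExpr, i: IntervalExpr) => TimeAddIntervalExpr(t, NegateExpr(i))
  case _ => sys.error("unsupported operand types")
}
```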
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
index 3a1cce518b23..8e0e53dff2a2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.time.DateTimeException
 import java.util.Locale
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess}
@@ -32,7 +33,8 @@ import org.apache.spark.sql.catalyst.util.TimeFormatter
 import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.internal.types.StringTypeWithCollation
-import org.apache.spark.sql.types.{AbstractDataType, AnyTimeType, DataType, DecimalType, IntegerType, ObjectType, TimeType}
+import org.apache.spark.sql.types.{AbstractDataType, AnyTimeType, ByteType, DataType, DayTimeIntervalType, DecimalType, IntegerType, ObjectType, TimeType}
+import org.apache.spark.sql.types.DayTimeIntervalType.SECOND
 import org.apache.spark.unsafe.types.UTF8String
 
 /**
@@ -563,3 +565,44 @@ case class MakeTime(
   override protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]): MakeTime =
     copy(hours = newChildren(0), minutes = newChildren(1), secsAndMicros = newChildren(2))
 }
+
+/**
+ * Adds a day-time interval to a time.
+ */
+case class TimeAddInterval(time: Expression, interval: Expression)
+  extends BinaryExpression with RuntimeReplaceable with ExpectsInputTypes {
+  override def nullIntolerant: Boolean = true
+
+  override def left: Expression = time
+  override def right: Expression = interval
+
+  override def toString: String = s"$left + $right"
+  override def sql: String = s"${left.sql} + ${right.sql}"
+  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType, DayTimeIntervalType)
+
+  override def replacement: Expression = {
+    val (timePrecision, intervalEndField) = (time.dataType, interval.dataType) match {
+      case (TimeType(p), DayTimeIntervalType(_, endField)) => (p, endField)
+      case _ => throw SparkException.internalError("Unexpected input types: " +
+        s"time type ${time.dataType.sql}, interval type ${interval.dataType.sql}.")
+    }
+    val intervalPrecision = if (intervalEndField < SECOND) {
+      TimeType.MIN_PRECISION
+    } else {
+      TimeType.MICROS_PRECISION
+    }
+    val targetPrecision = Math.max(timePrecision, intervalPrecision)
+    StaticInvoke(
+      classOf[DateTimeUtils.type],
+      TimeType(targetPrecision),
+      "timeAddInterval",
+      Seq(time, Literal(timePrecision), interval, Literal(intervalEndField),
+        Literal(targetPrecision)),
+      Seq(AnyTimeType, IntegerType, DayTimeIntervalType, ByteType, IntegerType),
+      propagateNull = nullIntolerant)
+  }
+
+  override protected def withNewChildrenInternal(
+      newTime: Expression, newInterval: Expression): TimeAddInterval =
+    copy(time = newTime, interval = newInterval)
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 503a7771c587..79f7d37738d2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -834,4 +834,31 @@ object DateTimeUtils extends SparkDateTimeUtils {
   def makeTimestampNTZ(days: Int, nanos: Long): Long = {
     localDateTimeToMicros(LocalDateTime.of(daysToLocalDate(days), nanosToLocalTime(nanos)))
   }
+
+  /**
+   * Adds a day-time interval to a time.
+   *
+   * @param time A time in nanoseconds.
+   * @param timePrecision The number of digits of the fraction part of time.
+   * @param interval A day-time interval in microseconds.
+   * @param intervalEndField The rightmost field of the interval.
+   *                         Valid values: 0 (DAY), 1 (HOUR), 2 (MINUTE), 3 (SECOND).
+   * @param targetPrecision The number of digits of the fraction part of the resulting time.
+   * @return A time value in nanoseconds, or throws an arithmetic overflow exception
+   *         if the result is out of the valid time range [00:00, 24:00).
+   */
+  def timeAddInterval(
+      time: Long,
+      timePrecision: Int,
+      interval: Long,
+      intervalEndField: Byte,
+      targetPrecision: Int): Long = {
+    val result = MathUtils.addExact(time, MathUtils.multiplyExact(interval, NANOS_PER_MICROS))
+    if (0 <= result && result < NANOS_PER_DAY) {
+      truncateTimeToPrecision(result, targetPrecision)
+    } else {
+      throw QueryExecutionErrors.timeAddIntervalOverflowError(
+        time, timePrecision, interval, intervalEndField)
+    }
+  }
 }
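As a worked example of the arithmetic in `timeAddInterval` above, a self-contained sketch (constants inlined; the real code uses `MathUtils.addExact`/`multiplyExact` and `truncateTimeToPrecision`, and raises `DATETIME_OVERFLOW` rather than an `IllegalArgumentException`):

```
// Minimal sketch of the overflow check; illustrative only.
val NANOS_PER_MICROS = 1000L
val NANOS_PER_DAY = 24L * 60 * 60 * 1000 * 1000 * 1000 // 86,400,000,000,000

def addIntervalToTimeNanos(timeNanos: Long, intervalMicros: Long): Long = {
  // The time operand is in nanoseconds, the interval in microseconds.
  val result = Math.addExact(timeNanos, Math.multiplyExact(intervalMicros, NANOS_PER_MICROS))
  // Reject results outside the valid time range [00:00, 24:00).
  require(0 <= result && result < NANOS_PER_DAY, "result is out of [00:00, 24:00)")
  result
}

addIntervalToTimeNanos(0L, 30L * 60 * 1000 * 1000)  // 00:00 + 30 minutes = 1,800,000,000,000 ns
// addIntervalToTimeNanos(NANOS_PER_DAY - 1000, 1)  // 23:59:59.999999 + 1 microsecond: fails
```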
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 7f623039778a..f1f7e9681f6c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -44,7 +44,7 @@ import org.apache.spark.sql.catalyst.plans.JoinType
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.ValueInterval
 import org.apache.spark.sql.catalyst.trees.{Origin, TreeNode}
-import org.apache.spark.sql.catalyst.util.{sideBySide, CharsetProvider, DateTimeUtils, FailFastMode, MapData}
+import org.apache.spark.sql.catalyst.util.{sideBySide, CharsetProvider, DateTimeUtils, FailFastMode, IntervalUtils, MapData}
 import org.apache.spark.sql.connector.catalog.{CatalogNotFoundException, Table, TableProvider}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 import org.apache.spark.sql.connector.expressions.Transform
@@ -2534,6 +2534,21 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
       summary = "")
   }
 
+  def timeAddIntervalOverflowError(
+      time: Long,
+      timePrecision: Int,
+      interval: Long,
+      intervalEndField: Byte): ArithmeticException = {
+    val i = toSQLValue(IntervalUtils.microsToDuration(interval),
+      DayTimeIntervalType(intervalEndField))
+    val t = toSQLValue(DateTimeUtils.nanosToLocalTime(time), TimeType(timePrecision))
+    new SparkArithmeticException(
+      errorClass = "DATETIME_OVERFLOW",
+      messageParameters = Map("operation" -> s"add $i to the time value $t"),
+      context = Array.empty,
+      summary = "")
+  }
+
   def invalidBucketFile(path: String): Throwable = {
     new SparkException(
       errorClass = "INVALID_BUCKET_FILE",
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeExpressionsSuite.scala
index 4220a7008404..36e0dac677bb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeExpressionsSuite.scala
@@ -17,14 +17,15 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import java.time.LocalTime
+import java.time.{Duration, LocalTime}
 
 import org.apache.spark.{SPARK_DOC_ROOT, SparkDateTimeException, SparkFunSuite}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLId, toSQLValue}
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
-import org.apache.spark.sql.types.{Decimal, DecimalType, IntegerType, StringType, TimeType}
+import org.apache.spark.sql.types.{DayTimeIntervalType, Decimal, DecimalType, IntegerType, StringType, TimeType}
+import org.apache.spark.sql.types.DayTimeIntervalType.{DAY, HOUR, SECOND}
 
 class TimeExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   test("ParseToTime") {
@@ -364,4 +365,32 @@ class TimeExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       (child: Expression) => SecondsOfTimeWithFraction(child).replacement,
       TimeType())
   }
+
+  test("Add ANSI day-time intervals to TIME") {
+    checkEvaluation(
+      TimeAddInterval(Literal.create(null, TimeType()), Literal(Duration.ofHours(1))),
+      null)
+    checkEvaluation(
+      TimeAddInterval(Literal(LocalTime.of(12, 30)), Literal(null, DayTimeIntervalType(SECOND))),
+      null)
+    checkEvaluation(
+      TimeAddInterval(Literal(LocalTime.of(8, 31)), Literal(Duration.ofMinutes(30))),
+      LocalTime.of(8, 31).plusMinutes(30))
+    // Maximum precision of TIME and DAY-TIME INTERVAL
+    assert(TimeAddInterval(
+      Literal(0L, TimeType(0)),
+      Literal(0L, DayTimeIntervalType(DAY))).dataType == TimeType(0))
+    assert(TimeAddInterval(
+      Literal(1L, TimeType(TimeType.MAX_PRECISION)),
+      Literal(1L, DayTimeIntervalType(HOUR))).dataType == TimeType(TimeType.MAX_PRECISION))
+    assert(TimeAddInterval(
+      Literal(2L, TimeType(TimeType.MIN_PRECISION)),
+      Literal(2L, DayTimeIntervalType(SECOND))).dataType == TimeType(TimeType.MICROS_PRECISION))
+    assert(TimeAddInterval(
+      Literal(3L, TimeType(TimeType.MAX_PRECISION)),
+      Literal(3L, DayTimeIntervalType(SECOND))).dataType == TimeType(TimeType.MAX_PRECISION))
+    checkConsistencyBetweenInterpretedAndCodegenAllowingException(
+      (time: Expression, interval: Expression) => TimeAddInterval(time, interval).replacement,
+      TimeType(), DayTimeIntervalType())
+  }
 }
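The precision assertions above follow the rule from the PR description, `n = max(m, i)`. A compact sketch of that rule with a hypothetical helper (the real logic lives in `TimeAddInterval.replacement`):

```
// Hypothetical helper mirroring the precision logic; illustrative only.
import org.apache.spark.sql.types.DayTimeIntervalType.{DAY, HOUR, SECOND}

// i = 0 when the interval's end field is coarser than SECOND, otherwise 6.
def targetPrecision(timePrecision: Int, intervalEndField: Byte): Int = {
  val intervalPrecision = if (intervalEndField < SECOND) 0 else 6
  math.max(timePrecision, intervalPrecision)
}

assert(targetPrecision(0, DAY) == 0)    // TIME(0) + INTERVAL DAY         -> time(0)
assert(targetPrecision(6, HOUR) == 6)   // TIME(6) + INTERVAL HOUR        -> time(6)
assert(targetPrecision(0, SECOND) == 6) // TIME(0) + INTERVAL ... SECOND  -> time(6)
assert(targetPrecision(6, SECOND) == 6) // TIME(6) + INTERVAL ... SECOND  -> time(6)
```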
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 293d740ed1a1..789fd22a67e0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -26,12 +26,15 @@ import java.util.concurrent.TimeUnit
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
 
-import org.apache.spark.{SparkDateTimeException, SparkFunSuite, SparkIllegalArgumentException}
+import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
 import org.apache.spark.sql.catalyst.util.RebaseDateTime.rebaseJulianToGregorianMicros
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLConf
+import org.apache.spark.sql.internal.SqlApiConf
+import org.apache.spark.sql.types.DayTimeIntervalType.{HOUR, SECOND}
 import org.apache.spark.sql.types.Decimal
 import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
 
@@ -1250,4 +1253,45 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     assert(truncateTimeToPrecision(localTime(23, 59, 59, 987654), 1) ==
       localTime(23, 59, 59, 900000))
   }
+
+  test("add day-time interval to time") {
+    assert(timeAddInterval(0, 0, 0, SECOND, 6) == localTime())
+    assert(timeAddInterval(0, 6, MICROS_PER_DAY - 1, SECOND, 6) ==
+      localTime(23, 59, 59, 999999))
+    assert(timeAddInterval(localTime(23, 59, 59, 999999), 0, -MICROS_PER_DAY + 1, SECOND, 6) ==
+      localTime(0, 0))
+    assert(timeAddInterval(localTime(12, 30, 43, 123400), 4, 10 * MICROS_PER_MINUTE, SECOND, 6) ==
+      localTime(12, 40, 43, 123400))
+    assert(timeAddInterval(localTime(19, 31, 45, 123450), 5, 6, SECOND, 6) ==
+      localTime(19, 31, 45, 123456))
+    assert(timeAddInterval(localTime(1, 2, 3, 1), 6, MICROS_PER_HOUR, HOUR, 6) ==
+      localTime(2, 2, 3, 1))
+
+    checkError(
+      exception = intercept[SparkArithmeticException] {
+        timeAddInterval(1, 6, MICROS_PER_DAY, SECOND, 6)
+      },
+      condition = "DATETIME_OVERFLOW",
+      parameters = Map("operation" ->
+        "add INTERVAL '86400' SECOND to the time value TIME '00:00:00.000000001'")
+    )
+    checkError(
+      exception = intercept[SparkArithmeticException] {
+        timeAddInterval(0, 0, -1, SECOND, 6)
+      },
+      condition = "DATETIME_OVERFLOW",
+      parameters = Map("operation" ->
+        "add INTERVAL '-00.000001' SECOND to the time value TIME '00:00:00'")
+    )
+    checkError(
+      exception = intercept[SparkArithmeticException] {
+        timeAddInterval(0, 0, Long.MaxValue, SECOND, 6)
+      },
+      condition = "ARITHMETIC_OVERFLOW",
+      parameters = Map(
+        "message" -> "long overflow",
+        "alternative" -> "",
+        "config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY))
+    )
+  }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
index 459f7b091541..68c78e617518 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
@@ -420,3 +420,101 @@ SELECT cast(time'11:59:59.999999' as time without time zone)
 -- !query analysis
 Project [cast(11:59:59.999999 as time(6)) AS CAST(TIME '11:59:59.999999' AS TIME(6))#x]
 +- OneRowRelation
+
+
+-- !query
+SELECT '12:43:33.1234' :: TIME(4) + INTERVAL '01:04:05.56' HOUR TO SECOND
+-- !query analysis
+Project [cast(12:43:33.1234 as time(4)) + INTERVAL '01:04:05.56' HOUR TO SECOND AS CAST(12:43:33.1234 AS TIME(4)) + INTERVAL '01:04:05.56' HOUR TO SECOND#x]
++- OneRowRelation
+
+
+-- !query
+SELECT TIME'08:30' + NULL
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "left" : "\"TIME(6)\"",
+    "right" : "\"VOID\"",
+    "sqlExpr" : "\"(TIME '08:30:00' + NULL)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 25,
+    "fragment" : "TIME'08:30' + NULL"
+  } ]
+}
+
+
+-- !query
+SELECT TIME'00:00:00.0101' + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "left" : "\"TIME(6)\"",
+    "right" : "\"INT\"",
+    "sqlExpr" : "\"(TIME '00:00:00.0101' + 1)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 30,
+    "fragment" : "TIME'00:00:00.0101' + 1"
+  } ]
+}
+
+
+-- !query
+SELECT TIME'12:30' - INTERVAL '12:29:59.000001' HOUR TO SECOND
+-- !query analysis
+Project [12:30:00 + -INTERVAL '12:29:59.000001' HOUR TO SECOND AS TIME '12:30:00' + (- INTERVAL '12:29:59.000001' HOUR TO SECOND)#x]
++- OneRowRelation
+
+
+-- !query
+SELECT '23:59:59.999999' :: TIME - INTERVAL '23:59:59.999999' HOUR TO SECOND
+-- !query analysis
+Project [cast(23:59:59.999999 as time(6)) + -INTERVAL '23:59:59.999999' HOUR TO SECOND AS CAST(23:59:59.999999 AS TIME(6)) + (- INTERVAL '23:59:59.999999' HOUR TO SECOND)#x]
++- OneRowRelation
+
+
+-- !query
+SELECT '00:00:00.0001' :: TIME(4) - INTERVAL '0 00:00:00.0001' DAY TO SECOND
+-- !query analysis
+Project [cast(00:00:00.0001 as time(4)) + -INTERVAL '0 00:00:00.0001' DAY TO SECOND AS CAST(00:00:00.0001 AS TIME(4)) + (- INTERVAL '0 00:00:00.0001' DAY TO SECOND)#x]
++- OneRowRelation
+
+
+-- !query
+SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR
+-- !query analysis
+Project [cast(08:30 as time(0)) + -INTERVAL '06' HOUR AS CAST(08:30 AS TIME(0)) + (- INTERVAL '06' HOUR)#x]
++- OneRowRelation
+
+
+-- !query
+SELECT '10:00:01' :: TIME(1) - INTERVAL '1' MONTH
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "left" : "\"TIME(1)\"",
+    "right" : "\"INTERVAL MONTH\"",
+    "sqlExpr" : "\"(CAST(10:00:01 AS TIME(1)) - INTERVAL '1' MONTH)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 49,
+    "fragment" : "'10:00:01' :: TIME(1) - INTERVAL '1' MONTH"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/inputs/time.sql b/sql/core/src/test/resources/sql-tests/inputs/time.sql
index fdb6317ec837..c24f4e616661 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/time.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/time.sql
@@ -72,3 +72,13 @@ select extract(SECOND FROM cast('09:08:01.987654' as time(6)));
 SELECT cast(cast('12:00' as time(0)) as time(2));
 SELECT cast(('23:59:59.001001' :: time(6)) as time(4));
 SELECT cast(time'11:59:59.999999' as time without time zone);
+
+-- +/- ANSI day-time intervals
+SELECT '12:43:33.1234' :: TIME(4) + INTERVAL '01:04:05.56' HOUR TO SECOND;
+SELECT TIME'08:30' + NULL;
+SELECT TIME'00:00:00.0101' + 1;
+SELECT TIME'12:30' - INTERVAL '12:29:59.000001' HOUR TO SECOND;
+SELECT '23:59:59.999999' :: TIME - INTERVAL '23:59:59.999999' HOUR TO SECOND;
+SELECT '00:00:00.0001' :: TIME(4) - INTERVAL '0 00:00:00.0001' DAY TO SECOND;
+SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR;
+SELECT '10:00:01' :: TIME(1) - INTERVAL '1' MONTH;
"messageParameters" : { + "left" : "\"TIME(6)\"", + "right" : "\"INT\"", + "sqlExpr" : "\"(TIME '00:00:00.0101' + 1)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "TIME'00:00:00.0101' + 1" + } ] +} + + +-- !query +SELECT TIME'12:30' - INTERVAL '12:29:59.000001' HOUR TO SECOND +-- !query schema +struct<TIME '12:30:00' + (- INTERVAL '12:29:59.000001' HOUR TO SECOND):time(6)> +-- !query output +00:00:00.999999 + + +-- !query +SELECT '23:59:59.999999' :: TIME - INTERVAL '23:59:59.999999' HOUR TO SECOND +-- !query schema +struct<CAST(23:59:59.999999 AS TIME(6)) + (- INTERVAL '23:59:59.999999' HOUR TO SECOND):time(6)> +-- !query output +00:00:00 + + +-- !query +SELECT '00:00:00.0001' :: TIME(4) - INTERVAL '0 00:00:00.0001' DAY TO SECOND +-- !query schema +struct<CAST(00:00:00.0001 AS TIME(4)) + (- INTERVAL '0 00:00:00.0001' DAY TO SECOND):time(6)> +-- !query output +00:00:00 + + +-- !query +SELECT '08:30' :: TIME(0) - INTERVAL '6' HOUR +-- !query schema +struct<CAST(08:30 AS TIME(0)) + (- INTERVAL '06' HOUR):time(0)> +-- !query output +02:30:00 + + +-- !query +SELECT '10:00:01' :: TIME(1) - INTERVAL '1' MONTH +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.ExtendedAnalysisException +{ + "errorClass" : "DATATYPE_MISMATCH.BINARY_OP_DIFF_TYPES", + "sqlState" : "42K09", + "messageParameters" : { + "left" : "\"TIME(1)\"", + "right" : "\"INTERVAL MONTH\"", + "sqlExpr" : "\"(CAST(10:00:01 AS TIME(1)) - INTERVAL '1' MONTH)\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "'10:00:01' :: TIME(1) - INTERVAL '1' MONTH" + } ] +} --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org