This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 059b395c8cbf [SPARK-52595][SQL] Rename the `TimeAdd` expression to `TimestampAddInterval`
059b395c8cbf is described below

commit 059b395c8cbfe1b0bdc614e6006939e3ac538b13
Author: Max Gekk <[email protected]>
AuthorDate: Sat Jun 28 20:38:18 2025 +0200

    [SPARK-52595][SQL] Rename the `TimeAdd` expression to `TimestampAddInterval`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to rename the following expressions and classes:
    1. TimeAdd -> TimestampAddInterval
    2. TimeAddResolver -> TimestampAddResolver
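    
    For illustration, a hedged usage sketch of the expression under its new name (the literal values here are illustrative and not part of this PR):
    ```
    import java.sql.Timestamp
    import org.apache.spark.sql.catalyst.expressions.{Literal, TimestampAddInterval}
    import org.apache.spark.sql.types.TimestampType
    import org.apache.spark.unsafe.types.CalendarInterval

    // Formerly TimeAdd(start, interval, timeZoneId).
    val start = Literal.create(Timestamp.valueOf("2016-01-29 10:00:00"), TimestampType)
    val interval = Literal(new CalendarInterval(1, 2, 123000L)) // 1 month, 2 days, 123 ms
    val expr = TimestampAddInterval(start, interval, Some("UTC"))
    val result = expr.eval() // microseconds since the epoch, as java.lang.Long
    ```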
    
    ### Why are the changes needed?
    The `TimeAdd` expression operates on TIMESTAMP values, not on TIME values. The current name makes naming a similar TIME expression much more difficult, and it might confuse other developers.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    By running the affected test suites:
    ```
    $ build/sbt "test:testOnly *ResolutionValidatorSuite"
    $ build/sbt "test:testOnly *DateExpressionsSuite"
    $ build/sbt "test:testOnly *ExpressionSQLBuilderSuite"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #51302 from MaxGekk/rename-timeadd.
    
    Authored-by: Max Gekk <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 .../spark/sql/catalyst/analysis/Analyzer.scala     |  4 +--
 .../analysis/AnsiStringPromotionTypeCoercion.scala |  4 +--
 .../BinaryArithmeticWithDatetimeResolver.scala     | 18 ++++++----
 .../catalyst/analysis/StreamingJoinHelper.scala    |  2 +-
 .../sql/catalyst/analysis/TypeCoercionHelper.scala |  5 +--
 .../resolver/BinaryArithmeticResolver.scala        |  2 +-
 .../analysis/resolver/ExpressionResolver.scala     |  6 ++--
 ...ddResolver.scala => TimestampAddResolver.scala} | 27 +++++++-------
 .../catalyst/expressions/datetimeExpressions.scala |  7 ++--
 .../spark/sql/catalyst/optimizer/expressions.scala |  2 +-
 .../resolver/ResolutionValidatorSuite.scala        |  6 ++--
 .../expressions/DateExpressionsSuite.scala         | 41 +++++++++++-----------
 .../expressions/ExpressionSQLBuilderSuite.scala    |  2 +-
 .../optimizer/PushFoldableIntoBranchesSuite.scala  |  4 +--
 .../window/WindowEvaluatorFactoryBase.scala        |  6 ++--
 15 files changed, 73 insertions(+), 63 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index f9272cc03735..a14efc0c5fb3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -519,7 +519,7 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
    * 1. if both side are interval, stays the same;
    * 2. else if one side is date and the other is interval,
    *    turns it to [[DateAddInterval]];
-   * 3. else if one side is interval, turns it to [[TimeAdd]];
+   * 3. else if one side is interval, turns it to [[TimestampAddInterval]];
    * 4. else if one side is date, turns it to [[DateAdd]] ;
    * 5. else stays the same.
    *
@@ -527,7 +527,7 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
    * 1. if both side are interval, stays the same;
    * 2. else if the left side is date and the right side is interval,
    *    turns it to [[DateAddInterval(l, -r)]];
-   * 3. else if the right side is an interval, turns it to [[TimeAdd(l, -r)]];
+   * 3. else if the right side is an interval, turns it to [[TimestampAddInterval(l, -r)]];
    * 4. else if one side is timestamp, turns it to [[SubtractTimestamps]];
    * 5. else if the right side is date, turns it to [[DateDiff]]/[[SubtractDates]];
    * 6. else if the left side is date, turns it to [[DateSub]];
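
For orientation, a hedged Scala sketch of the `Add` dispatch described in the comment above (simplified; the real rule also handles ANSI mode, year-month intervals, and string promotion, and `rewriteAdd` is a hypothetical helper name):

```
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{CalendarIntervalType, DateType}

// Illustrative only: a reduced version of the analyzer's rewrite for Add.
def rewriteAdd(l: Expression, r: Expression): Expression = (l.dataType, r.dataType) match {
  case (CalendarIntervalType, CalendarIntervalType) => Add(l, r)  // 1. both sides interval
  case (DateType, CalendarIntervalType) => DateAddInterval(l, r)  // 2. date + interval
  case (_, CalendarIntervalType) => TimestampAddInterval(l, r)    // 3. one side interval
  case (DateType, _) => DateAdd(l, r)                             // 4. one side date
  case _ => Add(l, r)                                             // 5. otherwise unchanged
}
```
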
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiStringPromotionTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiStringPromotionTypeCoercion.scala
index e7be95bc645e..f6d7a9605831 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiStringPromotionTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiStringPromotionTypeCoercion.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.{
   Literal,
   SubtractDates,
   SubtractTimestamps,
-  TimeAdd,
+  TimestampAddInterval,
   UnaryMinus,
   UnaryPositive
 }
@@ -77,7 +77,7 @@ object AnsiStringPromotionTypeCoercion {
       s.copy(left = Cast(s.left, DateType))
     case s @ SubtractDates(_, right @ StringTypeExpression(), _) =>
       s.copy(right = Cast(s.right, DateType))
-    case t @ TimeAdd(left @ StringTypeExpression(), _, _) =>
+    case t @ TimestampAddInterval(left @ StringTypeExpression(), _, _) =>
       t.copy(start = Cast(t.start, TimestampType))
     case t @ SubtractTimestamps(left @ StringTypeExpression(), _, _, _) =>
       t.copy(left = Cast(t.left, t.right.dataType))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
index 36a059b2b0f3..7302cfcf969b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/BinaryArithmeticWithDatetimeResolver.scala
@@ -39,7 +39,7 @@ import org.apache.spark.sql.catalyst.expressions.{
   Subtract,
   SubtractDates,
   SubtractTimestamps,
-  TimeAdd,
+  TimestampAddInterval,
   TimestampAddYMInterval,
   UnaryMinus
 }
@@ -62,9 +62,9 @@ object BinaryArithmeticWithDatetimeResolver {
     case a @ Add(l, r, mode) =>
       (l.dataType, r.dataType) match {
         case (DateType, DayTimeIntervalType(DAY, DAY)) => DateAdd(l, ExtractANSIIntervalDays(r))
-        case (DateType, _: DayTimeIntervalType) => TimeAdd(Cast(l, TimestampType), r)
+        case (DateType, _: DayTimeIntervalType) => TimestampAddInterval(Cast(l, TimestampType), r)
         case (DayTimeIntervalType(DAY, DAY), DateType) => DateAdd(r, ExtractANSIIntervalDays(l))
-        case (_: DayTimeIntervalType, DateType) => TimeAdd(Cast(r, TimestampType), l)
+        case (_: DayTimeIntervalType, DateType) => TimestampAddInterval(Cast(r, TimestampType), l)
         case (DateType, _: YearMonthIntervalType) => DateAddYMInterval(l, r)
         case (_: YearMonthIntervalType, DateType) => DateAddYMInterval(r, l)
         case (TimestampType | TimestampNTZType, _: YearMonthIntervalType) =>
@@ -80,10 +80,12 @@ object BinaryArithmeticWithDatetimeResolver {
           a.copy(right = Cast(a.right, a.left.dataType))
         case (DateType, CalendarIntervalType) =>
           DateAddInterval(l, r, ansiEnabled = mode == EvalMode.ANSI)
-        case (_, CalendarIntervalType | _: DayTimeIntervalType) => Cast(TimeAdd(l, r), l.dataType)
+        case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
+          Cast(TimestampAddInterval(l, r), l.dataType)
         case (CalendarIntervalType, DateType) =>
           DateAddInterval(r, l, ansiEnabled = mode == EvalMode.ANSI)
-        case (CalendarIntervalType | _: DayTimeIntervalType, _) => Cast(TimeAdd(r, l), r.dataType)
+        case (CalendarIntervalType | _: DayTimeIntervalType, _) =>
+          Cast(TimestampAddInterval(r, l), r.dataType)
         case (DateType, dt) if dt != StringType => DateAdd(l, r)
         case (dt, DateType) if dt != StringType => DateAdd(r, l)
         case _ => a
@@ -93,7 +95,8 @@ object BinaryArithmeticWithDatetimeResolver {
         case (DateType, DayTimeIntervalType(DAY, DAY)) =>
           DateAdd(l, UnaryMinus(ExtractANSIIntervalDays(r), mode == EvalMode.ANSI))
         case (DateType, _: DayTimeIntervalType) =>
-          DatetimeSub(l, r, TimeAdd(Cast(l, TimestampType), UnaryMinus(r, mode == EvalMode.ANSI)))
+          DatetimeSub(l, r,
+            TimestampAddInterval(Cast(l, TimestampType), UnaryMinus(r, mode == EvalMode.ANSI)))
         case (DateType, _: YearMonthIntervalType) =>
           DatetimeSub(l, r, DateAddYMInterval(l, UnaryMinus(r, mode == EvalMode.ANSI)))
         case (TimestampType | TimestampNTZType, _: YearMonthIntervalType) =>
@@ -116,7 +119,8 @@ object BinaryArithmeticWithDatetimeResolver {
             )
           )
         case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
-          Cast(DatetimeSub(l, r, TimeAdd(l, UnaryMinus(r, mode == EvalMode.ANSI))), l.dataType)
+          Cast(DatetimeSub(l, r,
+            TimestampAddInterval(l, UnaryMinus(r, mode == EvalMode.ANSI))), l.dataType)
         case _
           if AnyTimestampTypeExpression.unapply(l) ||
             AnyTimestampTypeExpression.unapply(r) =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
index ef425be42f98..0e61b9cce681 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
@@ -239,7 +239,7 @@ object StreamingJoinHelper extends PredicateHelper with Logging {
           collect(left, negate) ++ collect(right, negate)
         case Subtract(left, right, _) =>
           collect(left, negate) ++ collect(right, !negate)
-        case TimeAdd(left, right, _) =>
+        case TimestampAddInterval(left, right, _) =>
           collect(left, negate) ++ collect(right, negate)
         case DatetimeSub(_, _, child) => collect(child, negate)
         case UnaryMinus(child, _) =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionHelper.scala
index 390ff2f3114d..af42fa60604f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionHelper.scala
@@ -51,7 +51,7 @@ import org.apache.spark.sql.catalyst.expressions.{
   SpecialFrameBoundary,
   SpecifiedWindowFrame,
   SubtractTimestamps,
-  TimeAdd,
+  TimestampAddInterval,
   WindowSpecDefinition
 }
 import org.apache.spark.sql.catalyst.expressions.aggregate.{Average, Sum}
@@ -700,7 +700,8 @@ abstract class TypeCoercionHelper {
         val newRight = castIfNotSameType(s.right, TimestampNTZType)
         s.copy(left = newLeft, right = newRight)
 
-      case t @ TimeAdd(StringTypeExpression(), _, _) => t.copy(start = Cast(t.start, TimestampType))
+      case t @ TimestampAddInterval(StringTypeExpression(), _, _) =>
+        t.copy(start = Cast(t.start, TimestampType))
 
       case other => other
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/BinaryArithmeticResolver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/BinaryArithmeticResolver.scala
index d88a450a5acd..00e5d2347150 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/BinaryArithmeticResolver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/BinaryArithmeticResolver.scala
@@ -63,7 +63,7 @@ import org.apache.spark.sql.types.{DateType, StringType}
  *
  *     Cast(
  *         DatetimeSub(
- *             TimeAdd(
+ *             TimestampAddInterval(
  *                 Literal('4 11:11', StringType),
  *                 UnaryMinus(
  *                     Literal(Interval('4 22:12' DAY TO MINUTE), DayTimeIntervalType(0,2))
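
A hedged note on the scaladoc example above: a query of roughly this shape triggers the rewrite (assuming a SparkSession named `spark`; the literals are illustrative):

```
// Illustrative only: string minus day-time interval resolves through the tree above.
spark.sql("SELECT '4 11:11' - INTERVAL '4 22:12' DAY TO MINUTE")
```
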
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolver.scala
index cfeaa54a39f6..471405c7b0c6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolver.scala
@@ -136,7 +136,7 @@ class ExpressionResolver(
     aggregateExpressionResolver,
     binaryArithmeticResolver
   )
-  private val timeAddResolver = new TimeAddResolver(this)
+  private val timestampAddResolver = new TimestampAddResolver(this)
   private val unaryMinusResolver = new UnaryMinusResolver(this)
   private val subqueryExpressionResolver = new SubqueryExpressionResolver(this, resolver)
   private val ordinalResolver = new OrdinalResolver(this)
@@ -262,8 +262,8 @@ class ExpressionResolver(
             subqueryExpressionResolver.resolveScalarSubquery(unresolvedScalarSubquery)
           case unresolvedListQuery: ListQuery =>
             subqueryExpressionResolver.resolveListQuery(unresolvedListQuery)
-          case unresolvedTimeAdd: TimeAdd =>
-            timeAddResolver.resolve(unresolvedTimeAdd)
+          case unresolvedTimestampAdd: TimestampAddInterval =>
+            timestampAddResolver.resolve(unresolvedTimestampAdd)
           case unresolvedUnaryMinus: UnaryMinus =>
             unaryMinusResolver.resolve(unresolvedUnaryMinus)
           case createNamedStruct: CreateNamedStruct =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimeAddResolver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimestampAddResolver.scala
similarity index 74%
rename from sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimeAddResolver.scala
rename to sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimestampAddResolver.scala
index 561e921d95dd..3dc665a6d88b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimeAddResolver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/TimestampAddResolver.scala
@@ -23,38 +23,39 @@ import org.apache.spark.sql.catalyst.analysis.{
   StringPromotionTypeCoercion,
   TypeCoercion
 }
-import org.apache.spark.sql.catalyst.expressions.{Expression, TimeAdd}
+import org.apache.spark.sql.catalyst.expressions.{Expression, TimestampAddInterval}
 
 /**
- * Helper resolver for [[TimeAdd]] which is produced by resolving [[BinaryArithmetic]] nodes.
+ * Helper resolver for [[TimestampAddInterval]] which is produced by resolving [[BinaryArithmetic]]
+ * nodes.
  */
-class TimeAddResolver(expressionResolver: ExpressionResolver)
-    extends TreeNodeResolver[TimeAdd, Expression]
+class TimestampAddResolver(expressionResolver: ExpressionResolver)
+    extends TreeNodeResolver[TimestampAddInterval, Expression]
     with ResolvesExpressionChildren
     with CoercesExpressionTypes {
 
   private val traversals = expressionResolver.getExpressionTreeTraversals
 
   protected override val ansiTransformations: CoercesExpressionTypes.Transformations =
-    TimeAddResolver.ANSI_TYPE_COERCION_TRANSFORMATIONS
+    TimestampAddResolver.ANSI_TYPE_COERCION_TRANSFORMATIONS
   protected override val nonAnsiTransformations: CoercesExpressionTypes.Transformations =
-    TimeAddResolver.TYPE_COERCION_TRANSFORMATIONS
+    TimestampAddResolver.TYPE_COERCION_TRANSFORMATIONS
 
-  override def resolve(unresolvedTimeAdd: TimeAdd): Expression = {
-    val timeAddWithResolvedChildren =
-      withResolvedChildren(unresolvedTimeAdd, expressionResolver.resolve _)
-    val timeAddWithTypeCoercion: Expression = coerceExpressionTypes(
-      expression = timeAddWithResolvedChildren,
+  override def resolve(unresolvedTimestampAdd: TimestampAddInterval): Expression = {
+    val timestampAddWithResolvedChildren =
+      withResolvedChildren(unresolvedTimestampAdd, expressionResolver.resolve _)
+    val timestampAddWithTypeCoercion: Expression = coerceExpressionTypes(
+      expression = timestampAddWithResolvedChildren,
       expressionTreeTraversal = traversals.current
     )
     TimezoneAwareExpressionResolver.resolveTimezone(
-      timeAddWithTypeCoercion,
+      timestampAddWithTypeCoercion,
       traversals.current.sessionLocalTimeZone
     )
   }
 }
 
-object TimeAddResolver {
+object TimestampAddResolver {
   // Ordering in the list of type coercions should be in sync with the list in [[TypeCoercion]].
   private val TYPE_COERCION_TRANSFORMATIONS: Seq[Expression => Expression] = Seq(
     StringPromotionTypeCoercion.apply,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 9d8ee718b1d2..048019877565 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1647,7 +1647,10 @@ case class NextDay(
 /**
  * Adds an interval to timestamp.
  */
-case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[String] = None)
+case class TimestampAddInterval(
+    start: Expression,
+    interval: Expression,
+    timeZoneId: Option[String] = None)
   extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes {
   override def nullIntolerant: Boolean = true
 
@@ -1690,7 +1693,7 @@ case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[S
   }
 
   override protected def withNewChildrenInternal(
-      newLeft: Expression, newRight: Expression): TimeAdd =
+      newLeft: Expression, newRight: Expression): TimestampAddInterval =
     copy(start = newLeft, interval = newRight)
 }
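
As a hedged pointer for the definition above: adding a calendar or day-time interval to a timestamp resolves to this node during analysis (assuming a SparkSession named `spark`; the literals are illustrative):

```
// Illustrative only; the analyzer rewrites the addition to TimestampAddInterval.
spark.sql("SELECT timestamp'2025-06-28 00:00:00' + INTERVAL '1 02:03' DAY TO MINUTE")
```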
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
index b3236bbfa375..ad90b51d7fc3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
@@ -726,7 +726,7 @@ object SupportedBinaryExpr {
     case _: BinaryArithmetic => Some(expr, expr.children.head, expr.children.last)
     case _: BinaryMathExpression => Some(expr, expr.children.head, expr.children.last)
     case _: AddMonths | _: DateAdd | _: DateAddInterval | _: DateDiff | _: DateSub |
-         _: DateAddYMInterval | _: TimestampAddYMInterval | _: TimeAdd =>
+         _: DateAddYMInterval | _: TimestampAddYMInterval | _: TimestampAddInterval =>
       Some(expr, expr.children.head, expr.children.last)
     case _: FindInSet | _: RoundBase => Some(expr, expr.children.head, expr.children.last)
     case BinaryPredicate(expr) =>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/resolver/ResolutionValidatorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/resolver/ResolutionValidatorSuite.scala
index 913de4b5a19f..94b954c9b9a0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/resolver/ResolutionValidatorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/resolver/ResolutionValidatorSuite.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.expressions.{
   GreaterThan,
   Literal,
   NamedExpression,
-  TimeAdd
+  TimestampAddInterval
 }
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, LocalRelation, LogicalPlan, Project}
 import org.apache.spark.sql.types.{
@@ -188,7 +188,7 @@ class ResolutionValidatorSuite extends SparkFunSuite with SQLConfHelper {
       Project(
         projectList = Seq(
           Alias(
-            child = TimeAdd(
+            child = TimestampAddInterval(
               start = Cast(
                 child = Literal("2024-10-01"),
                 dataType = TimestampType,
@@ -205,7 +205,7 @@ class ResolutionValidatorSuite extends SparkFunSuite with SQLConfHelper {
         ),
         child = LocalRelation(output = colInteger)
       ),
-      error = Some("TimezoneId is not set for TimeAdd")
+      error = Some("TimezoneId is not set for TimestampAddInterval")
     )
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 23763e1769f6..02edd3695270 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -483,32 +483,33 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
         val timeZoneId = Option(zid.getId)
         sdf.setTimeZone(TimeZone.getTimeZone(zid))
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             timestampLiteral("2016-01-29 10:00:00.000", sdf, dt),
             Literal(new CalendarInterval(1, 2, 123000L)),
             timeZoneId),
           timestampAnswer("2016-03-02 10:00:00.123", sdf, dt))
 
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             Literal.create(null, dt),
             Literal(new CalendarInterval(1, 2, 123000L)),
             timeZoneId),
           null)
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             timestampLiteral("2016-01-29 10:00:00.000", sdf, dt),
             Literal.create(null, CalendarIntervalType),
             timeZoneId),
           null)
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             Literal.create(null, dt),
             Literal.create(null, CalendarIntervalType),
             timeZoneId),
           null)
         checkConsistencyBetweenInterpretedAndCodegen(
-          (start: Expression, interval: Expression) => TimeAdd(start, interval, timeZoneId),
+          (start: Expression, interval: Expression) =>
+            TimestampAddInterval(start, interval, timeZoneId),
           dt, CalendarIntervalType)
       }
     }
@@ -521,28 +522,28 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       sdf.setTimeZone(TimeZone.getTimeZone(zid))
 
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal(new Timestamp(sdf.parse("2016-03-31 10:00:00.000").getTime)),
           UnaryMinus(Literal(new CalendarInterval(1, 0, 0))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-29 10:00:00.000").getTime)))
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal(new Timestamp(sdf.parse("2016-03-31 10:00:00.000").getTime)),
           UnaryMinus(Literal(new CalendarInterval(1, 1, 0))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-28 10:00:00.000").getTime)))
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal(new Timestamp(sdf.parse("2016-03-30 00:00:01.000").getTime)),
           UnaryMinus(Literal(new CalendarInterval(1, 0, 2000000.toLong))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-28 23:59:59.000").getTime)))
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal(new Timestamp(sdf.parse("2016-03-30 00:00:01.000").getTime)),
           UnaryMinus(Literal(new CalendarInterval(1, 1, 2000000.toLong))),
           timeZoneId),
@@ -550,25 +551,25 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
           new Timestamp(sdf.parse("2016-02-27 23:59:59.000").getTime)))
 
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal.create(null, TimestampType),
           UnaryMinus(Literal(new CalendarInterval(1, 2, 123000L))),
           timeZoneId),
         null)
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal(new Timestamp(sdf.parse("2016-01-29 10:00:00.000").getTime)),
           UnaryMinus(Literal.create(null, CalendarIntervalType)),
           timeZoneId),
         null)
       checkEvaluation(
-        TimeAdd(
+        TimestampAddInterval(
           Literal.create(null, TimestampType),
           UnaryMinus(Literal.create(null, CalendarIntervalType)),
           timeZoneId),
         null)
       checkConsistencyBetweenInterpretedAndCodegen((start: Expression, interval: Expression) =>
-        TimeAdd(start, UnaryMinus(interval), timeZoneId),
+        TimestampAddInterval(start, UnaryMinus(interval), timeZoneId),
         TimestampType, CalendarIntervalType)
     }
   }
@@ -1800,13 +1801,13 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
         val timeZoneId = Option(zid.getId)
         sdf.setTimeZone(TimeZone.getTimeZone(zid))
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             timestampLiteral("2021-01-01 00:00:00.123", sdf, dt),
             Literal(Duration.ofDays(10).plusMinutes(10).plusMillis(321)),
             timeZoneId),
           timestampAnswer("2021-01-11 00:10:00.444", sdf, dt))
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             timestampLiteral("2021-01-01 00:10:00.123", sdf, dt),
             Literal(Duration.ofDays(-10).minusMinutes(9).minusMillis(120)),
             timeZoneId),
@@ -1814,7 +1815,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
 
         val e = intercept[Exception] {
           checkEvaluation(
-            TimeAdd(
+            TimestampAddInterval(
               timestampLiteral("2021-01-01 00:00:00.123", sdf, dt),
               Literal(Duration.of(Long.MaxValue, ChronoUnit.MICROS)),
               timeZoneId),
@@ -1824,26 +1825,26 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
         assert(e.getMessage.contains("long overflow"))
 
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             Literal.create(null, dt),
             Literal(Duration.ofDays(1)),
             timeZoneId),
           null)
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             timestampLiteral("2021-01-01 00:00:00.123", sdf, dt),
             Literal.create(null, DayTimeIntervalType()),
             timeZoneId),
           null)
         checkEvaluation(
-          TimeAdd(
+          TimestampAddInterval(
             Literal.create(null, dt),
             Literal.create(null, DayTimeIntervalType()),
             timeZoneId),
           null)
         dayTimeIntervalTypes.foreach { it =>
           checkConsistencyBetweenInterpretedAndCodegen((ts: Expression, interval: Expression) =>
-            TimeAdd(ts, interval, timeZoneId), dt, it)
+            TimestampAddInterval(ts, interval, timeZoneId), dt, it)
         }
       }
     }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
index e88b0e32e908..4ca6bc4c4541 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
@@ -169,7 +169,7 @@ class ExpressionSQLBuilderSuite extends SparkFunSuite {
     val interval = Literal(new CalendarInterval(0, 0, MICROS_PER_HOUR))
 
     checkSQL(
-      TimeAdd($"a", interval),
+      TimestampAddInterval($"a", interval),
       "a + INTERVAL '1 hours'"
     )
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
index d88db82d94dd..6bd0b43790e1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
@@ -289,7 +289,7 @@ class PushFoldableIntoBranchesSuite extends PlanTest {
         Literal.create(Period.ofMonths(2), YearMonthIntervalType()))),
       If(a, Literal.create(Timestamp.valueOf("2021-02-01 00:00:00"), TimestampType),
         Literal.create(Timestamp.valueOf("2021-03-01 00:00:00"), TimestampType)))
-    assertEquivalent(TimeAdd(
+    assertEquivalent(TimestampAddInterval(
       Literal.create(Timestamp.valueOf("2021-01-01 00:00:00.000"), TimestampType),
       If(a, Literal(Duration.ofDays(10).plusMinutes(10).plusMillis(321)),
         Literal(Duration.ofDays(10).plusMinutes(10).plusMillis(456)))),
@@ -329,7 +329,7 @@ class PushFoldableIntoBranchesSuite extends PlanTest {
         (c, Literal.create(Period.ofMonths(2), YearMonthIntervalType()))), None)),
       CaseWhen(Seq((a, Literal.create(Timestamp.valueOf("2021-02-01 00:00:00"), TimestampType)),
         (c, Literal.create(Timestamp.valueOf("2021-03-01 00:00:00"), TimestampType))), None))
-    assertEquivalent(TimeAdd(
+    assertEquivalent(TimestampAddInterval(
       Literal.create(Timestamp.valueOf("2021-01-01 00:00:00.000"), TimestampType),
       CaseWhen(Seq((a, Literal(Duration.ofDays(10).plusMinutes(10).plusMillis(321))),
         (c, Literal(Duration.ofDays(10).plusMinutes(10).plusMillis(456)))), None)),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/window/WindowEvaluatorFactoryBase.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/window/WindowEvaluatorFactoryBase.scala
index 7d13dbbe2a06..c2dedda832e2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/window/WindowEvaluatorFactoryBase.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/window/WindowEvaluatorFactoryBase.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.expressions.{Add, AggregateWindowFunction, Ascending, Attribute, BoundReference, CurrentRow, DateAdd, DateAddYMInterval, DecimalAddNoOverflowCheck, Descending, Expression, ExtractANSIIntervalDays, FrameLessOffsetWindowFunction, FrameType, IdentityProjection, IntegerLiteral, MutableProjection, NamedExpression, OffsetWindowFunction, PythonFuncExpression, RangeFrame, RowFrame, RowOrdering, SortOrder, SpecifiedWindowFrame, TimeAdd, TimestampAddYMInterval, [...]
+import org.apache.spark.sql.catalyst.expressions.{Add, AggregateWindowFunction, Ascending, Attribute, BoundReference, CurrentRow, DateAdd, DateAddYMInterval, DecimalAddNoOverflowCheck, Descending, Expression, ExtractANSIIntervalDays, FrameLessOffsetWindowFunction, FrameType, IdentityProjection, IntegerLiteral, MutableProjection, NamedExpression, OffsetWindowFunction, PythonFuncExpression, RangeFrame, RowFrame, RowOrdering, SortOrder, SpecifiedWindowFrame, TimestampAddInterval, TimestampA [...]
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
 import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.sql.internal.SQLConf
@@ -105,11 +105,11 @@ trait WindowEvaluatorFactoryBase {
           case (DateType, DayTimeIntervalType(DAY, DAY)) =>
             DateAdd(expr, ExtractANSIIntervalDays(boundOffset))
           case (TimestampType | TimestampNTZType, CalendarIntervalType) =>
-            TimeAdd(expr, boundOffset, Some(timeZone))
+            TimestampAddInterval(expr, boundOffset, Some(timeZone))
           case (TimestampType | TimestampNTZType, _: YearMonthIntervalType) =>
             TimestampAddYMInterval(expr, boundOffset, Some(timeZone))
           case (TimestampType | TimestampNTZType, _: DayTimeIntervalType) =>
-            TimeAdd(expr, boundOffset, Some(timeZone))
+            TimestampAddInterval(expr, boundOffset, Some(timeZone))
           case (d: DecimalType, _: DecimalType) => DecimalAddNoOverflowCheck(expr, boundOffset, d)
           case (a, b) if a == b => Add(expr, boundOffset)
         }
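
For orientation, a hedged sketch of the bound expression built above for a timestamp ordering column (the variable names and values here are illustrative, not taken from the file):

```
import org.apache.spark.sql.catalyst.expressions.{BoundReference, Literal, TimestampAddInterval}
import org.apache.spark.sql.types.TimestampType
import org.apache.spark.unsafe.types.CalendarInterval

// For a RANGE frame ordered by a TimestampType column, the frame bound is the
// ordering value plus the offset interval, matching the cases above.
val orderingExpr = BoundReference(0, TimestampType, nullable = true)
val boundOffset = Literal(new CalendarInterval(0, 0, 3600000000L)) // 1 hour in microseconds
val bound = TimestampAddInterval(orderingExpr, boundOffset, Some("UTC"))
```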


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
