This is an automated email from the ASF dual-hosted git repository.
ptoth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 17d15a4c6cc7 [SPARK-53762][SQL] Add date and time conversions simplifier rule to optimizer
17d15a4c6cc7 is described below
commit 17d15a4c6cc7054e62f1ecac7352962b198dcbeb
Author: Peter Toth <[email protected]>
AuthorDate: Wed Oct 15 09:09:33 2025 +0200
[SPARK-53762][SQL] Add date and time conversions simplifier rule to optimizer
### What changes were proposed in this pull request?
This PR adds a new rule to the optimizer that focuses on date and time conversion functions and tries to eliminate the unnecessary ones.
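To illustrate (not part of the patch): a minimal sketch, via the public DataFrame API, of the expression shape the rule collapses; the column names `ts` and `s` and the pattern are assumptions.

```scala
import org.apache.spark.sql.functions.{col, date_format, to_timestamp}

val fmt = "yyyy-MM-dd HH:mm:ss"
// Format -> parse -> format, all with the same pattern and session time zone:
val roundTrip = date_format(to_timestamp(date_format(col("ts"), fmt), fmt), fmt)
// The new rule collapses this to the inner conversion: date_format(col("ts"), fmt).
// Symmetrically, to_timestamp(date_format(to_timestamp(col("s"), fmt), fmt), fmt)
// collapses to to_timestamp(col("s"), fmt).
```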
### Why are the changes needed?
Date and time conversions are not cheap, so eliminating some of them can bring considerable performance improvements.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Added new UT.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52493 from peter-toth/SPARK-53762-add-simplifydatetimeconversions-rule.
Authored-by: Peter Toth <[email protected]>
Signed-off-by: Peter Toth <[email protected]>
---
.../catalyst/expressions/datetimeExpressions.scala | 4 ++
.../spark/sql/catalyst/optimizer/Optimizer.scala | 1 +
.../spark/sql/catalyst/optimizer/expressions.scala | 42 +++++++++++++
.../sql/catalyst/rules/RuleIdCollection.scala | 1 +
.../spark/sql/catalyst/trees/TreePatterns.scala | 1 +
.../SimplifyDateTimeConversionsSuite.scala | 69 ++++++++++++++++++++++
6 files changed, 118 insertions(+)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 418431c6c78e..3948f8bd0dd6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1014,6 +1014,8 @@ case class DateFormatClass(left: Expression, right: Expression, timeZoneId: Opti
override protected def withNewChildrenInternal(
newLeft: Expression, newRight: Expression): DateFormatClass =
copy(left = newLeft, right = newRight)
+
+ final override def nodePatternsInternal(): Seq[TreePattern] = Seq(DATETIME)
}
/**
@@ -1147,6 +1149,8 @@ case class GetTimestamp(
newLeft: Expression,
newRight: Expression): Expression =
copy(left = newLeft, right = newRight)
+
+ final override def nodePatternsInternal(): Seq[TreePattern] = Seq(DATETIME)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index fc65c24afcb8..a2dced57c715 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -146,6 +146,7 @@ abstract class Optimizer(catalogManager: CatalogManager)
PruneFilters,
SimplifyCasts,
SimplifyCaseConversionExpressions,
+ SimplifyDateTimeConversions,
RewriteCorrelatedScalarSubquery,
RewriteLateralSubquery,
EliminateSerialization,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
index 856236750f7b..71eb3e5ea2bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
@@ -1142,6 +1142,48 @@ object SimplifyCaseConversionExpressions extends Rule[LogicalPlan] {
}
}
+/**
+ * Removes unnecessary date and time conversion functions.
+ */
+object SimplifyDateTimeConversions extends Rule[LogicalPlan] {
+ def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
+ _.containsPattern(DATETIME), ruleId) {
+ case q: LogicalPlan => q.transformExpressionsUpWithPruning(
+ _.containsPattern(DATETIME), ruleId) {
+ // Remove a string-to-timestamp conversion followed by a timestamp-to-string
+ // conversion if the original string is in the same format.
+ case DateFormatClass(
+ GetTimestamp(
+ e @ DateFormatClass(_, pattern, timeZoneId),
+ pattern2,
+ TimestampType,
+ _,
+ timeZoneId2,
+ _),
+ pattern3,
+ timeZoneId3)
+ if pattern.semanticEquals(pattern2) && pattern.semanticEquals(pattern3)
+ && timeZoneId == timeZoneId2 && timeZoneId == timeZoneId3 =>
+ e
+
+ // Remove a timestamp-to-string conversion followed by a string-to-timestamp
+ // conversion if the original timestamp is built with the same format.
+ case GetTimestamp(
+ DateFormatClass(
+ e @ GetTimestamp(_, pattern, TimestampType, _, timeZoneId, _),
+ pattern2,
+ timeZoneId2),
+ pattern3,
+ TimestampType,
+ _,
+ timeZoneId3,
+ _)
+ if pattern.semanticEquals(pattern2) && pattern.semanticEquals(pattern3)
+ && timeZoneId == timeZoneId2 && timeZoneId == timeZoneId3 =>
+ e
+ }
+ }
+}
/**
* Combine nested [[Concat]] expressions.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
index fd839b4c2127..b9f15f3f951c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
@@ -178,6 +178,7 @@ object RuleIdCollection {
"org.apache.spark.sql.catalyst.optimizer.SimplifyCaseConversionExpressions" ::
"org.apache.spark.sql.catalyst.optimizer.SimplifyCasts" ::
"org.apache.spark.sql.catalyst.optimizer.SimplifyConditionals" ::
+ "org.apache.spark.sql.catalyst.optimizer.SimplifyDateTimeConversions" ::
"org.apache.spark.sql.catalyst.optimizer.SimplifyExtractValueOps" ::
"org.apache.spark.sql.catalyst.optimizer.TransposeWindow" ::
"org.apache.spark.sql.catalyst.optimizer.UnwrapCastInBinaryComparison"
:: Nil
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
index c35aa7403d76..cc05a38f857b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
@@ -42,6 +42,7 @@ object TreePattern extends Enumeration {
val COUNT: Value = Value
val CREATE_NAMED_STRUCT: Value = Value
val CURRENT_LIKE: Value = Value
+ val DATETIME: Value = Value
val DYNAMIC_PRUNING_EXPRESSION: Value = Value
val DYNAMIC_PRUNING_SUBQUERY: Value = Value
val EXISTS_SUBQUERY = Value
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyDateTimeConversionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyDateTimeConversionsSuite.scala
new file mode 100644
index 000000000000..8e7013f2df03
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyDateTimeConversionsSuite.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.catalyst.dsl.expressions._
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.expressions.{DateFormatClass, GetTimestamp}
+import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.types._
+
+class SimplifyDateTimeConversionsSuite extends PlanTest {
+
+ object Optimize extends RuleExecutor[LogicalPlan] {
+ val batches =
+ Batch("SimplifyDateTimeConversions", FixedPoint(50), SimplifyDateTimeConversions) :: Nil
+ }
+
+ val testRelation = LocalRelation($"ts".timestamp, $"s".string)
+
+ test("SPARK-53762: Remove DateFormat - GetTimestamp groups") {
+ val pattern = "yyyy-MM-dd"
+
+ val df = DateFormatClass($"ts", pattern)
+ val gt = GetTimestamp($"s", pattern, TimestampType)
+
+ val originalQuery = testRelation
+ .select(
+ DateFormatClass(
+ GetTimestamp(
+ df,
+ pattern,
+ TimestampType),
+ pattern) as "c1",
+ GetTimestamp(
+ DateFormatClass(
+ gt,
+ pattern),
+ pattern,
+ TimestampType) as "c2")
+ .analyze
+
+ val optimized = Optimize.execute(originalQuery)
+
+ val expected = testRelation
+ .select(
+ df as "c1",
+ gt as "c2")
+ .analyze
+
+ comparePlans(optimized, expected)
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]