This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new 98007df61cc4 [SPARK-50905][SQL][TESTS] Rename `Customer*` to `Custom*`
in `SparkSessionExtensionSuite`
98007df61cc4 is described below
commit 98007df61cc49e37ceb393626ee2cee4e86c2bea
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Jan 21 08:55:34 2025 -0800
[SPARK-50905][SQL][TESTS] Rename `Customer*` to `Custom*` in
`SparkSessionExtensionSuite`
### What changes were proposed in this pull request?
This PR aims to rename `Customer*` to `Custom*` in
`SparkSessionExtensionSuite` like the other existing ones in the same file.
- `CustomAggHint`
- `CustomAggregateRule`
- `CustomSortHint`
- `CustomSortRule`
### Why are the changes needed?
To make the comment and code consistent by using the general term `Custom`
instead of `Customer`.
### Does this PR introduce _any_ user-facing change?
No, this is a renaming in test cases.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #49587 from dongjoon-hyun/SPARK-50905.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
(cherry picked from commit 001e2443d9ccfbfa087e948dc121f03e756aa3b4)
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
index 90cca58b3bd2..986d547b798e 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
@@ -549,7 +549,7 @@ class SparkSessionExtensionSuite extends SparkFunSuite with
SQLHelper with Adapt
test("custom aggregate hint") {
// The custom hint allows us to replace the aggregate (without grouping
keys) with just
// Literal.
-
withSession(Seq(_.injectHintResolutionRule(CustomerAggregateHintResolutionRule),
+
withSession(Seq(_.injectHintResolutionRule(CustomAggregateHintResolutionRule),
_.injectOptimizerRule(CustomAggregateRule))) { session =>
val res = session.range(10).agg(max("id")).as("max_id")
.hint("MAX_VALUE", "id", 10)
@@ -562,7 +562,7 @@ class SparkSessionExtensionSuite extends SparkFunSuite with
SQLHelper with Adapt
test("custom sort hint") {
// The custom hint allows us to replace the sort with its input
- withSession(Seq(_.injectHintResolutionRule(CustomerSortHintResolutionRule),
+ withSession(Seq(_.injectHintResolutionRule(CustomSortHintResolutionRule),
_.injectOptimizerRule(CustomSortRule))) { session =>
val res = session.range(10).sort("id")
.hint("INPUT_SORTED")
@@ -1263,7 +1263,7 @@ case class CustomAggHint(attribute: AttributeReference,
max: Int) extends Aggreg
// Attaches the CustomAggHint to the aggregate node without grouping keys if
the aggregate
// function is MAX over the specified column.
-case class CustomerAggregateHintResolutionRule(spark: SparkSession) extends
Rule[LogicalPlan] {
+case class CustomAggregateHintResolutionRule(spark: SparkSession) extends
Rule[LogicalPlan] {
val MY_HINT_NAME = Set("MAX_VALUE")
def isMax(expr: NamedExpression, attribute: String):
Option[AttributeReference] = {
@@ -1316,7 +1316,7 @@ case class CustomAggregateRule(spark: SparkSession)
extends Rule[LogicalPlan] {
case class CustomSortHint(inputSorted: Boolean) extends SortHint
// Attaches the CustomSortHint to the sort node.
-case class CustomerSortHintResolutionRule(spark: SparkSession) extends
Rule[LogicalPlan] {
+case class CustomSortHintResolutionRule(spark: SparkSession) extends
Rule[LogicalPlan] {
val MY_HINT_NAME = Set("INPUT_SORTED")
private def applySortHint(plan: LogicalPlan): LogicalPlan =
plan.transformDown {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]