This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b37d51afd9d8 [SPARK-53700][SQL] Remove redundancy in `DataSourceV2RelationBase.simpleString`
b37d51afd9d8 is described below

commit b37d51afd9d8639ddc119e875475959eb393165d
Author: Anton Okolnychyi <aokolnyc...@apache.org>
AuthorDate: Thu Sep 25 09:12:55 2025 -0700

    [SPARK-53700][SQL] Remove redundancy in `DataSourceV2RelationBase.simpleString`
    
    ### What changes were proposed in this pull request?
    
    This PR removes redundancy in simpleString in DataSourceV2RelationBase.
    
    ### Why are the changes needed?
    
    Before this change:
    
    ```
    == Parsed Logical Plan ==
    'Project [*]
    +- 'UnresolvedRelation [cat, ns1, test_table], [], false
    
    == Analyzed Logical Plan ==
    pk: int, salary: int, dep: string
    Project [pk#18, salary#19, dep#20]
    +- SubqueryAlias cat.ns1.test_table
       +- RelationV2[pk#18, salary#19, dep#20] cat.ns1.test_table cat.ns1.test_table // !!! REDUNDANT !!!
    
    == Optimized Logical Plan ==
    RelationV2[pk#18, salary#19, dep#20] cat.ns1.test_table
    
    == Physical Plan ==
    *(1) Project [pk#18, salary#19, dep#20]
    +- BatchScan cat.ns1.test_table[pk#18, salary#19, dep#20] class org.apache.spark.sql.connector.catalog.InMemoryBaseTable$InMemoryBatchScan RuntimeFilters: []
    ```
    
    After this change:
    
    ```
    == Parsed Logical Plan ==
    'Project [*]
    +- 'UnresolvedRelation [cat, ns1, test_table], [], false
    
    == Analyzed Logical Plan ==
    pk: int, salary: int, dep: string
    Project [pk#25, salary#26, dep#27]
    +- SubqueryAlias cat.ns1.test_table
       +- RelationV2[pk#25, salary#26, dep#27] cat.ns1.test_table
    
    == Optimized Logical Plan ==
    RelationV2[pk#25, salary#26, dep#27] cat.ns1.test_table
    
    == Physical Plan ==
    *(1) Project [pk#25, salary#26, dep#27]
    +- BatchScan cat.ns1.test_table[pk#25, salary#26, dep#27] class org.apache.spark.sql.connector.catalog.InMemoryBaseTable$InMemoryBatchScan RuntimeFilters: []
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    This PR comes with tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52442 from aokolnychyi/spark-53700.
    
    Authored-by: Anton Okolnychyi <aokolnyc...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../sql/execution/datasources/v2/DataSourceV2Relation.scala   |  6 +-----
 .../org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala | 11 +++++++++++
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
index 2b1b40e0a5eb..26f406999494 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
@@ -65,11 +65,7 @@ abstract class DataSourceV2RelationBase(
   override def skipSchemaResolution: Boolean = 
table.supports(TableCapability.ACCEPT_ANY_SCHEMA)
 
   override def simpleString(maxFields: Int): String = {
-    val qualifiedTableName = (catalog, identifier) match {
-      case (Some(cat), Some(ident)) => s"${cat.name()}.${ident.toString}"
-      case _ => ""
-    }
 -    s"RelationV2${truncatedString(output, "[", ", ", "]", maxFields)} $qualifiedTableName $name"
 +    s"RelationV2${truncatedString(output, "[", ", ", "]", maxFields)} $name"
   }
 
   override def computeStats(): Statistics = {
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 9171e44571e8..3adefa6b4d53 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -79,6 +79,17 @@ abstract class DataSourceV2SQLSuite
   protected def analysisException(sqlText: String): AnalysisException = {
     intercept[AnalysisException](sql(sqlText))
   }
+
+  test("EXPLAIN") {
+    val t = "testcat.tbl"
+    withTable(t) {
+      spark.sql(s"CREATE TABLE $t (id int, data string)")
 +      val explain = spark.sql(s"EXPLAIN EXTENDED SELECT * FROM $t").head().getString(0)
+      val relationPattern = raw".*RelationV2\[[^\]]*]\s+$t\s*$$".r
+      val relations = explain.split("\n").filter(_.contains("RelationV2"))
 +      assert(relations.nonEmpty && relations.forall(line => relationPattern.matches(line.trim)))
+    }
+  }
 }
 
 class DataSourceV2SQLSuiteV1Filter


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to