This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.1 by this push:
     new a88b15c98dd5 [SPARK-54111][CONNECT] Support getCatalogs for SparkConnectDatabaseMetaData
a88b15c98dd5 is described below

commit a88b15c98dd56ca3b44ed5edb7188448a0db319c
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Nov 3 14:34:56 2025 +0800

    [SPARK-54111][CONNECT] Support getCatalogs for SparkConnectDatabaseMetaData
    
    ### What changes were proposed in this pull request?
    
    Implement `getCatalogs` defined in `java.sql.DatabaseMetaData` for `SparkConnectDatabaseMetaData`.
    
    ```java
        /**
         * Retrieves the catalog names available in this database.  The results
         * are ordered by catalog name.
         *
         * <P>The catalog column is:
         *  <OL>
         *  <LI><B>TABLE_CAT</B> String {@code =>} catalog name
         *  </OL>
         *
         * @return a {@code ResultSet} object in which each row has a
         *         single {@code String} column that is a catalog name
         * @throws SQLException if a database access error occurs
         */
        ResultSet getCatalogs() throws SQLException;
    ```
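
    For illustration, here is a minimal client-side sketch (not part of this commit) of how the new `getCatalogs` can be exercised through the Connect JDBC driver. It assumes the driver jar is on the classpath so `DriverManager` can discover it, and the URL and port are placeholders for a running Spark Connect server:

    ```scala
    import java.sql.DriverManager

    import scala.util.Using

    object GetCatalogsExample {
      def main(args: Array[String]): Unit = {
        // Open a Connect JDBC connection and list the catalogs; each row has a
        // single TABLE_CAT column and rows are ordered by catalog name.
        Using.resource(DriverManager.getConnection("jdbc:sc://localhost:15002")) { conn =>
          Using.resource(conn.getMetaData.getCatalogs) { rs =>
            while (rs.next()) {
              println(rs.getString("TABLE_CAT"))
            }
          }
        }
      }
    }
    ```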
    ### Why are the changes needed?
    
    Enhance API coverage of the Connect JDBC driver; for example, the `get[Catalogs|Schemas|Tables|...]` APIs are used by SQL GUI tools such as DBeaver to display the navigation tree.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, the Connect JDBC driver is a new feature under development.
    
    ### How was this patch tested?
    
    A new unit test is added.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52815 from pan3793/SPARK-54111.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
    (cherry picked from commit 2be1eb72e1ca27a9b882dec7dfea726c437821fa)
    Signed-off-by: yangjie01 <[email protected]>
---
 .../client/jdbc/SparkConnectDatabaseMetaData.scala | 12 +++++--
 .../jdbc/SparkConnectDatabaseMetaDataSuite.scala   | 40 ++++++++++++++++++++--
 2 files changed, 48 insertions(+), 4 deletions(-)

diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaData.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaData.scala
index a16cba5e3da4..215c8256acbc 100644
--- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaData.scala
+++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaData.scala
@@ -25,6 +25,8 @@ import org.apache.spark.util.VersionUtils
 
 class SparkConnectDatabaseMetaData(conn: SparkConnectConnection) extends DatabaseMetaData {
 
+  import conn.spark.implicits._
+
   override def allProceduresAreCallable: Boolean = false
 
   override def allTablesAreSelectable: Boolean = false
@@ -288,8 +290,14 @@ class SparkConnectDatabaseMetaData(conn: SparkConnectConnection) extends Databas
       columnNamePattern: String): ResultSet =
     throw new SQLFeatureNotSupportedException
 
-  override def getCatalogs: ResultSet =
-    throw new SQLFeatureNotSupportedException
+  override def getCatalogs: ResultSet = {
+    conn.checkOpen()
+
+    val df = conn.spark.sql("SHOW CATALOGS")
+      .select($"catalog".as("TABLE_CAT"))
+      .orderBy("TABLE_CAT")
+    new SparkConnectResultSet(df.collectResult())
+  }
 
   override def getSchemas: ResultSet =
     throw new SQLFeatureNotSupportedException
diff --git a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaDataSuite.scala b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaDataSuite.scala
index b2ecc163b2b8..42596b56f4c5 100644
--- a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaDataSuite.scala
+++ b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaDataSuite.scala
@@ -19,16 +19,27 @@ package org.apache.spark.sql.connect.client.jdbc
 
 import java.sql.{Array => _, _}
 
+import scala.util.Using
+
 import org.apache.spark.SparkBuildInfo.{spark_version => SPARK_VERSION}
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.connect.client.jdbc.test.JdbcHelper
-import org.apache.spark.sql.connect.test.{ConnectFunSuite, RemoteSparkSession}
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, RemoteSparkSession, SQLHelper}
 import org.apache.spark.util.VersionUtils
 
 class SparkConnectDatabaseMetaDataSuite extends ConnectFunSuite with RemoteSparkSession
-    with JdbcHelper {
+    with JdbcHelper with SQLHelper {
 
   def jdbcUrl: String = s"jdbc:sc://localhost:$serverPort"
 
+  // catalyst test jar is inaccessible here, but is present on the testing connect server classpath
+  private val TEST_IN_MEMORY_CATALOG = "org.apache.spark.sql.connector.catalog.InMemoryCatalog"
+
+  private def registerCatalog(
+      name: String, className: String)(implicit spark: SparkSession): Unit = {
+    spark.conf.set(s"spark.sql.catalog.$name", className)
+  }
+
   test("SparkConnectDatabaseMetaData simple methods") {
     withConnection { conn =>
       val spark = conn.asInstanceOf[SparkConnectConnection].spark
@@ -199,4 +210,29 @@ class SparkConnectDatabaseMetaDataSuite extends ConnectFunSuite with RemoteSpark
       // scalastyle:on line.size.limit
     }
   }
+
+  test("SparkConnectDatabaseMetaData getCatalogs") {
+    withConnection { conn =>
+      implicit val spark: SparkSession = conn.asInstanceOf[SparkConnectConnection].spark
+
+      registerCatalog("testcat", TEST_IN_MEMORY_CATALOG)
+      registerCatalog("testcat2", TEST_IN_MEMORY_CATALOG)
+
+      // forcibly initialize the registered catalogs because SHOW CATALOGS only
+      // returns the initialized catalogs.
+      spark.sql("USE testcat")
+      spark.sql("USE testcat2")
+      spark.sql("USE spark_catalog")
+
+      val metadata = conn.getMetaData
+      Using.resource(metadata.getCatalogs) { rs =>
+        val catalogs = new Iterator[String] {
+          def hasNext: Boolean = rs.next()
+          def next(): String = rs.getString("TABLE_CAT")
+        }.toSeq
+        // results are ordered by TABLE_CAT
+        assert(catalogs === Seq("spark_catalog", "testcat", "testcat2"))
+      }
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
