This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new 6afbef9  [SPARK-54893] Support `listTables` and `getTable` in `Catalog`
6afbef9 is described below

commit 6afbef9bbbc89381b3b940514c7e07fd61130fb9
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon Jan 5 05:47:40 2026 +0900

    [SPARK-54893] Support `listTables` and `getTable` in `Catalog`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to support `listTables` and `getTable` in `Catalog`.
    
    ### Why are the changes needed?
    
    For feature parity.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #273 from dongjoon-hyun/SPARK-54893.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 Sources/SparkConnect/Catalog.swift         | 50 ++++++++++++++++++++++++++++++
 Sources/SparkConnect/DataFrame.swift       |  2 ++
 Tests/SparkConnectTests/CatalogTests.swift | 50 ++++++++++++++++++++++++++++++
 3 files changed, 102 insertions(+)

diff --git a/Sources/SparkConnect/Catalog.swift b/Sources/SparkConnect/Catalog.swift
index c1b23d4..57e97a9 100644
--- a/Sources/SparkConnect/Catalog.swift
+++ b/Sources/SparkConnect/Catalog.swift
@@ -209,6 +209,34 @@ public actor Catalog: Sendable {
     return try await df.collect().first![0] as! Bool
   }
 
+  /// Returns a list of tables in the given database (or the current database).
+  /// - Parameter pattern: The pattern that the database name needs to match.
+  /// - Returns: A list of ``SparkTable``.
+  public func listTables(dbName: String? = nil, pattern: String? = nil) async throws -> [SparkTable]
+  {
+    let df = getDataFrame({
+      var listTables = Spark_Connect_ListTables()
+      if let dbName {
+        listTables.dbName = dbName
+      }
+      if let pattern {
+        listTables.pattern = pattern
+      }
+      var catalog = Spark_Connect_Catalog()
+      catalog.catType = .listTables(listTables)
+      return catalog
+    })
+    return try await df.collect().map {
+      try SparkTable(
+        name: $0[0] as! String,
+        catalog: $0[1] as? String,
+        namespace: $0[2] as? [String],
+        description: $0[3] as? String,
+        tableType: $0[4] as! String,
+        isTemporary: $0[5] as! Bool)
+    }
+  }
+
   /// Creates a table from the given path and returns the corresponding ``DataFrame``.
   /// - Parameters:
   ///   - tableName: A qualified or unqualified name that designates a table. If no database
@@ -247,6 +275,28 @@ public actor Catalog: Sendable {
     return df
   }
 
+  /// Get the table with the specified name.
+  /// - Parameter tableName: name of the table to get.
+  /// - Returns: The table found by the name.
+  public func getTable(_ tableName: String) async throws -> SparkTable {
+    let df = getDataFrame({
+      var table = Spark_Connect_GetTable()
+      table.tableName = tableName
+      var catalog = Spark_Connect_Catalog()
+      catalog.catType = .getTable(table)
+      return catalog
+    })
+    return try await df.collect().map {
+      try SparkTable(
+        name: $0[0] as! String,
+        catalog: $0[1] as? String,
+        namespace: $0[2] as? [String],
+        description: $0[3] as? String,
+        tableType: $0[4] as! String,
+        isTemporary: $0[5] as! Bool)
+    }.first!
+  }
+
   /// Check if the table or view with the specified name exists. This can either be a temporary
   /// view or a table/view.
   /// - Parameter tableName: a qualified or unqualified name that designates a table/view. It follows the same
diff --git a/Sources/SparkConnect/DataFrame.swift b/Sources/SparkConnect/DataFrame.swift
index 6ef2740..2483573 100644
--- a/Sources/SparkConnect/DataFrame.swift
+++ b/Sources/SparkConnect/DataFrame.swift
@@ -448,6 +448,8 @@ public actor DataFrame: Sendable {
               values.append((array as! AsString).asString(i).utf8)
             case .complexInfo(.strct):
               values.append((array as! AsString).asString(i))
+            case .complexInfo(.list):
+              values.append(array.asAny(i) as? [String])
             default:
               values.append(array.asAny(i) as? String)
             }
diff --git a/Tests/SparkConnectTests/CatalogTests.swift b/Tests/SparkConnectTests/CatalogTests.swift
index c631671..17f0b62 100644
--- a/Tests/SparkConnectTests/CatalogTests.swift
+++ b/Tests/SparkConnectTests/CatalogTests.swift
@@ -120,6 +120,56 @@ struct CatalogTests {
     await spark.stop()
   }
 
+  @Test
+  func listTables() async throws {
+    let spark = try await SparkSession.builder.getOrCreate()
+    #expect(try await spark.catalog.listTables().count == 0)
+
+    let tableName = ("TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: ""))
+      .lowercased()
+    try await SQLHelper.withTable(spark, tableName)({
+      try await spark.range(1).write.orc("/tmp/\(tableName)")
+      #expect(
+        try await spark.catalog.createTable(tableName, "/tmp/\(tableName)", source: "orc").count()
+          == 1)
+
+      let tables = try await spark.catalog.listTables()
+      #expect(tables.count == 1)
+      #expect(tables[0].name == tableName)
+      #expect(tables[0].catalog == "spark_catalog")
+      #expect(tables[0].namespace == ["default"])
+      #expect(tables[0].description == nil)
+      #expect(tables[0].tableType == "EXTERNAL")
+      #expect(tables[0].isTemporary == false)
+      #expect(try await spark.catalog.listTables(pattern: "*") == tables)
+      #expect(try await spark.catalog.listTables(pattern: "non_exist").count == 0)
+    })
+    await spark.stop()
+  }
+
+  @Test
+  func getTable() async throws {
+    let spark = try await SparkSession.builder.getOrCreate()
+
+    let tableName = ("TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: ""))
+      .lowercased()
+    try await SQLHelper.withTable(spark, tableName)({
+      try await spark.range(1).write.orc("/tmp/\(tableName)")
+      #expect(
+        try await spark.catalog.createTable(tableName, "/tmp/\(tableName)", source: "orc").count()
+          == 1)
+
+      let table = try await spark.catalog.getTable(tableName)
+      #expect(table.name == tableName)
+      #expect(table.catalog == "spark_catalog")
+      #expect(table.namespace == ["default"])
+      #expect(table.description == nil)
+      #expect(table.tableType == "EXTERNAL")
+      #expect(table.isTemporary == false)
+    })
+    await spark.stop()
+  }
+
   @Test
   func createTable() async throws {
     let spark = try await SparkSession.builder.getOrCreate()


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to