This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new d26e32b  [SPARK-51967] Use `discardableResult` to prevent unnecessary warnings
d26e32b is described below

commit d26e32b93f9e0f2da157b6e310c241d43a341176
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Wed Apr 30 09:08:02 2025 -0700

    [SPARK-51967] Use `discardableResult` to prevent unnecessary warnings
    
    ### What changes were proposed in this pull request?
    
    This PR aims to use `discardableResult` to prevent unnecessary warnings.
    
    ### Why are the changes needed?
    
    To simplify usage of the `Spark Connect Swift` API.
    
    For example, the `count()` API is sometimes called only to execute a query (such as an INSERT statement), so its return value is not needed. By adding `@discardableResult`, we can make it clear that the result is discardable.
    
    ```swift
    @discardableResult
    public func count() async throws -> Int64 {
    ```
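    
    At the call sites, this removes the need to discard the result explicitly with `_ =`. A minimal before/after sketch, assuming `df` is an existing `DataFrame`:
    
    ```swift
    // Before: the unused Int64 result had to be assigned to `_`
    // to silence the "result of call is unused" warning.
    _ = try await df.count()
    
    // After: with @discardableResult on count(), the call site can
    // simply ignore the returned row count.
    try await df.count()
    ```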
    
    ### Does this PR introduce _any_ user-facing change?
    
    This is a change to the unreleased version.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #95 from dongjoon-hyun/SPARK-51967.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 Sources/SparkConnect/Catalog.swift                    |  4 ++--
 Sources/SparkConnect/DataFrame.swift                  |  3 +++
 Sources/SparkConnect/DataFrameReader.swift            |  3 ++-
 Sources/SparkConnect/DataFrameWriter.swift            |  2 +-
 Sources/SparkConnect/DataFrameWriterV2.swift          |  2 +-
 Sources/SparkConnect/RuntimeConf.swift                |  4 ++--
 Sources/SparkConnect/SparkConnectClient.swift         | 12 ++++++++++--
 Sources/SparkConnect/SparkFileUtils.swift             |  2 +-
 Sources/SparkConnect/SparkSession.swift               |  1 +
 Tests/SparkConnectTests/BuilderTests.swift            |  2 +-
 Tests/SparkConnectTests/CatalogTests.swift            |  2 +-
 Tests/SparkConnectTests/DataFrameReaderTests.swift    |  6 +++---
 Tests/SparkConnectTests/DataFrameTests.swift          | 14 +++++++-------
 Tests/SparkConnectTests/RuntimeConfTests.swift        |  8 ++++----
 Tests/SparkConnectTests/SQLHelper.swift               |  4 ++--
 Tests/SparkConnectTests/SparkConnectClientTests.swift | 10 +++++-----
 Tests/SparkConnectTests/SparkSessionTests.swift       |  2 +-
 17 files changed, 47 insertions(+), 34 deletions(-)

diff --git a/Sources/SparkConnect/Catalog.swift b/Sources/SparkConnect/Catalog.swift
index f14bff5..c47fac4 100644
--- a/Sources/SparkConnect/Catalog.swift
+++ b/Sources/SparkConnect/Catalog.swift
@@ -114,7 +114,7 @@ public actor Catalog: Sendable {
       catalog.catType = .setCurrentCatalog(setCurrentCatalog)
       return catalog
     })
-    _ = try await df.count()
+    try await df.count()
   }
 
   /// Returns a list of catalogs in this session.
@@ -156,7 +156,7 @@ public actor Catalog: Sendable {
       catalog.catType = .setCurrentDatabase(setCurrentDatabase)
       return catalog
     })
-    _ = try await df.count()
+    try await df.count()
   }
 
   /// Returns a list of databases available across all sessions.
diff --git a/Sources/SparkConnect/DataFrame.swift b/Sources/SparkConnect/DataFrame.swift
index 8e96a38..12c855c 100644
--- a/Sources/SparkConnect/DataFrame.swift
+++ b/Sources/SparkConnect/DataFrame.swift
@@ -136,6 +136,7 @@ public actor DataFrame: Sendable {
 
   /// Return the total number of rows.
   /// - Returns: a `Int64` value.
+  @discardableResult
   public func count() async throws -> Int64 {
     let counter = Atomic(Int64(0))
 
@@ -440,6 +441,7 @@ public actor DataFrame: Sendable {
 
   /// Persist this `DataFrame` with the given storage level.
   /// - Parameter storageLevel: A storage level to apply.
+  @discardableResult
   public func persist(storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK) async throws
     -> DataFrame
   {
@@ -456,6 +458,7 @@ public actor DataFrame: Sendable {
   /// This will not un-persist any cached data that is built upon this `DataFrame`.
   /// - Parameter blocking: Whether to block until all blocks are deleted.
   /// - Returns: A `DataFrame`
+  @discardableResult
   public func unpersist(blocking: Bool = false) async throws -> DataFrame {
     try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
diff --git a/Sources/SparkConnect/DataFrameReader.swift b/Sources/SparkConnect/DataFrameReader.swift
index 4010fb2..859b854 100644
--- a/Sources/SparkConnect/DataFrameReader.swift
+++ b/Sources/SparkConnect/DataFrameReader.swift
@@ -85,10 +85,11 @@ public actor DataFrameReader: Sendable {
   /// the schema inference step, and thus speed up data loading.
   /// - Parameter schema: A DDL schema string.
   /// - Returns: A `DataFrameReader`.
+  @discardableResult
   public func schema(_ schema: String) async throws -> DataFrameReader {
     // Validate by parsing.
     do {
-      _ = try await sparkSession.client.ddlParse(schema)
+      try await sparkSession.client.ddlParse(schema)
     } catch {
       throw SparkConnectError.InvalidTypeException
     }
diff --git a/Sources/SparkConnect/DataFrameWriter.swift b/Sources/SparkConnect/DataFrameWriter.swift
index bcab5a6..c9d8425 100644
--- a/Sources/SparkConnect/DataFrameWriter.swift
+++ b/Sources/SparkConnect/DataFrameWriter.swift
@@ -177,7 +177,7 @@ public actor DataFrameWriter: Sendable {
     var command = Spark_Connect_Command()
     command.writeOperation = write
 
-    _ = try await df.spark.client.execute(df.spark.sessionID, command)
+    try await df.spark.client.execute(df.spark.sessionID, command)
   }
 
   /// Saves the content of the `DataFrame` in CSV format at the specified path.
diff --git a/Sources/SparkConnect/DataFrameWriterV2.swift b/Sources/SparkConnect/DataFrameWriterV2.swift
index 3e95892..b2ad861 100644
--- a/Sources/SparkConnect/DataFrameWriterV2.swift
+++ b/Sources/SparkConnect/DataFrameWriterV2.swift
@@ -147,6 +147,6 @@ public actor DataFrameWriterV2: Sendable {
 
     var command = Spark_Connect_Command()
     command.writeOperationV2 = write
-    _ = try await df.spark.client.execute(df.spark.sessionID, command)
+    try await df.spark.client.execute(df.spark.sessionID, command)
   }
 }
diff --git a/Sources/SparkConnect/RuntimeConf.swift b/Sources/SparkConnect/RuntimeConf.swift
index 62dfa3d..b0ff45c 100644
--- a/Sources/SparkConnect/RuntimeConf.swift
+++ b/Sources/SparkConnect/RuntimeConf.swift
@@ -32,14 +32,14 @@ public actor RuntimeConf {
   ///   - key: A string for the configuration key.
   ///   - value: A string for the configuration value.
   public func set(_ key: String, _ value: String) async throws {
-    _ = try await client.setConf(map: [key: value])
+    try await client.setConf(map: [key: value])
   }
 
   /// Reset a configuration.
   /// - Parameters:
   ///   - key: A string for the configuration key.
   public func unset(_ key: String) async throws {
-    _ = try await client.unsetConf(keys: [key])
+    try await client.unsetConf(keys: [key])
   }
 
   /// Get a configuration.
diff --git a/Sources/SparkConnect/SparkConnectClient.swift b/Sources/SparkConnect/SparkConnectClient.swift
index 00663b0..57eaffd 100644
--- a/Sources/SparkConnect/SparkConnectClient.swift
+++ b/Sources/SparkConnect/SparkConnectClient.swift
@@ -87,6 +87,7 @@ public actor SparkConnectClient {
   /// As a test connection, this sends the server `SparkVersion` request.
   /// - Parameter sessionID: A string for the session ID.
   /// - Returns: An `AnalyzePlanResponse` instance for `SparkVersion`
+  @discardableResult
   func connect(_ sessionID: String) async throws -> AnalyzePlanResponse {
     try await withGPRC { client in
       // To prevent server-side `INVALID_HANDLE.FORMAT (SQLSTATE: HY000)` exception.
@@ -137,6 +138,7 @@ public actor SparkConnectClient {
   /// Request the server to set a map of configurations for this session.
   /// - Parameter map: A map of key-value pairs to set.
   /// - Returns: Always return true.
+  @discardableResult
   func setConf(map: [String: String]) async throws -> Bool {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
@@ -144,7 +146,7 @@ public actor SparkConnectClient {
       request.clientType = clientType
       request.userContext = userContext
       request.sessionID = self.sessionID!
-      let _ = try await service.config(request)
+      _ = try await service.config(request)
       return true
     }
   }
@@ -160,7 +162,11 @@ public actor SparkConnectClient {
     request.operation.opType = .unset(unset)
     return request
   }
-
+  
+  /// Request the server to unset keys
+  /// - Parameter keys: An array of keys
+  /// - Returns: Always return true
+  @discardableResult
   func unsetConf(keys: [String]) async throws -> Bool {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
@@ -509,6 +515,7 @@ public actor SparkConnectClient {
     self.result.append(response)
   }
 
+  @discardableResult
   func execute(_ sessionID: String, _ command: Command) async throws -> [ExecutePlanResponse] {
     self.result.removeAll()
     try await withGPRC { client in
@@ -555,6 +562,7 @@ public actor SparkConnectClient {
   /// Parse a DDL string to ``Spark_Connect_DataType`` instance.
   /// - Parameter ddlString: A string to parse.
   /// - Returns: A ``Spark_Connect_DataType`` instance.
+  @discardableResult
   func ddlParse(_ ddlString: String) async throws -> Spark_Connect_DataType {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
diff --git a/Sources/SparkConnect/SparkFileUtils.swift b/Sources/SparkConnect/SparkFileUtils.swift
index 5cfd504..c91ee0c 100644
--- a/Sources/SparkConnect/SparkFileUtils.swift
+++ b/Sources/SparkConnect/SparkFileUtils.swift
@@ -85,7 +85,7 @@ public enum SparkFileUtils {
   static func createDirectory(root: String, namePrefix: String = "spark") -> URL {
     let tempDir = URL(fileURLWithPath: root).appendingPathComponent(
       "\(namePrefix)-\(UUID().uuidString)")
-    _ = createDirectory(at: tempDir)
+    createDirectory(at: tempDir)
     return tempDir
   }
 
diff --git a/Sources/SparkConnect/SparkSession.swift b/Sources/SparkConnect/SparkSession.swift
index 478df5c..b06370e 100644
--- a/Sources/SparkConnect/SparkSession.swift
+++ b/Sources/SparkConnect/SparkSession.swift
@@ -201,6 +201,7 @@ public actor SparkSession {
 
     /// Remove all stored configurations.
     /// - Returns: self
+    @discardableResult
     func clear() -> Builder {
       sparkConf.removeAll()
       return self
diff --git a/Tests/SparkConnectTests/BuilderTests.swift b/Tests/SparkConnectTests/BuilderTests.swift
index de2b31d..45b44f4 100644
--- a/Tests/SparkConnectTests/BuilderTests.swift
+++ b/Tests/SparkConnectTests/BuilderTests.swift
@@ -42,7 +42,7 @@ struct BuilderTests {
     // Don't try to connect
     let builder = await SparkSession.builder.remote("sc://spark:1234")
     #expect(await builder.sparkConf["spark.remote"] == "sc://spark:1234")
-    _ = await builder.clear()
+    await builder.clear()
   }
 
   @Test
diff --git a/Tests/SparkConnectTests/CatalogTests.swift b/Tests/SparkConnectTests/CatalogTests.swift
index 3b8feca..44562d5 100644
--- a/Tests/SparkConnectTests/CatalogTests.swift
+++ b/Tests/SparkConnectTests/CatalogTests.swift
@@ -104,7 +104,7 @@ struct CatalogTests {
     let dbName = "DB_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
     #expect(try await spark.catalog.databaseExists(dbName) == false)
     try await SQLHelper.withDatabase(spark, dbName) ({
-      _ = try await spark.sql("CREATE DATABASE \(dbName)").count()
+      try await spark.sql("CREATE DATABASE \(dbName)").count()
       #expect(try await spark.catalog.databaseExists(dbName))
     })
     #expect(try await spark.catalog.databaseExists(dbName) == false)
diff --git a/Tests/SparkConnectTests/DataFrameReaderTests.swift b/Tests/SparkConnectTests/DataFrameReaderTests.swift
index 1f71d0a..b4c7599 100644
--- a/Tests/SparkConnectTests/DataFrameReaderTests.swift
+++ b/Tests/SparkConnectTests/DataFrameReaderTests.swift
@@ -82,7 +82,7 @@ struct DataFrameReaderTests {
     let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
     let spark = try await SparkSession.builder.getOrCreate()
     try await SQLHelper.withTable(spark, tableName)({
-      _ = try await spark.sql("CREATE TABLE \(tableName) USING ORC AS VALUES 
(1), (2)").count()
+      try await spark.sql("CREATE TABLE \(tableName) USING ORC AS VALUES (1), 
(2)").count()
       #expect(try await spark.read.table(tableName).count() == 2)
     })
     await spark.stop()
@@ -103,10 +103,10 @@ struct DataFrameReaderTests {
   func invalidSchema() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
     await #expect(throws: SparkConnectError.InvalidTypeException) {
-      _ = try await spark.read.schema("invalid-name SHORT")
+      try await spark.read.schema("invalid-name SHORT")
     }
     await #expect(throws: SparkConnectError.InvalidTypeException) {
-      _ = try await spark.read.schema("age UNKNOWN_TYPE")
+      try await spark.read.schema("age UNKNOWN_TYPE")
     }
     await spark.stop()
   }
diff --git a/Tests/SparkConnectTests/DataFrameTests.swift b/Tests/SparkConnectTests/DataFrameTests.swift
index 327b009..bf320e5 100644
--- a/Tests/SparkConnectTests/DataFrameTests.swift
+++ b/Tests/SparkConnectTests/DataFrameTests.swift
@@ -206,10 +206,10 @@ struct DataFrameTests {
   func selectInvalidColumn() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
     try await #require(throws: Error.self) {
-      let _ = try await spark.range(1).select("invalid").schema
+      try await spark.range(1).select("invalid").schema
     }
     try await #require(throws: Error.self) {
-      let _ = try await spark.range(1).select("id + 1").schema
+      try await spark.range(1).select("id + 1").schema
     }
     await spark.stop()
   }
@@ -447,7 +447,7 @@ struct DataFrameTests {
     try await #require(throws: Error.self) {
       var invalidLevel = StorageLevel.DISK_ONLY
       invalidLevel.replication = 0
-      let _ = try await spark.range(9999).persist(storageLevel: invalidLevel).count()
+      try await spark.range(9999).persist(storageLevel: invalidLevel).count()
     }
     await spark.stop()
   }
@@ -707,14 +707,14 @@ struct DataFrameTests {
     let spark = try await SparkSession.builder.getOrCreate()
     let df = try await spark.range(1)
 
-    _ = try await df.unpersist()
+    try await df.unpersist()
     #expect(try await df.storageLevel == StorageLevel.NONE)
-    _ = try await df.persist()
+    try await df.persist()
     #expect(try await df.storageLevel == StorageLevel.MEMORY_AND_DISK)
 
-    _ = try await df.unpersist()
+    try await df.unpersist()
     #expect(try await df.storageLevel == StorageLevel.NONE)
-    _ = try await df.persist(storageLevel: StorageLevel.MEMORY_ONLY)
+    try await df.persist(storageLevel: StorageLevel.MEMORY_ONLY)
     #expect(try await df.storageLevel == StorageLevel.MEMORY_ONLY)
 
     await spark.stop()
diff --git a/Tests/SparkConnectTests/RuntimeConfTests.swift b/Tests/SparkConnectTests/RuntimeConfTests.swift
index cedab9d..127dd3c 100644
--- a/Tests/SparkConnectTests/RuntimeConfTests.swift
+++ b/Tests/SparkConnectTests/RuntimeConfTests.swift
@@ -30,7 +30,7 @@ struct RuntimeConfTests {
   @Test
   func get() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
 
     #expect(try await !conf.get("spark.app.name").isEmpty)
@@ -45,7 +45,7 @@ struct RuntimeConfTests {
   @Test
   func set() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
     try await conf.set("spark.test.key1", "value1")
     #expect(try await conf.get("spark.test.key1") == "value1")
@@ -55,7 +55,7 @@ struct RuntimeConfTests {
   @Test
   func reset() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
 
     // Success with a key that doesn't exist
@@ -76,7 +76,7 @@ struct RuntimeConfTests {
   @Test
   func getAll() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
     let map = try await conf.getAll()
     #expect(map.count > 0)
diff --git a/Tests/SparkConnectTests/SQLHelper.swift b/Tests/SparkConnectTests/SQLHelper.swift
index c552119..162ad67 100644
--- a/Tests/SparkConnectTests/SQLHelper.swift
+++ b/Tests/SparkConnectTests/SQLHelper.swift
@@ -32,7 +32,7 @@ struct SQLHelper {
         f,
         {
           for name in dbNames {
-            _ = try await spark.sql("DROP DATABASE IF EXISTS \(name) 
CASCADE").count()
+            try await spark.sql("DROP DATABASE IF EXISTS \(name) 
CASCADE").count()
           }
         })
     }
@@ -47,7 +47,7 @@ struct SQLHelper {
         f,
         {
           for name in tableNames {
-            _ = try await spark.sql("DROP TABLE IF EXISTS \(name)").count()
+            try await spark.sql("DROP TABLE IF EXISTS \(name)").count()
           }
         })
     }
diff --git a/Tests/SparkConnectTests/SparkConnectClientTests.swift b/Tests/SparkConnectTests/SparkConnectClientTests.swift
index fe972d0..d519c9b 100644
--- a/Tests/SparkConnectTests/SparkConnectClientTests.swift
+++ b/Tests/SparkConnectTests/SparkConnectClientTests.swift
@@ -48,7 +48,7 @@ struct SparkConnectClientTests {
   func connectWithInvalidUUID() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
     try await #require(throws: SparkConnectError.InvalidSessionIDException) {
-      let _ = try await client.connect("not-a-uuid-format")
+      try await client.connect("not-a-uuid-format")
     }
     await client.stop()
   }
@@ -56,14 +56,14 @@ struct SparkConnectClientTests {
   @Test
   func connect() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    let _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     await client.stop()
   }
 
   @Test
   func tags() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    let _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     let plan = await client.getPlanRange(0, 1, 1)
 
     #expect(await client.getExecutePlanRequest(plan).tags.isEmpty)
@@ -79,7 +79,7 @@ struct SparkConnectClientTests {
   @Test
   func ddlParse() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    let _ = try await client.connect(UUID().uuidString)
+    try await client.connect(UUID().uuidString)
     #expect(try await client.ddlParse("a int").simpleString == "struct<a:int>")
     await client.stop()
   }
@@ -91,7 +91,7 @@ struct SparkConnectClientTests {
     let response = try await client.connect(UUID().uuidString)
     if response.sparkVersion.version.starts(with: "4.") {
       let json =
-      #"{"type":"struct","fields":[{"name":"id","type":"long","nullable":false,"metadata":{}}]}"#
+        #"{"type":"struct","fields":[{"name":"id","type":"long","nullable":false,"metadata":{}}]}"#
       #expect(try await client.jsonToDdl(json) == "id BIGINT NOT NULL")
     }
     await client.stop()
diff --git a/Tests/SparkConnectTests/SparkSessionTests.swift b/Tests/SparkConnectTests/SparkSessionTests.swift
index f730d9c..dd0c03a 100644
--- a/Tests/SparkConnectTests/SparkSessionTests.swift
+++ b/Tests/SparkConnectTests/SparkSessionTests.swift
@@ -81,7 +81,7 @@ struct SparkSessionTests {
     let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
     let spark = try await SparkSession.builder.getOrCreate()
     try await SQLHelper.withTable(spark, tableName)({
-      _ = try await spark.sql("CREATE TABLE \(tableName) USING ORC AS VALUES 
(1), (2)").count()
+      try await spark.sql("CREATE TABLE \(tableName) USING ORC AS VALUES (1), 
(2)").count()
       #expect(try await spark.table(tableName).count() == 2)
     })
     await spark.stop()


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
