This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new f715923  [SPARK-52318] Refactor `SparkConnectError` to simplify case names
f715923 is described below

commit f7159234b328513fa4645919377f0c0144f6ac76
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Mon May 26 19:21:13 2025 -0700

    [SPARK-52318] Refactor `SparkConnectError` to simplify case names
    
    ### What changes were proposed in this pull request?
    
    This PR aims to refactor `SparkConnectError` to simplify case names.
    
    ### Why are the changes needed?
    
    We should simplify the case names by removing the `Exception` suffix
    before adding more errors such as `CatalogNotFound` and `SchemaNotFound`.
    In Swift, the cases are referenced with the `SparkConnectError.` prefix,
    as follows.
    
    ```swift
    - throw SparkConnectError.UnsupportedOperationException
    + throw SparkConnectError.UnsupportedOperation
    ```
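
    As a minimal catch-site sketch (based on `Row.get` and `Row.empty` from
    this patch; the example itself is illustrative and not part of the
    change), callers now match the shorter case name:

    ```swift
    import SparkConnect

    do {
      // `Row.empty` has no values, so any index is out of range.
      _ = try Row.empty.get(0)
    } catch SparkConnectError.InvalidArgument {
      print("invalid index")
    }
    ```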
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, but this only renames the exception cases, and the change lands in a 0.x release.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #177 from dongjoon-hyun/SPARK-52318.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 Sources/SparkConnect/DataFrame.swift                     |  2 +-
 Sources/SparkConnect/Extension.swift                     | 10 +++++-----
 Sources/SparkConnect/MergeIntoWriter.swift               |  2 +-
 Sources/SparkConnect/ProtoUtils.swift                    |  4 ++--
 Sources/SparkConnect/Row.swift                           |  2 +-
 Sources/SparkConnect/SparkConnectClient.swift            |  4 ++--
 Sources/SparkConnect/SparkConnectError.swift             |  8 ++++----
 Sources/SparkConnect/SparkFileUtils.swift                |  2 +-
 Sources/SparkConnect/SparkSession.swift                  |  2 +-
 Sources/SparkConnect/StreamingQueryManager.swift         |  4 ++--
 Tests/SparkConnectTests/DataFrameReaderTests.swift       |  4 ++--
 Tests/SparkConnectTests/DataFrameTests.swift             |  2 +-
 Tests/SparkConnectTests/RowTests.swift                   |  4 ++--
 Tests/SparkConnectTests/SparkConnectClientTests.swift    |  2 +-
 Tests/SparkConnectTests/SparkSessionTests.swift          |  6 +++---
 Tests/SparkConnectTests/StreamingQueryManagerTests.swift |  6 +++---
 16 files changed, 32 insertions(+), 32 deletions(-)

diff --git a/Sources/SparkConnect/DataFrame.swift b/Sources/SparkConnect/DataFrame.swift
index e52226a..393168e 100644
--- a/Sources/SparkConnect/DataFrame.swift
+++ b/Sources/SparkConnect/DataFrame.swift
@@ -258,7 +258,7 @@ public actor DataFrame: Sendable {
     // SQLSTATE: 0A000
     // [UNSUPPORTED_CONNECT_FEATURE.RDD]
    // Feature is not supported in Spark Connect: Resilient Distributed Datasets (RDDs).
-    throw SparkConnectError.UnsupportedOperationException
+    throw SparkConnectError.UnsupportedOperation
   }
 
   /// Return an array of column name strings
diff --git a/Sources/SparkConnect/Extension.swift b/Sources/SparkConnect/Extension.swift
index 6586cd7..f7d869e 100644
--- a/Sources/SparkConnect/Extension.swift
+++ b/Sources/SparkConnect/Extension.swift
@@ -213,7 +213,7 @@ extension YearMonthInterval {
     case 0: "year"
     case 1: "month"
     default:
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
   }
 
@@ -225,7 +225,7 @@ extension YearMonthInterval {
     } else if startFieldName < endFieldName {
       "interval \(startFieldName) to \(endFieldName)"
     } else {
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
     return interval
   }
@@ -239,7 +239,7 @@ extension DayTimeInterval {
     case 2: "minute"
     case 3: "second"
     default:
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
   }
 
@@ -251,7 +251,7 @@ extension DayTimeInterval {
     } else if startFieldName < endFieldName {
       "interval \(startFieldName) to \(endFieldName)"
     } else {
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
     return interval
   }
@@ -325,7 +325,7 @@ extension DataType {
       case .unparsed:
         self.unparsed.dataTypeString
       default:
-        throw SparkConnectError.InvalidTypeException
+        throw SparkConnectError.InvalidType
       }
     }
   }
diff --git a/Sources/SparkConnect/MergeIntoWriter.swift b/Sources/SparkConnect/MergeIntoWriter.swift
index de15892..10f09f3 100644
--- a/Sources/SparkConnect/MergeIntoWriter.swift
+++ b/Sources/SparkConnect/MergeIntoWriter.swift
@@ -187,7 +187,7 @@ public actor MergeIntoWriter {
       && self.mergeIntoTableCommand.notMatchedActions.count == 0
       && self.mergeIntoTableCommand.notMatchedBySourceActions.count == 0
     {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
    self.mergeIntoTableCommand.sourceTablePlan = await (self.df.getPlan() as! Plan).root
     self.mergeIntoTableCommand.withSchemaEvolution = self.schemaEvolution
diff --git a/Sources/SparkConnect/ProtoUtils.swift b/Sources/SparkConnect/ProtoUtils.swift
index 738213f..f0f137a 100644
--- a/Sources/SparkConnect/ProtoUtils.swift
+++ b/Sources/SparkConnect/ProtoUtils.swift
@@ -30,10 +30,10 @@ public enum ProtoUtils {
    // because the Spark Connect job tag is also used as part of SparkContext job tag.
     // See SparkContext.throwIfInvalidTag and ExecuteHolderSessionTag
     if tag.isEmpty {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     if tag.contains(SPARK_JOB_TAGS_SEP) {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
   }
 }
diff --git a/Sources/SparkConnect/Row.swift b/Sources/SparkConnect/Row.swift
index 79ade14..249ccae 100644
--- a/Sources/SparkConnect/Row.swift
+++ b/Sources/SparkConnect/Row.swift
@@ -45,7 +45,7 @@ public struct Row: Sendable, Equatable {
 
   public func get(_ i: Int) throws -> Sendable {
     if i < 0 || i >= self.length {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     return values[i]
   }
diff --git a/Sources/SparkConnect/SparkConnectClient.swift b/Sources/SparkConnect/SparkConnectClient.swift
index f8a0073..93889ce 100644
--- a/Sources/SparkConnect/SparkConnectClient.swift
+++ b/Sources/SparkConnect/SparkConnectClient.swift
@@ -104,7 +104,7 @@ public actor SparkConnectClient {
     try await withGPRC { client in
      // To prevent server-side `INVALID_HANDLE.FORMAT (SQLSTATE: HY000)` exception.
       if UUID(uuidString: sessionID) == nil {
-        throw SparkConnectError.InvalidSessionIDException
+        throw SparkConnectError.InvalidSessionID
       }
 
       self.sessionID = sessionID
@@ -787,7 +787,7 @@ public actor SparkConnectClient {
       } catch let error as RPCError where error.code == .internalError {
         switch error.message {
        case let m where m.contains("UNSUPPORTED_DATATYPE") || m.contains("INVALID_IDENTIFIER"):
-          throw SparkConnectError.InvalidTypeException
+          throw SparkConnectError.InvalidType
         default:
           throw error
         }
diff --git a/Sources/SparkConnect/SparkConnectError.swift b/Sources/SparkConnect/SparkConnectError.swift
index 4434b6d..cff40d7 100644
--- a/Sources/SparkConnect/SparkConnectError.swift
+++ b/Sources/SparkConnect/SparkConnectError.swift
@@ -19,8 +19,8 @@
 
 /// A enum for ``SparkConnect`` package errors
 public enum SparkConnectError: Error {
-  case UnsupportedOperationException
-  case InvalidArgumentException
-  case InvalidSessionIDException
-  case InvalidTypeException
+  case InvalidArgument
+  case InvalidSessionID
+  case InvalidType
+  case UnsupportedOperation
 }
diff --git a/Sources/SparkConnect/SparkFileUtils.swift b/Sources/SparkConnect/SparkFileUtils.swift
index 1c7fa44..e5fe282 100644
--- a/Sources/SparkConnect/SparkFileUtils.swift
+++ b/Sources/SparkConnect/SparkFileUtils.swift
@@ -106,7 +106,7 @@ public enum SparkFileUtils {
     if fileManager.fileExists(atPath: url.path) {
       try fileManager.removeItem(at: url)
     } else {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
   }
 }
diff --git a/Sources/SparkConnect/SparkSession.swift b/Sources/SparkConnect/SparkSession.swift
index ed25f5e..ed5bd38 100644
--- a/Sources/SparkConnect/SparkSession.swift
+++ b/Sources/SparkConnect/SparkSession.swift
@@ -83,7 +83,7 @@ public actor SparkSession {
       // SQLSTATE: 0A000
       // [UNSUPPORTED_CONNECT_FEATURE.SESSION_SPARK_CONTEXT]
       // Feature is not supported in Spark Connect: Access to the SparkContext.
-      throw SparkConnectError.UnsupportedOperationException
+      throw SparkConnectError.UnsupportedOperation
     }
   }
 
diff --git a/Sources/SparkConnect/StreamingQueryManager.swift b/Sources/SparkConnect/StreamingQueryManager.swift
index b670aa3..37178bc 100644
--- a/Sources/SparkConnect/StreamingQueryManager.swift
+++ b/Sources/SparkConnect/StreamingQueryManager.swift
@@ -135,7 +135,7 @@ public actor StreamingQueryManager {
    let response = try await self.sparkSession.client.executeStreamingQueryManagerCommand(command)
     let query = response.first!.streamingQueryManagerCommandResult.query
     guard query.hasID else {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     return StreamingQuery(
       UUID(uuidString: query.id.id)!,
@@ -154,7 +154,7 @@ public actor StreamingQueryManager {
    var awaitAnyTerminationCommand = StreamingQueryManagerCommand.AwaitAnyTerminationCommand()
     if let timeoutMs {
       guard timeoutMs > 0 else {
-        throw SparkConnectError.InvalidArgumentException
+        throw SparkConnectError.InvalidArgument
       }
       awaitAnyTerminationCommand.timeoutMs = timeoutMs
     }
diff --git a/Tests/SparkConnectTests/DataFrameReaderTests.swift b/Tests/SparkConnectTests/DataFrameReaderTests.swift
index 3c63116..0dfd04b 100644
--- a/Tests/SparkConnectTests/DataFrameReaderTests.swift
+++ b/Tests/SparkConnectTests/DataFrameReaderTests.swift
@@ -103,10 +103,10 @@ struct DataFrameReaderTests {
   @Test
   func invalidSchema() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
-    await #expect(throws: SparkConnectError.InvalidTypeException) {
+    await #expect(throws: SparkConnectError.InvalidType) {
       try await spark.read.schema("invalid-name SHORT")
     }
-    await #expect(throws: SparkConnectError.InvalidTypeException) {
+    await #expect(throws: SparkConnectError.InvalidType) {
       try await spark.read.schema("age UNKNOWN_TYPE")
     }
     await spark.stop()
diff --git a/Tests/SparkConnectTests/DataFrameTests.swift b/Tests/SparkConnectTests/DataFrameTests.swift
index d85c607..15cc6d6 100644
--- a/Tests/SparkConnectTests/DataFrameTests.swift
+++ b/Tests/SparkConnectTests/DataFrameTests.swift
@@ -49,7 +49,7 @@ struct DataFrameTests {
   @Test
   func rdd() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
-    await #expect(throws: SparkConnectError.UnsupportedOperationException) {
+    await #expect(throws: SparkConnectError.UnsupportedOperation) {
       try await spark.range(1).rdd()
     }
     await spark.stop()
diff --git a/Tests/SparkConnectTests/RowTests.swift b/Tests/SparkConnectTests/RowTests.swift
index 02da050..d44f4f2 100644
--- a/Tests/SparkConnectTests/RowTests.swift
+++ b/Tests/SparkConnectTests/RowTests.swift
@@ -28,7 +28,7 @@ struct RowTests {
   func empty() {
     #expect(Row.empty.size == 0)
     #expect(Row.empty.length == 0)
-    #expect(throws: SparkConnectError.InvalidArgumentException) {
+    #expect(throws: SparkConnectError.InvalidArgument) {
       try Row.empty.get(0)
     }
   }
@@ -57,7 +57,7 @@ struct RowTests {
     #expect(try row.get(2) as! String == "a")
     #expect(try row.get(3) as! Bool == true)
     #expect(try row.get(4) as! Decimal == Decimal(1.2))
-    #expect(throws: SparkConnectError.InvalidArgumentException) {
+    #expect(throws: SparkConnectError.InvalidArgument) {
       try Row.empty.get(-1)
     }
   }
diff --git a/Tests/SparkConnectTests/SparkConnectClientTests.swift b/Tests/SparkConnectTests/SparkConnectClientTests.swift
index d0c6db2..e47eab6 100644
--- a/Tests/SparkConnectTests/SparkConnectClientTests.swift
+++ b/Tests/SparkConnectTests/SparkConnectClientTests.swift
@@ -47,7 +47,7 @@ struct SparkConnectClientTests {
   @Test
   func connectWithInvalidUUID() async throws {
     let client = SparkConnectClient(remote: TEST_REMOTE)
-    try await #require(throws: SparkConnectError.InvalidSessionIDException) {
+    try await #require(throws: SparkConnectError.InvalidSessionID) {
       try await client.connect("not-a-uuid-format")
     }
     await client.stop()
diff --git a/Tests/SparkConnectTests/SparkSessionTests.swift b/Tests/SparkConnectTests/SparkSessionTests.swift
index 7404395..50eb95c 100644
--- a/Tests/SparkConnectTests/SparkSessionTests.swift
+++ b/Tests/SparkConnectTests/SparkSessionTests.swift
@@ -29,7 +29,7 @@ struct SparkSessionTests {
   func sparkContext() async throws {
     await SparkSession.builder.clear()
     let spark = try await SparkSession.builder.getOrCreate()
-    await #expect(throws: SparkConnectError.UnsupportedOperationException) {
+    await #expect(throws: SparkConnectError.UnsupportedOperation) {
       try await spark.sparkContext
     }
     await spark.stop()
@@ -186,10 +186,10 @@ struct SparkSessionTests {
   func invalidTags() async throws {
     await SparkSession.builder.clear()
     let spark = try await SparkSession.builder.getOrCreate()
-    await #expect(throws: SparkConnectError.InvalidArgumentException) {
+    await #expect(throws: SparkConnectError.InvalidArgument) {
       try await spark.addTag("")
     }
-    await #expect(throws: SparkConnectError.InvalidArgumentException) {
+    await #expect(throws: SparkConnectError.InvalidArgument) {
       try await spark.addTag(",")
     }
     await spark.stop()
diff --git a/Tests/SparkConnectTests/StreamingQueryManagerTests.swift b/Tests/SparkConnectTests/StreamingQueryManagerTests.swift
index ada8517..e457b4a 100644
--- a/Tests/SparkConnectTests/StreamingQueryManagerTests.swift
+++ b/Tests/SparkConnectTests/StreamingQueryManagerTests.swift
@@ -35,10 +35,10 @@ struct StreamingQueryManagerTests {
   @Test
   func get() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
-    await #expect(throws: SparkConnectError.InvalidArgumentException) {
+    await #expect(throws: SparkConnectError.InvalidArgument) {
       try await spark.streams.get(UUID())
     }
-    await #expect(throws: SparkConnectError.InvalidArgumentException) {
+    await #expect(throws: SparkConnectError.InvalidArgument) {
       try await spark.streams.get(UUID().uuidString)
     }
     await spark.stop()
@@ -48,7 +48,7 @@ struct StreamingQueryManagerTests {
   func awaitAnyTermination() async throws {
     let spark = try await SparkSession.builder.getOrCreate()
     try await spark.streams.awaitAnyTermination(1)
-    await #expect(throws: SparkConnectError.InvalidArgumentException) {
+    await #expect(throws: SparkConnectError.InvalidArgument) {
       try await spark.streams.awaitAnyTermination(-1)
     }
     await spark.stop()

