This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new 7b55d6a  [SPARK-54811] Use Spark 4.1.0 to regenerate `Spark Connect`-based `Swift` source code
7b55d6a is described below

commit 7b55d6a101f2c6bb5952c35280b13fef5a6b302b
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Dec 23 09:20:31 2025 +0900

    [SPARK-54811] Use Spark 4.1.0 to regenerate `Spark Connect`-based `Swift` source code
    
    ### What changes were proposed in this pull request?
    
    This PR aims to update Spark Connect-generated Swift source code with Apache Spark `4.1.0`.
    
    ### Why are the changes needed?
    
    To pick up the latest bug fixes and the new protobuf messages needed to develop features for `4.1.0`:
    
    - https://github.com/apache/spark/pull/53024
    - https://github.com/apache/spark/pull/52894
    - https://github.com/apache/spark/pull/52890
    - https://github.com/apache/spark/pull/52872
    - https://github.com/apache/spark/pull/52746
    - https://github.com/apache/spark/pull/52831
    
    ```
    $ git clone -b v4.1.0 https://github.com/apache/spark.git
    $ cd spark/sql/connect/common/src/main/protobuf/
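    # Assumes protoc and the protoc-gen-swift / protoc-gen-grpc-swift plugins are installed and on PATH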
    $ protoc --swift_out=. spark/connect/*.proto
    $ protoc --grpc-swift_out=. spark/connect/*.proto
    
    # Remove gRPC stub files generated from proto files that contain no services
    $ cd spark/connect
    $ grep 'This file contained no services' * | awk -F: '{print $1}' | xargs rm
    ```
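    
    As an illustration only (not part of this PR's diff), the regeneration picks up the new `Plan.CompressedOperation` message. A minimal sketch of populating it, assuming the code is compiled inside the `SparkConnect` module (the generated types are `internal`); the payload bytes below are hypothetical placeholders and the zstd compression step itself is omitted:
    
    ```swift
    import Foundation
    
    // Hypothetical placeholder payload; a real caller would supply the
    // zstd-compressed bytes of a serialized Relation or Command.
    var op = Spark_Connect_Plan.CompressedOperation()
    op.data = Data([0x28, 0xb5, 0x2f, 0xfd])
    op.opType = .relation
    op.compressionCodec = .zstd
    
    // The generated accessor selects the `compressed_operation` oneof case.
    var plan = Spark_Connect_Plan()
    plan.compressedOperation = op
    ```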
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs. I manually tested with `Apache Spark 4.1.0`.
    
    ```
    $ swift test --no-parallel
    ...
    Test run with 203 tests in 21 suites passed after 33.163 seconds.
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #271 from dongjoon-hyun/SPARK-54811.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 Sources/SparkConnect/base.grpc.swift      |  26 ++---
 Sources/SparkConnect/base.pb.swift        | 172 +++++++++++++++++++++++++++++-
 Sources/SparkConnect/expressions.pb.swift |   2 +-
 Sources/SparkConnect/pipelines.pb.swift   |  63 ++++++++---
 Sources/SparkConnect/types.pb.swift       | 154 +++++++++++++++++++++++++-
 5 files changed, 386 insertions(+), 31 deletions(-)

diff --git a/Sources/SparkConnect/base.grpc.swift b/Sources/SparkConnect/base.grpc.swift
index a8f97d1..76c7861 100644
--- a/Sources/SparkConnect/base.grpc.swift
+++ b/Sources/SparkConnect/base.grpc.swift
@@ -31,13 +31,13 @@ import GRPCProtobuf
 
 /// Namespace containing generated types for the "spark.connect.SparkConnectService" service.
 @available(macOS 15.0, iOS 18.0, watchOS 11.0, tvOS 18.0, visionOS 2.0, *)
-internal enum Spark_Connect_SparkConnectService {
+internal enum Spark_Connect_SparkConnectService: Sendable {
     /// Service descriptor for the "spark.connect.SparkConnectService" service.
     internal static let descriptor = GRPCCore.ServiceDescriptor(fullyQualifiedService: "spark.connect.SparkConnectService")
     /// Namespace for method metadata.
-    internal enum Method {
+    internal enum Method: Sendable {
         /// Namespace for "ExecutePlan" metadata.
-        internal enum ExecutePlan {
+        internal enum ExecutePlan: Sendable {
             /// Request type for "ExecutePlan".
             internal typealias Input = Spark_Connect_ExecutePlanRequest
             /// Response type for "ExecutePlan".
@@ -49,7 +49,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "AnalyzePlan" metadata.
-        internal enum AnalyzePlan {
+        internal enum AnalyzePlan: Sendable {
             /// Request type for "AnalyzePlan".
             internal typealias Input = Spark_Connect_AnalyzePlanRequest
             /// Response type for "AnalyzePlan".
@@ -61,7 +61,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "Config" metadata.
-        internal enum Config {
+        internal enum Config: Sendable {
             /// Request type for "Config".
             internal typealias Input = Spark_Connect_ConfigRequest
             /// Response type for "Config".
@@ -73,7 +73,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "AddArtifacts" metadata.
-        internal enum AddArtifacts {
+        internal enum AddArtifacts: Sendable {
             /// Request type for "AddArtifacts".
             internal typealias Input = Spark_Connect_AddArtifactsRequest
             /// Response type for "AddArtifacts".
@@ -85,7 +85,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "ArtifactStatus" metadata.
-        internal enum ArtifactStatus {
+        internal enum ArtifactStatus: Sendable {
             /// Request type for "ArtifactStatus".
             internal typealias Input = Spark_Connect_ArtifactStatusesRequest
             /// Response type for "ArtifactStatus".
@@ -97,7 +97,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "Interrupt" metadata.
-        internal enum Interrupt {
+        internal enum Interrupt: Sendable {
             /// Request type for "Interrupt".
             internal typealias Input = Spark_Connect_InterruptRequest
             /// Response type for "Interrupt".
@@ -109,7 +109,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "ReattachExecute" metadata.
-        internal enum ReattachExecute {
+        internal enum ReattachExecute: Sendable {
             /// Request type for "ReattachExecute".
             internal typealias Input = Spark_Connect_ReattachExecuteRequest
             /// Response type for "ReattachExecute".
@@ -121,7 +121,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "ReleaseExecute" metadata.
-        internal enum ReleaseExecute {
+        internal enum ReleaseExecute: Sendable {
             /// Request type for "ReleaseExecute".
             internal typealias Input = Spark_Connect_ReleaseExecuteRequest
             /// Response type for "ReleaseExecute".
@@ -133,7 +133,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "ReleaseSession" metadata.
-        internal enum ReleaseSession {
+        internal enum ReleaseSession: Sendable {
             /// Request type for "ReleaseSession".
             internal typealias Input = Spark_Connect_ReleaseSessionRequest
             /// Response type for "ReleaseSession".
@@ -145,7 +145,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "FetchErrorDetails" metadata.
-        internal enum FetchErrorDetails {
+        internal enum FetchErrorDetails: Sendable {
             /// Request type for "FetchErrorDetails".
             internal typealias Input = Spark_Connect_FetchErrorDetailsRequest
             /// Response type for "FetchErrorDetails".
@@ -157,7 +157,7 @@ internal enum Spark_Connect_SparkConnectService {
             )
         }
         /// Namespace for "CloneSession" metadata.
-        internal enum CloneSession {
+        internal enum CloneSession: Sendable {
             /// Request type for "CloneSession".
             internal typealias Input = Spark_Connect_CloneSessionRequest
             /// Response type for "CloneSession".
diff --git a/Sources/SparkConnect/base.pb.swift b/Sources/SparkConnect/base.pb.swift
index b90a9ef..84fc610 100644
--- a/Sources/SparkConnect/base.pb.swift
+++ b/Sources/SparkConnect/base.pb.swift
@@ -37,10 +37,46 @@ fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAP
   typealias Version = _2
 }
 
+/// Compression codec for plan compression.
+enum Spark_Connect_CompressionCodec: SwiftProtobuf.Enum, Swift.CaseIterable {
+  typealias RawValue = Int
+  case unspecified // = 0
+  case zstd // = 1
+  case UNRECOGNIZED(Int)
+
+  init() {
+    self = .unspecified
+  }
+
+  init?(rawValue: Int) {
+    switch rawValue {
+    case 0: self = .unspecified
+    case 1: self = .zstd
+    default: self = .UNRECOGNIZED(rawValue)
+    }
+  }
+
+  var rawValue: Int {
+    switch self {
+    case .unspecified: return 0
+    case .zstd: return 1
+    case .UNRECOGNIZED(let i): return i
+    }
+  }
+
+  // The compiler won't synthesize support with the UNRECOGNIZED case.
+  static let allCases: [Spark_Connect_CompressionCodec] = [
+    .unspecified,
+    .zstd,
+  ]
+
+}
+
 /// A [[Plan]] is the structure that carries the runtime information for the execution from the
-/// client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference
-/// to the underlying logical plan or it can be of the [[Command]] type that is used to execute
-/// commands on the server.
+/// client to the server. A [[Plan]] can be one of the following:
+/// - [[Relation]]: a reference to the underlying logical plan.
+/// - [[Command]]: used to execute commands on the server.
+/// - [[CompressedOperation]]: a compressed representation of either a Relation or a Command.
 struct Spark_Connect_Plan: Sendable {
   // SwiftProtobuf.Message conformance is added in an extension below. See the
   // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
@@ -64,12 +100,75 @@ struct Spark_Connect_Plan: Sendable {
     set {opType = .command(newValue)}
   }
 
+  var compressedOperation: Spark_Connect_Plan.CompressedOperation {
+    get {
+      if case .compressedOperation(let v)? = opType {return v}
+      return Spark_Connect_Plan.CompressedOperation()
+    }
+    set {opType = .compressedOperation(newValue)}
+  }
+
   var unknownFields = SwiftProtobuf.UnknownStorage()
 
   enum OneOf_OpType: Equatable, Sendable {
     case root(Spark_Connect_Relation)
     case command(Spark_Connect_Command)
+    case compressedOperation(Spark_Connect_Plan.CompressedOperation)
+
+  }
+
+  struct CompressedOperation: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    var data: Data = Data()
+
+    var opType: Spark_Connect_Plan.CompressedOperation.OpType = .unspecified
+
+    var compressionCodec: Spark_Connect_CompressionCodec = .unspecified
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    enum OpType: SwiftProtobuf.Enum, Swift.CaseIterable {
+      typealias RawValue = Int
+      case unspecified // = 0
+      case relation // = 1
+      case command // = 2
+      case UNRECOGNIZED(Int)
+
+      init() {
+        self = .unspecified
+      }
+
+      init?(rawValue: Int) {
+        switch rawValue {
+        case 0: self = .unspecified
+        case 1: self = .relation
+        case 2: self = .command
+        default: self = .UNRECOGNIZED(rawValue)
+        }
+      }
+
+      var rawValue: Int {
+        switch self {
+        case .unspecified: return 0
+        case .relation: return 1
+        case .command: return 2
+        case .UNRECOGNIZED(let i): return i
+        }
+      }
+
+      // The compiler won't synthesize support with the UNRECOGNIZED case.
+      static let allCases: [Spark_Connect_Plan.CompressedOperation.OpType] = [
+        .unspecified,
+        .relation,
+        .command,
+      ]
 
+    }
+
+    init() {}
   }
 
   init() {}
@@ -3246,9 +3345,13 @@ struct Spark_Connect_CloneSessionResponse: Sendable {
 
 fileprivate let _protobuf_package = "spark.connect"
 
+extension Spark_Connect_CompressionCodec: SwiftProtobuf._ProtoNameProviding {
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{2}\0COMPRESSION_CODEC_UNSPECIFIED\0\u{1}COMPRESSION_CODEC_ZSTD\0")
+}
+
 extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".Plan"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}root\0\u{1}command\0")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}root\0\u{1}command\0\u{3}compressed_operation\0")
 
   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
     while let fieldNumber = try decoder.nextFieldNumber() {
@@ -3282,6 +3385,19 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
           self.opType = .command(v)
         }
       }()
+      case 3: try {
+        var v: Spark_Connect_Plan.CompressedOperation?
+        var hadOneofValue = false
+        if let current = self.opType {
+          hadOneofValue = true
+          if case .compressedOperation(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.opType = .compressedOperation(v)
+        }
+      }()
       default: break
       }
     }
@@ -3301,6 +3417,10 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
       guard case .command(let v)? = self.opType else { preconditionFailure() }
       try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
     }()
+    case .compressedOperation?: try {
+      guard case .compressedOperation(let v)? = self.opType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+    }()
     case nil: break
     }
     try unknownFields.traverse(visitor: &visitor)
@@ -3313,6 +3433,50 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
   }
 }
 
+extension Spark_Connect_Plan.CompressedOperation: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_Plan.protoMessageName + ".CompressedOperation"
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}data\0\u{3}op_type\0\u{3}compression_codec\0")
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularBytesField(value: &self.data) }()
+      case 2: try { try decoder.decodeSingularEnumField(value: &self.opType) }()
+      case 3: try { try decoder.decodeSingularEnumField(value: &self.compressionCodec) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.data.isEmpty {
+      try visitor.visitSingularBytesField(value: self.data, fieldNumber: 1)
+    }
+    if self.opType != .unspecified {
+      try visitor.visitSingularEnumField(value: self.opType, fieldNumber: 2)
+    }
+    if self.compressionCodec != .unspecified {
+      try visitor.visitSingularEnumField(value: self.compressionCodec, fieldNumber: 3)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_Plan.CompressedOperation, rhs: Spark_Connect_Plan.CompressedOperation) -> Bool {
+    if lhs.data != rhs.data {return false}
+    if lhs.opType != rhs.opType {return false}
+    if lhs.compressionCodec != rhs.compressionCodec {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_Plan.CompressedOperation.OpType: SwiftProtobuf._ProtoNameProviding {
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{2}\0OP_TYPE_UNSPECIFIED\0\u{1}OP_TYPE_RELATION\0\u{1}OP_TYPE_COMMAND\0")
+}
+
 extension Spark_Connect_UserContext: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".UserContext"
   static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}user_id\0\u{3}user_name\0\u{2}e\u{f}extensions\0")
diff --git a/Sources/SparkConnect/expressions.pb.swift b/Sources/SparkConnect/expressions.pb.swift
index c2fc6c1..9bbf5df 100644
--- a/Sources/SparkConnect/expressions.pb.swift
+++ b/Sources/SparkConnect/expressions.pb.swift
@@ -2994,7 +2994,7 @@ extension Spark_Connect_Expression.Cast.EvalMode: SwiftProtobuf._ProtoNameProvid
 
 extension Spark_Connect_Expression.Literal: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = Spark_Connect_Expression.protoMessageName + ".Literal"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{2}\u{3}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{2}\u{3}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}map\0\u{1}struct\0\u{3}specialized_array\0\u{1}time\0\u{4}J\u{1}data_type\0")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{2}\u{3}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{2}\u{3}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}map\0\u{1}struct\0\u{3}specialized_array\0\u{1}time\0\u{4}J\u{1}data_type\0\u{c}\u{1b}\u{1}\u{c}\u{1c}\u{1}")
 
   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
     while let fieldNumber = try decoder.nextFieldNumber() {
diff --git a/Sources/SparkConnect/pipelines.pb.swift b/Sources/SparkConnect/pipelines.pb.swift
index 8aeb2ea..0e122e7 100644
--- a/Sources/SparkConnect/pipelines.pb.swift
+++ b/Sources/SparkConnect/pipelines.pb.swift
@@ -388,6 +388,9 @@ struct Spark_Connect_PipelineCommand: Sendable {
         set {schema = .schemaString(newValue)}
       }
 
+      /// Optional cluster columns for the table.
+      var clusteringColumns: [String] = []
+
       var unknownFields = SwiftProtobuf.UnknownStorage()
 
       /// Schema for the table. If unset, this will be inferred from incoming flows.
@@ -515,6 +518,20 @@ struct Spark_Connect_PipelineCommand: Sendable {
       set {details = .extension(newValue)}
     }
 
+    /// If true, define the flow as a one-time flow, such as for backfill.
+    /// Set to true changes the flow in two ways:
+    ///   - The flow is run one time by default. If the pipeline is ran with a full refresh,
+    ///     the flow will run again.
+    ///   - The flow function must be a batch DataFrame, not a streaming DataFrame.
+    var once: Bool {
+      get {return _once ?? false}
+      set {_once = newValue}
+    }
+    /// Returns true if `once` has been explicitly set.
+    var hasOnce: Bool {return self._once != nil}
+    /// Clears the value of `once`. Subsequent reads from it will return its default value.
+    mutating func clearOnce() {self._once = nil}
+
     var unknownFields = SwiftProtobuf.UnknownStorage()
 
     enum OneOf_Details: Equatable, Sendable {
@@ -576,6 +593,7 @@ struct Spark_Connect_PipelineCommand: Sendable {
     fileprivate var _targetDatasetName: String? = nil
     fileprivate var _clientID: String? = nil
     fileprivate var _sourceCodeLocation: Spark_Connect_SourceCodeLocation? = nil
+    fileprivate var _once: Bool? = nil
   }
 
   /// Resolves all datasets and flows and start a pipeline update. Should be called after all
@@ -1027,6 +1045,16 @@ struct Spark_Connect_PipelineAnalysisContext: Sendable {
   /// Clears the value of `definitionPath`. Subsequent reads from it will return its default value.
   mutating func clearDefinitionPath() {self._definitionPath = nil}
 
+  /// The name of the Flow involved in this analysis
+  var flowName: String {
+    get {return _flowName ?? String()}
+    set {_flowName = newValue}
+  }
+  /// Returns true if `flowName` has been explicitly set.
+  var hasFlowName: Bool {return self._flowName != nil}
+  /// Clears the value of `flowName`. Subsequent reads from it will return its default value.
+  mutating func clearFlowName() {self._flowName = nil}
+
   /// Reserved field for protocol extensions.
   var `extension`: [SwiftProtobuf.Google_Protobuf_Any] = []
 
@@ -1036,6 +1064,7 @@ struct Spark_Connect_PipelineAnalysisContext: Sendable {
 
   fileprivate var _dataflowGraphID: String? = nil
   fileprivate var _definitionPath: String? = nil
+  fileprivate var _flowName: String? = nil
 }
 
 // MARK: - Code below here is support for the SwiftProtobuf runtime.
@@ -1421,7 +1450,7 @@ extension Spark_Connect_PipelineCommand.DefineOutput: SwiftProtobuf.Message, Swi
 
 extension Spark_Connect_PipelineCommand.DefineOutput.TableDetails: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = Spark_Connect_PipelineCommand.DefineOutput.protoMessageName + ".TableDetails"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}table_properties\0\u{3}partition_cols\0\u{1}format\0\u{3}schema_data_type\0\u{3}schema_string\0")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}table_properties\0\u{3}partition_cols\0\u{1}format\0\u{3}schema_data_type\0\u{3}schema_string\0\u{3}clustering_columns\0")
 
   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
     while let fieldNumber = try decoder.nextFieldNumber() {
@@ -1453,6 +1482,7 @@ extension Spark_Connect_PipelineCommand.DefineOutput.TableDetails: SwiftProtobuf
           self.schema = .schemaString(v)
         }
       }()
+      case 6: try { try decoder.decodeRepeatedStringField(value: &self.clusteringColumns) }()
       default: break
       }
     }
@@ -1483,6 +1513,9 @@ extension Spark_Connect_PipelineCommand.DefineOutput.TableDetails: SwiftProtobuf
     }()
     case nil: break
     }
+    if !self.clusteringColumns.isEmpty {
+      try visitor.visitRepeatedStringField(value: self.clusteringColumns, fieldNumber: 6)
+    }
     try unknownFields.traverse(visitor: &visitor)
   }
 
@@ -1491,6 +1524,7 @@ extension Spark_Connect_PipelineCommand.DefineOutput.TableDetails: SwiftProtobuf
     if lhs.partitionCols != rhs.partitionCols {return false}
     if lhs._format != rhs._format {return false}
     if lhs.schema != rhs.schema {return false}
+    if lhs.clusteringColumns != rhs.clusteringColumns {return false}
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
   }
@@ -1537,7 +1571,7 @@ extension Spark_Connect_PipelineCommand.DefineOutput.SinkDetails: SwiftProtobuf.
 
 extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DefineFlow"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}dataflow_graph_id\0\u{3}flow_name\0\u{3}target_dataset_name\0\u{3}sql_conf\0\u{3}client_id\0\u{3}source_code_location\0\u{3}relation_flow_details\0\u{2}`\u{f}extension\0")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}dataflow_graph_id\0\u{3}flow_name\0\u{3}target_dataset_name\0\u{3}sql_conf\0\u{3}client_id\0\u{3}source_code_location\0\u{3}relation_flow_details\0\u{1}once\0\u{2}_\u{f}extension\0")
 
   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
     while let fieldNumber = try decoder.nextFieldNumber() {
@@ -1564,6 +1598,7 @@ extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, Swift
           self.details = .relationFlowDetails(v)
         }
       }()
+      case 8: try { try decoder.decodeSingularBoolField(value: &self._once) }()
       case 999: try {
         var v: SwiftProtobuf.Google_Protobuf_Any?
         var hadOneofValue = false
@@ -1605,17 +1640,15 @@ extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, Swift
     try { if let v = self._sourceCodeLocation {
       try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
     } }()
-    switch self.details {
-    case .relationFlowDetails?: try {
-      guard case .relationFlowDetails(let v)? = self.details else { preconditionFailure() }
+    try { if case .relationFlowDetails(let v)? = self.details {
       try visitor.visitSingularMessageField(value: v, fieldNumber: 7)
-    }()
-    case .extension?: try {
-      guard case .extension(let v)? = self.details else { preconditionFailure() }
+    } }()
+    try { if let v = self._once {
+      try visitor.visitSingularBoolField(value: v, fieldNumber: 8)
+    } }()
+    try { if case .extension(let v)? = self.details {
       try visitor.visitSingularMessageField(value: v, fieldNumber: 999)
-    }()
-    case nil: break
-    }
+    } }()
     try unknownFields.traverse(visitor: &visitor)
   }
 
@@ -1627,6 +1660,7 @@ extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, Swift
     if lhs._clientID != rhs._clientID {return false}
     if lhs._sourceCodeLocation != rhs._sourceCodeLocation {return false}
     if lhs.details != rhs.details {return false}
+    if lhs._once != rhs._once {return false}
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
   }
@@ -2226,7 +2260,7 @@ extension Spark_Connect_PipelineQueryFunctionExecutionSignal: SwiftProtobuf.Mess
 
 extension Spark_Connect_PipelineAnalysisContext: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".PipelineAnalysisContext"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}dataflow_graph_id\0\u{3}definition_path\0\u{2}e\u{f}extension\0")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}dataflow_graph_id\0\u{3}definition_path\0\u{3}flow_name\0\u{2}d\u{f}extension\0")
 
   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
     while let fieldNumber = try decoder.nextFieldNumber() {
@@ -2236,6 +2270,7 @@ extension Spark_Connect_PipelineAnalysisContext: SwiftProtobuf.Message, SwiftPro
       switch fieldNumber {
       case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
       case 2: try { try decoder.decodeSingularStringField(value: &self._definitionPath) }()
+      case 3: try { try decoder.decodeSingularStringField(value: &self._flowName) }()
       case 999: try { try decoder.decodeRepeatedMessageField(value: &self.`extension`) }()
       default: break
       }
@@ -2253,6 +2288,9 @@ extension Spark_Connect_PipelineAnalysisContext: SwiftProtobuf.Message, SwiftPro
     try { if let v = self._definitionPath {
       try visitor.visitSingularStringField(value: v, fieldNumber: 2)
     } }()
+    try { if let v = self._flowName {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 3)
+    } }()
     if !self.`extension`.isEmpty {
       try visitor.visitRepeatedMessageField(value: self.`extension`, fieldNumber: 999)
     }
@@ -2262,6 +2300,7 @@ extension Spark_Connect_PipelineAnalysisContext: SwiftProtobuf.Message, SwiftPro
   static func ==(lhs: Spark_Connect_PipelineAnalysisContext, rhs: Spark_Connect_PipelineAnalysisContext) -> Bool {
     if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
     if lhs._definitionPath != rhs._definitionPath {return false}
+    if lhs._flowName != rhs._flowName {return false}
     if lhs.`extension` != rhs.`extension` {return false}
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
diff --git a/Sources/SparkConnect/types.pb.swift b/Sources/SparkConnect/types.pb.swift
index a4394d6..f551ad8 100644
--- a/Sources/SparkConnect/types.pb.swift
+++ b/Sources/SparkConnect/types.pb.swift
@@ -246,6 +246,23 @@ struct Spark_Connect_DataType: @unchecked Sendable {
     set {_uniqueStorage()._kind = .udt(newValue)}
   }
 
+  /// Geospatial types
+  var geometry: Spark_Connect_DataType.Geometry {
+    get {
+      if case .geometry(let v)? = _storage._kind {return v}
+      return Spark_Connect_DataType.Geometry()
+    }
+    set {_uniqueStorage()._kind = .geometry(newValue)}
+  }
+
+  var geography: Spark_Connect_DataType.Geography {
+    get {
+      if case .geography(let v)? = _storage._kind {return v}
+      return Spark_Connect_DataType.Geography()
+    }
+    set {_uniqueStorage()._kind = .geography(newValue)}
+  }
+
   /// UnparsedDataType
   var unparsed: Spark_Connect_DataType.Unparsed {
     get {
@@ -296,6 +313,9 @@ struct Spark_Connect_DataType: @unchecked Sendable {
     case variant(Spark_Connect_DataType.Variant)
     /// UserDefinedType
     case udt(Spark_Connect_DataType.UDT)
+    /// Geospatial types
+    case geometry(Spark_Connect_DataType.Geometry)
+    case geography(Spark_Connect_DataType.Geography)
     /// UnparsedDataType
     case unparsed(Spark_Connect_DataType.Unparsed)
     case time(Spark_Connect_DataType.Time)
@@ -743,6 +763,34 @@ struct Spark_Connect_DataType: @unchecked Sendable {
     fileprivate var _storage = _StorageClass.defaultInstance
   }
 
+  struct Geometry: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    var srid: Int32 = 0
+
+    var typeVariationReference: UInt32 = 0
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+  }
+
+  struct Geography: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    var srid: Int32 = 0
+
+    var typeVariationReference: UInt32 = 0
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+  }
+
   struct Variant: Sendable {
     // SwiftProtobuf.Message conformance is added in an extension below. See the
     // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
@@ -836,7 +884,7 @@ fileprivate let _protobuf_package = "spark.connect"
 
 extension Spark_Connect_DataType: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".DataType"
-  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{1}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{1}char\0\u{3}var_char\0\u{1}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}struct\0\u{1}map\0\u{1}udt\0\u{1}unparsed\0\u{1}variant\0\u{2}\u{3}time\0\u{c}\u{1a}\u{1}\u{c}\u{1b}\u{1}")
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{1}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{1}char\0\u{3}var_char\0\u{1}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}struct\0\u{1}map\0\u{1}udt\0\u{1}unparsed\0\u{1}variant\0\u{1}geometry\0\u{1}geography\0\u{1}time\0")
 
   fileprivate class _StorageClass {
     var _kind: Spark_Connect_DataType.OneOf_Kind?
@@ -1194,6 +1242,32 @@ extension Spark_Connect_DataType: SwiftProtobuf.Message, SwiftProtobuf._MessageI
             _storage._kind = .variant(v)
           }
         }()
+        case 26: try {
+          var v: Spark_Connect_DataType.Geometry?
+          var hadOneofValue = false
+          if let current = _storage._kind {
+            hadOneofValue = true
+            if case .geometry(let m) = current {v = m}
+          }
+          try decoder.decodeSingularMessageField(value: &v)
+          if let v = v {
+            if hadOneofValue {try decoder.handleConflictingOneOf()}
+            _storage._kind = .geometry(v)
+          }
+        }()
+        case 27: try {
+          var v: Spark_Connect_DataType.Geography?
+          var hadOneofValue = false
+          if let current = _storage._kind {
+            hadOneofValue = true
+            if case .geography(let m) = current {v = m}
+          }
+          try decoder.decodeSingularMessageField(value: &v)
+          if let v = v {
+            if hadOneofValue {try decoder.handleConflictingOneOf()}
+            _storage._kind = .geography(v)
+          }
+        }()
         case 28: try {
           var v: Spark_Connect_DataType.Time?
           var hadOneofValue = false
@@ -1320,6 +1394,14 @@ extension Spark_Connect_DataType: SwiftProtobuf.Message, SwiftProtobuf._MessageI
         guard case .variant(let v)? = _storage._kind else { preconditionFailure() }
         try visitor.visitSingularMessageField(value: v, fieldNumber: 25)
       }()
+      case .geometry?: try {
+        guard case .geometry(let v)? = _storage._kind else { preconditionFailure() }
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 26)
+      }()
+      case .geography?: try {
+        guard case .geography(let v)? = _storage._kind else { preconditionFailure() }
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 27)
+      }()
       case .time?: try {
         guard case .time(let v)? = _storage._kind else { preconditionFailure() }
         try visitor.visitSingularMessageField(value: v, fieldNumber: 28)
@@ -2270,6 +2352,76 @@ extension Spark_Connect_DataType.Map: SwiftProtobuf.Message, SwiftProtobuf._Mess
   }
 }
 
+extension Spark_Connect_DataType.Geometry: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_DataType.protoMessageName + ".Geometry"
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}srid\0\u{3}type_variation_reference\0")
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularInt32Field(value: &self.srid) }()
+      case 2: try { try decoder.decodeSingularUInt32Field(value: &self.typeVariationReference) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if self.srid != 0 {
+      try visitor.visitSingularInt32Field(value: self.srid, fieldNumber: 1)
+    }
+    if self.typeVariationReference != 0 {
+      try visitor.visitSingularUInt32Field(value: self.typeVariationReference, fieldNumber: 2)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_DataType.Geometry, rhs: Spark_Connect_DataType.Geometry) -> Bool {
+    if lhs.srid != rhs.srid {return false}
+    if lhs.typeVariationReference != rhs.typeVariationReference {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_DataType.Geography: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_DataType.protoMessageName + ".Geography"
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}srid\0\u{3}type_variation_reference\0")
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularInt32Field(value: &self.srid) }()
+      case 2: try { try decoder.decodeSingularUInt32Field(value: &self.typeVariationReference) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if self.srid != 0 {
+      try visitor.visitSingularInt32Field(value: self.srid, fieldNumber: 1)
+    }
+    if self.typeVariationReference != 0 {
+      try visitor.visitSingularUInt32Field(value: self.typeVariationReference, fieldNumber: 2)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_DataType.Geography, rhs: Spark_Connect_DataType.Geography) -> Bool {
+    if lhs.srid != rhs.srid {return false}
+    if lhs.typeVariationReference != rhs.typeVariationReference {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
 extension Spark_Connect_DataType.Variant: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = Spark_Connect_DataType.protoMessageName + ".Variant"
   static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}type_variation_reference\0")


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
