This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new e1e8a32  [SPARK-51997] Mark `nodoc` to hide generated and internal classes from docs
e1e8a32 is described below

commit e1e8a327ce13d53ecaa4b7c2a5e8d69ffb822940
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun May 4 08:15:31 2025 -0700

    [SPARK-51997] Mark `nodoc` to hide generated and internal classes from docs
    
    ### What changes were proposed in this pull request?
    
    This PR aims to mark `nodoc` to hide generated and internal classes from docs.
    
    ### Why are the changes needed?
    
    Since #107, the documentation is visible here.
    - https://swiftpackageindex.com/apache/spark-connect-swift/main/documentation/sparkconnect
    
    We need to simplify the documentation to focus on `Spark Connect for Swift`
    itself by hiding irrelevant information. For example, `Arrow`-related
    information will eventually be removed once we start to use the official
    `Apache Arrow` release.
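
    As a minimal sketch of the pattern applied throughout this diff (the type
    names below are hypothetical, not part of this change), a `/// @nodoc`
    documentation comment is placed directly above each public declaration that
    should be excluded from the generated documentation:

    ```swift
    /// @nodoc
    public struct ExampleGeneratedType {
      public let value: Int
    }

    /// A user-facing type keeps its regular documentation comment and remains
    /// visible in the generated docs.
    public struct ExampleDocumentedType {
      public let name: String
    }
    ```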
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Manual review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #113 from dongjoon-hyun/SPARK-51997.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 Sources/SparkConnect/ArrowArray.swift             | 12 +++++++
 Sources/SparkConnect/ArrowArrayBuilder.swift      |  1 +
 Sources/SparkConnect/ArrowBuffer.swift            |  1 +
 Sources/SparkConnect/ArrowBufferBuilder.swift     |  1 +
 Sources/SparkConnect/ArrowData.swift              |  1 +
 Sources/SparkConnect/ArrowDecoder.swift           |  1 +
 Sources/SparkConnect/ArrowEncoder.swift           |  1 +
 Sources/SparkConnect/ArrowReader.swift            |  1 +
 Sources/SparkConnect/ArrowReaderHelper.swift      |  1 +
 Sources/SparkConnect/ArrowSchema.swift            |  2 ++
 Sources/SparkConnect/ArrowTable.swift             |  2 ++
 Sources/SparkConnect/ArrowType.swift              |  9 ++++++
 Sources/SparkConnect/ArrowWriter.swift            |  2 ++
 Sources/SparkConnect/ChunkedArray.swift           |  4 +++
 Sources/SparkConnect/File_generated.swift         |  3 ++
 Sources/SparkConnect/FlightData.swift             |  1 +
 Sources/SparkConnect/FlightDescriptor.swift       |  1 +
 Sources/SparkConnect/MemoryAllocator.swift        |  1 +
 Sources/SparkConnect/Message_generated.swift      |  8 +++++
 Sources/SparkConnect/Schema_generated.swift       | 38 +++++++++++++++++++++++
 Sources/SparkConnect/SparseTensor_generated.swift |  5 +++
 Sources/SparkConnect/Tensor_generated.swift       |  2 ++
 22 files changed, 98 insertions(+)

diff --git a/Sources/SparkConnect/ArrowArray.swift b/Sources/SparkConnect/ArrowArray.swift
index 1f96a1d..e3d61bb 100644
--- a/Sources/SparkConnect/ArrowArray.swift
+++ b/Sources/SparkConnect/ArrowArray.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public protocol ArrowArrayHolder {
   var type: ArrowType { get }
   var length: UInt { get }
@@ -28,6 +29,7 @@ public protocol ArrowArrayHolder {
   var getArrowColumn: (ArrowField, [ArrowArrayHolder]) throws -> ArrowColumn { get }
 }
 
+/// @nodoc
 public class ArrowArrayHolderImpl: ArrowArrayHolder {
   public let data: ArrowData
   public let type: ArrowType
@@ -121,6 +123,7 @@ public class ArrowArrayHolderImpl: ArrowArrayHolder {
   }
 }
 
+/// @nodoc
 public class ArrowArray<T>: AsString, AnyArray {
   public typealias ItemType = T
   public let arrowData: ArrowData
@@ -160,6 +163,7 @@ public class ArrowArray<T>: AsString, AnyArray {
   }
 }
 
+/// @nodoc
 public class FixedArray<T>: ArrowArray<T> {
   public override subscript(_ index: UInt) -> T? {
     if self.arrowData.isNull(index) {
@@ -171,6 +175,7 @@ public class FixedArray<T>: ArrowArray<T> {
   }
 }
 
+/// @nodoc
 public class StringArray: ArrowArray<String> {
   public override subscript(_ index: UInt) -> String? {
     let offsetIndex = MemoryLayout<Int32>.stride * Int(index)
@@ -197,6 +202,7 @@ public class StringArray: ArrowArray<String> {
   }
 }
 
+/// @nodoc
 public class BoolArray: ArrowArray<Bool> {
   public override subscript(_ index: UInt) -> Bool? {
     if self.arrowData.isNull(index) {
@@ -208,6 +214,7 @@ public class BoolArray: ArrowArray<Bool> {
   }
 }
 
+/// @nodoc
 public class Date32Array: ArrowArray<Date> {
   public override subscript(_ index: UInt) -> Date? {
     if self.arrowData.isNull(index) {
@@ -221,6 +228,7 @@ public class Date32Array: ArrowArray<Date> {
   }
 }
 
+/// @nodoc
 public class Date64Array: ArrowArray<Date> {
   public override subscript(_ index: UInt) -> Date? {
     if self.arrowData.isNull(index) {
@@ -234,9 +242,12 @@ public class Date64Array: ArrowArray<Date> {
   }
 }
 
+/// @nodoc
 public class Time32Array: FixedArray<Time32> {}
+/// @nodoc
 public class Time64Array: FixedArray<Time64> {}
 
+/// @nodoc
 public class BinaryArray: ArrowArray<Data> {
   public struct Options {
     public var printAsHex = false
@@ -279,6 +290,7 @@ public class BinaryArray: ArrowArray<Data> {
   }
 }
 
+/// @nodoc
 public class StructArray: ArrowArray<[Any?]> {
   public private(set) var arrowFields: [ArrowArrayHolder]?
   public required init(_ arrowData: ArrowData) throws {
diff --git a/Sources/SparkConnect/ArrowArrayBuilder.swift b/Sources/SparkConnect/ArrowArrayBuilder.swift
index a7c357d..5062127 100644
--- a/Sources/SparkConnect/ArrowArrayBuilder.swift
+++ b/Sources/SparkConnect/ArrowArrayBuilder.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public protocol ArrowArrayHolderBuilder {
   func toHolder() throws -> ArrowArrayHolder
   func appendAny(_ val: Any?)
diff --git a/Sources/SparkConnect/ArrowBuffer.swift b/Sources/SparkConnect/ArrowBuffer.swift
index e6736af..8b8a375 100644
--- a/Sources/SparkConnect/ArrowBuffer.swift
+++ b/Sources/SparkConnect/ArrowBuffer.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowBuffer {
   static let minLength: UInt = 1 << 5
   static let maxLength = UInt.max
diff --git a/Sources/SparkConnect/ArrowBufferBuilder.swift b/Sources/SparkConnect/ArrowBufferBuilder.swift
index e835f3a..3c38c1d 100644
--- a/Sources/SparkConnect/ArrowBufferBuilder.swift
+++ b/Sources/SparkConnect/ArrowBufferBuilder.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public protocol ArrowBufferBuilder {
   associatedtype ItemType
   var capacity: UInt { get }
diff --git a/Sources/SparkConnect/ArrowData.swift b/Sources/SparkConnect/ArrowData.swift
index 1817e35..cb55875 100644
--- a/Sources/SparkConnect/ArrowData.swift
+++ b/Sources/SparkConnect/ArrowData.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowData {
   public let type: ArrowType
   public let buffers: [ArrowBuffer]
diff --git a/Sources/SparkConnect/ArrowDecoder.swift b/Sources/SparkConnect/ArrowDecoder.swift
index 6d68fa4..1f12b8b 100644
--- a/Sources/SparkConnect/ArrowDecoder.swift
+++ b/Sources/SparkConnect/ArrowDecoder.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowDecoder: Decoder {
   var rbIndex: UInt = 0
   var singleRBCol: Int = 0
diff --git a/Sources/SparkConnect/ArrowEncoder.swift b/Sources/SparkConnect/ArrowEncoder.swift
index 7b21f78..6ad1b19 100644
--- a/Sources/SparkConnect/ArrowEncoder.swift
+++ b/Sources/SparkConnect/ArrowEncoder.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowEncoder: Encoder {
   public private(set) var builders = [String: ArrowArrayHolderBuilder]()
   private var byIndex = [String]()
diff --git a/Sources/SparkConnect/ArrowReader.swift b/Sources/SparkConnect/ArrowReader.swift
index 665f8c9..de7af4f 100644
--- a/Sources/SparkConnect/ArrowReader.swift
+++ b/Sources/SparkConnect/ArrowReader.swift
@@ -21,6 +21,7 @@ import Foundation
 let FILEMARKER = "ARROW1"
 let CONTINUATIONMARKER = -1
 
+/// @nodoc
 public class ArrowReader {  // swiftlint:disable:this type_body_length
   private class RecordBatchData {
     let schema: org_apache_arrow_flatbuf_Schema
diff --git a/Sources/SparkConnect/ArrowReaderHelper.swift b/Sources/SparkConnect/ArrowReaderHelper.swift
index 110ef15..c0bd55b 100644
--- a/Sources/SparkConnect/ArrowReaderHelper.swift
+++ b/Sources/SparkConnect/ArrowReaderHelper.swift
@@ -18,6 +18,7 @@
 import FlatBuffers
 import Foundation
 
+/// @nodoc
 private func makeBinaryHolder(
   _ buffers: [ArrowBuffer],
   nullCount: UInt
diff --git a/Sources/SparkConnect/ArrowSchema.swift b/Sources/SparkConnect/ArrowSchema.swift
index e62ebf2..f5c2d1a 100644
--- a/Sources/SparkConnect/ArrowSchema.swift
+++ b/Sources/SparkConnect/ArrowSchema.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowField {
   public let type: ArrowType
   public let name: String
@@ -29,6 +30,7 @@ public class ArrowField {
   }
 }
 
+/// @nodoc
 public class ArrowSchema {
   public let fields: [ArrowField]
   public let fieldLookup: [String: Int]
diff --git a/Sources/SparkConnect/ArrowTable.swift b/Sources/SparkConnect/ArrowTable.swift
index bcd3170..716ff26 100644
--- a/Sources/SparkConnect/ArrowTable.swift
+++ b/Sources/SparkConnect/ArrowTable.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class ArrowColumn {
   public let field: ArrowField
   fileprivate let dataHolder: ChunkedArrayHolder
@@ -35,6 +36,7 @@ public class ArrowColumn {
   }
 }
 
+/// @nodoc
 public class ArrowTable {
   public let schema: ArrowSchema
   public var columnCount: UInt { return UInt(self.columns.count) }
diff --git a/Sources/SparkConnect/ArrowType.swift b/Sources/SparkConnect/ArrowType.swift
index 8595b19..cdf9772 100644
--- a/Sources/SparkConnect/ArrowType.swift
+++ b/Sources/SparkConnect/ArrowType.swift
@@ -17,14 +17,19 @@
 
 import Foundation
 
+/// @nodoc
 public typealias Time32 = Int32
+/// @nodoc
 public typealias Time64 = Int64
+/// @nodoc
 public typealias Date32 = Int32
+/// @nodoc
 public typealias Date64 = Int64
 
 func FlatBuffersVersion_23_1_4() {  // swiftlint:disable:this identifier_name
 }
 
+/// @nodoc
 public enum ArrowError: Error {
   case none
   case unknownType(String)
@@ -84,6 +89,7 @@ public enum ArrowTime64Unit {
   case nanoseconds
 }
 
+/// @nodoc
 public class ArrowTypeTime32: ArrowType {
   let unit: ArrowTime32Unit
   public init(_ unit: ArrowTime32Unit) {
@@ -103,6 +109,7 @@ public class ArrowTypeTime32: ArrowType {
   }
 }
 
+/// @nodoc
 public class ArrowTypeTime64: ArrowType {
   let unit: ArrowTime64Unit
   public init(_ unit: ArrowTime64Unit) {
@@ -122,6 +129,7 @@ public class ArrowTypeTime64: ArrowType {
   }
 }
 
+/// @nodoc
 public class ArrowNestedType: ArrowType {
   let fields: [ArrowField]
   public init(_ info: ArrowType.Info, fields: [ArrowField]) {
@@ -130,6 +138,7 @@ public class ArrowNestedType: ArrowType {
   }
 }
 
+/// @nodoc
 public class ArrowType {
   public private(set) var info: ArrowType.Info
   public static let ArrowInt8 = Info.primitiveInfo(ArrowTypeId.int8)
diff --git a/Sources/SparkConnect/ArrowWriter.swift b/Sources/SparkConnect/ArrowWriter.swift
index 84bc955..4b644cf 100644
--- a/Sources/SparkConnect/ArrowWriter.swift
+++ b/Sources/SparkConnect/ArrowWriter.swift
@@ -18,11 +18,13 @@
 import FlatBuffers
 import Foundation
 
+/// @nodoc
 public protocol DataWriter {
   var count: Int { get }
   func append(_ data: Data)
 }
 
+/// @nodoc
 public class ArrowWriter {  // swiftlint:disable:this type_body_length
   public class InMemDataWriter: DataWriter {
     public private(set) var data: Data
diff --git a/Sources/SparkConnect/ChunkedArray.swift b/Sources/SparkConnect/ChunkedArray.swift
index 859ff2e..a6b4e03 100644
--- a/Sources/SparkConnect/ChunkedArray.swift
+++ b/Sources/SparkConnect/ChunkedArray.swift
@@ -17,16 +17,19 @@
 
 import Foundation
 
+/// @nodoc
 public protocol AnyArray {
   var arrowData: ArrowData { get }
   func asAny(_ index: UInt) -> Any?
   var length: UInt { get }
 }
 
+/// @nodoc
 public protocol AsString {
   func asString(_ index: UInt) -> String
 }
 
+/// @nodoc
 public class ChunkedArrayHolder {
   public let type: ArrowType
   public let length: UInt
@@ -93,6 +96,7 @@ public class ChunkedArrayHolder {
   }
 }
 
+/// @nodoc
 public class ChunkedArray<T>: AsString {
   public let arrays: [ArrowArray<T>]
   public let type: ArrowType
diff --git a/Sources/SparkConnect/File_generated.swift b/Sources/SparkConnect/File_generated.swift
index 5c86991..e2d6558 100644
--- a/Sources/SparkConnect/File_generated.swift
+++ b/Sources/SparkConnect/File_generated.swift
@@ -21,6 +21,7 @@
 
 import FlatBuffers
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Block: NativeStruct, Verifiable, FlatbuffersInitializable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -67,6 +68,7 @@ public struct org_apache_arrow_flatbuf_Block: NativeStruct, Verifiable, Flatbuff
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Block_Mutable: FlatBufferObject {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -83,6 +85,7 @@ public struct org_apache_arrow_flatbuf_Block_Mutable: FlatBufferObject {
 ///  ----------------------------------------------------------------------
 ///  Arrow File metadata
 ///
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Footer: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
diff --git a/Sources/SparkConnect/FlightData.swift b/Sources/SparkConnect/FlightData.swift
index ee13311..441e47e 100644
--- a/Sources/SparkConnect/FlightData.swift
+++ b/Sources/SparkConnect/FlightData.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class FlightData {
   let flightData: Arrow_Flight_Protocol_FlightData
   public var flightDescriptor: FlightDescriptor? {
diff --git a/Sources/SparkConnect/FlightDescriptor.swift b/Sources/SparkConnect/FlightDescriptor.swift
index 41639c9..2c1825b 100644
--- a/Sources/SparkConnect/FlightDescriptor.swift
+++ b/Sources/SparkConnect/FlightDescriptor.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class FlightDescriptor {
   public enum DescriptorType {
     case unknown
diff --git a/Sources/SparkConnect/MemoryAllocator.swift b/Sources/SparkConnect/MemoryAllocator.swift
index 63f91f5..4de072d 100644
--- a/Sources/SparkConnect/MemoryAllocator.swift
+++ b/Sources/SparkConnect/MemoryAllocator.swift
@@ -17,6 +17,7 @@
 
 import Foundation
 
+/// @nodoc
 public class MemoryAllocator {
   let alignment: Int
   init(_ alignment: Int) {
diff --git a/Sources/SparkConnect/Message_generated.swift b/Sources/SparkConnect/Message_generated.swift
index 30c8c5f..975702c 100644
--- a/Sources/SparkConnect/Message_generated.swift
+++ b/Sources/SparkConnect/Message_generated.swift
@@ -21,6 +21,7 @@
 
 import FlatBuffers
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_CompressionType: Int8, Enum, Verifiable {
   public typealias T = Int8
   public static var byteSize: Int { return MemoryLayout<Int8>.size }
@@ -60,6 +61,7 @@ public enum org_apache_arrow_flatbuf_BodyCompressionMethod: Int8, Enum, Verifiab
 ///  Arrow implementations do not need to implement all of the message types,
 ///  which may include experimental metadata types. For maximum compatibility,
 ///  it is best to send data using RecordBatch
+/// @nodoc
 public enum org_apache_arrow_flatbuf_MessageHeader: UInt8, UnionEnum {
   public typealias T = UInt8
 
@@ -89,6 +91,7 @@ public enum org_apache_arrow_flatbuf_MessageHeader: UInt8, UnionEnum {
 ///  For example, a List<Int16> with values `[[1, 2, 3], null, [4], [5, 6], null]`
 ///  would have {length: 5, null_count: 2} for its List node, and {length: 6,
 ///  null_count: 0} for its Int16 node, as separate FieldNode structs
+/// @nodoc
 public struct org_apache_arrow_flatbuf_FieldNode: NativeStruct, Verifiable, FlatbuffersInitializable
 {
 
@@ -141,6 +144,7 @@ public struct org_apache_arrow_flatbuf_FieldNode: NativeStruct, Verifiable, Flat
 ///  For example, a List<Int16> with values `[[1, 2, 3], null, [4], [5, 6], null]`
 ///  would have {length: 5, null_count: 2} for its List node, and {length: 6,
 ///  null_count: 0} for its Int16 node, as separate FieldNode structs
+/// @nodoc
 public struct org_apache_arrow_flatbuf_FieldNode_Mutable: FlatBufferObject {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -156,6 +160,7 @@ public struct org_apache_arrow_flatbuf_FieldNode_Mutable: FlatBufferObject {
 ///  Optional compression for the memory buffers constituting IPC message
 ///  bodies. Intended for use with RecordBatch but could be used for other
 ///  message types
+/// @nodoc
 public struct org_apache_arrow_flatbuf_BodyCompression: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -238,6 +243,7 @@ public struct org_apache_arrow_flatbuf_BodyCompression: FlatBufferObject, Verifi
 ///  A data header describing the shared memory layout of a "record" or "row"
 ///  batch. Some systems call this a "row batch" internally and others a "record
 ///  batch".
+/// @nodoc
 public struct org_apache_arrow_flatbuf_RecordBatch: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -398,6 +404,7 @@ public struct org_apache_arrow_flatbuf_RecordBatch: FlatBufferObject, Verifiable
 ///  There is one vector / column per dictionary, but that vector / column
 ///  may be spread across multiple dictionary batches by using the isDelta
 ///  flag
+/// @nodoc
 public struct org_apache_arrow_flatbuf_DictionaryBatch: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -485,6 +492,7 @@ public struct org_apache_arrow_flatbuf_DictionaryBatch: FlatBufferObject, Verifi
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Message: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
diff --git a/Sources/SparkConnect/Schema_generated.swift b/Sources/SparkConnect/Schema_generated.swift
index a3e7d8d..e8c69ef 100644
--- a/Sources/SparkConnect/Schema_generated.swift
+++ b/Sources/SparkConnect/Schema_generated.swift
@@ -21,6 +21,7 @@
 
 import FlatBuffers
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_MetadataVersion: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -63,6 +64,7 @@ public enum org_apache_arrow_flatbuf_MetadataVersion: Int16, Enum, Verifiable {
 ///  Enums added to this list should be assigned power-of-two values
 ///  to facilitate exchanging and comparing bitmaps for supported
 ///  features.
+/// @nodoc
 public enum org_apache_arrow_flatbuf_Feature: Int64, Enum, Verifiable {
   public typealias T = Int64
   public static var byteSize: Int { return MemoryLayout<Int64>.size }
@@ -81,6 +83,7 @@ public enum org_apache_arrow_flatbuf_Feature: Int64, Enum, Verifiable {
   public static var min: org_apache_arrow_flatbuf_Feature { return .unused }
 }
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_UnionMode: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -92,6 +95,7 @@ public enum org_apache_arrow_flatbuf_UnionMode: Int16, Enum, Verifiable {
   public static var min: org_apache_arrow_flatbuf_UnionMode { return .sparse }
 }
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_Precision: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -104,6 +108,7 @@ public enum org_apache_arrow_flatbuf_Precision: Int16, Enum, Verifiable {
   public static var min: org_apache_arrow_flatbuf_Precision { return .half }
 }
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_DateUnit: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -115,6 +120,7 @@ public enum org_apache_arrow_flatbuf_DateUnit: Int16, Enum, Verifiable {
   public static var min: org_apache_arrow_flatbuf_DateUnit { return .day }
 }
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_TimeUnit: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -128,6 +134,7 @@ public enum org_apache_arrow_flatbuf_TimeUnit: Int16, Enum, Verifiable {
   public static var min: org_apache_arrow_flatbuf_TimeUnit { return .second }
 }
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_IntervalUnit: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -143,6 +150,7 @@ public enum org_apache_arrow_flatbuf_IntervalUnit: Int16, Enum, Verifiable {
 ///  ----------------------------------------------------------------------
 ///  Top-level Type value, enabling extensible type-specific metadata. We can
 ///  add new logical types to Type without breaking backwards compatibility
+/// @nodoc
 public enum org_apache_arrow_flatbuf_Type_: UInt8, UnionEnum {
   public typealias T = UInt8
 
@@ -185,6 +193,7 @@ public enum org_apache_arrow_flatbuf_Type_: UInt8, UnionEnum {
 ///  Maintained for forwards compatibility, in the future
 ///  Dictionaries might be explicit maps between integers and values
 ///  allowing for non-contiguous index values
+/// @nodoc
 public enum org_apache_arrow_flatbuf_DictionaryKind: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -197,6 +206,7 @@ public enum org_apache_arrow_flatbuf_DictionaryKind: Int16, Enum, Verifiable {
 
 ///  ----------------------------------------------------------------------
 ///  Endianness of the platform producing the data
+/// @nodoc
 public enum org_apache_arrow_flatbuf_Endianness: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -210,6 +220,7 @@ public enum org_apache_arrow_flatbuf_Endianness: Int16, Enum, Verifiable {
 
 ///  ----------------------------------------------------------------------
 ///  A Buffer represents a single contiguous memory segment
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Buffer: NativeStruct, Verifiable, FlatbuffersInitializable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -258,6 +269,7 @@ public struct org_apache_arrow_flatbuf_Buffer: NativeStruct, Verifiable, Flatbuf
 
 ///  ----------------------------------------------------------------------
 ///  A Buffer represents a single contiguous memory segment
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Buffer_Mutable: FlatBufferObject {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -271,6 +283,7 @@ public struct org_apache_arrow_flatbuf_Buffer_Mutable: FlatBufferObject {
 }
 
 ///  These are stored in the flatbuffer in the Type union below
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Null: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -305,6 +318,7 @@ public struct org_apache_arrow_flatbuf_Null: FlatBufferObject, Verifiable {
 ///  A Struct_ in the flatbuffer metadata is the same as an Arrow Struct
 ///  (according to the physical memory layout). We used Struct_ here as
 ///  Struct is a reserved word in Flatbuffers
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Struct_: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -336,6 +350,7 @@ public struct org_apache_arrow_flatbuf_Struct_: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_List: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -369,6 +384,7 @@ public struct org_apache_arrow_flatbuf_List: FlatBufferObject, Verifiable {
 
 ///  Same as List, but with 64-bit offsets, allowing to represent
 ///  extremely large data values.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_LargeList: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -400,6 +416,7 @@ public struct org_apache_arrow_flatbuf_LargeList: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_FixedSizeList: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -482,6 +499,7 @@ public struct org_apache_arrow_flatbuf_FixedSizeList: FlatBufferObject, Verifiab
 ///  The metadata is structured so that Arrow systems without special handling
 ///  for Map can make Map an alias for List. The "layout" attribute for the Map
 ///  field must have the same contents as a List.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Map: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -541,6 +559,7 @@ public struct org_apache_arrow_flatbuf_Map: FlatBufferObject, Verifiable {
 ///  By default ids in the type vector refer to the offsets in the children
 ///  optionally typeIds provides an indirection between the child offset and the type id
 ///  for each child `typeIds[offset]` is the id used in the type vector
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Union: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -622,6 +641,7 @@ public struct org_apache_arrow_flatbuf_Union: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Int: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -688,6 +708,7 @@ public struct org_apache_arrow_flatbuf_Int: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_FloatingPoint: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -749,6 +770,7 @@ public struct org_apache_arrow_flatbuf_FloatingPoint: FlatBufferObject, Verifiab
 }
 
 ///  Unicode with UTF-8 encoding
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Utf8: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -781,6 +803,7 @@ public struct org_apache_arrow_flatbuf_Utf8: FlatBufferObject, Verifiable {
 }
 
 ///  Opaque binary data
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Binary: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -814,6 +837,7 @@ public struct org_apache_arrow_flatbuf_Binary: FlatBufferObject, Verifiable {
 
 ///  Same as Utf8, but with 64-bit offsets, allowing to represent
 ///  extremely large data values.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_LargeUtf8: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -847,6 +871,7 @@ public struct org_apache_arrow_flatbuf_LargeUtf8: FlatBufferObject, Verifiable {
 
 ///  Same as Binary, but with 64-bit offsets, allowing to represent
 ///  extremely large data values.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_LargeBinary: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -878,6 +903,7 @@ public struct org_apache_arrow_flatbuf_LargeBinary: FlatBufferObject, Verifiable
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_FixedSizeBinary: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -935,6 +961,7 @@ public struct org_apache_arrow_flatbuf_FixedSizeBinary: FlatBufferObject, Verifi
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Bool: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -971,6 +998,7 @@ public struct org_apache_arrow_flatbuf_Bool: FlatBufferObject, Verifiable {
 ///  which encodes the indices at which the run with the value in
 ///  each corresponding index in the values child array ends.
 ///  Like list/struct types, the value array can be of any type.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_RunEndEncoded: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1008,6 +1036,7 @@ public struct org_apache_arrow_flatbuf_RunEndEncoded: FlatBufferObject, Verifiab
 ///  complement. Currently only 128-bit (16-byte) and 256-bit (32-byte) integers
 ///  are used. The representation uses the endianness indicated
 ///  in the Schema.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Decimal: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1095,6 +1124,7 @@ public struct org_apache_arrow_flatbuf_Decimal: FlatBufferObject, Verifiable {
 ///  * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no
 ///    leap seconds), where the values are evenly divisible by 86400000
 ///  * Days (32 bits) since the UNIX epoch
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Date: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1167,6 +1197,7 @@ public struct org_apache_arrow_flatbuf_Date: FlatBufferObject, Verifiable {
 ///  This definition doesn't allow for leap seconds. Time values from
 ///  measurements with leap seconds will need to be corrected when ingesting
 ///  into Arrow (for example by replacing the value 86400 with 86399).
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Time: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1342,6 +1373,7 @@ public struct org_apache_arrow_flatbuf_Time: FlatBufferObject, Verifiable {
 ///  values should be computed "as if" the timezone of the date-time values
 ///  was UTC; for example, the naive date-time "January 1st 1970, 00h00" would
 ///  be encoded as timestamp value 0.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Timestamp: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1424,6 +1456,7 @@ public struct org_apache_arrow_flatbuf_Timestamp: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Interval: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1482,6 +1515,7 @@ public struct org_apache_arrow_flatbuf_Interval: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Duration: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1543,6 +1577,7 @@ public struct org_apache_arrow_flatbuf_Duration: FlatBufferObject, Verifiable {
 ///  ----------------------------------------------------------------------
 ///  user defined key value pairs to add custom metadata to arrow
 ///  key namespacing is the responsibility of the user
+/// @nodoc
 public struct org_apache_arrow_flatbuf_KeyValue: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1612,6 +1647,7 @@ public struct org_apache_arrow_flatbuf_KeyValue: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_DictionaryEncoding: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1728,6 +1764,7 @@ public struct org_apache_arrow_flatbuf_DictionaryEncoding: FlatBufferObject, Ver
 ///  ----------------------------------------------------------------------
 ///  A field represents a named column in a record / row batch or child of a
 ///  nested type.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Field: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -1972,6 +2009,7 @@ public struct org_apache_arrow_flatbuf_Field: FlatBufferObject, Verifiable {
 
 ///  ----------------------------------------------------------------------
 ///  A Schema describes the columns in a row batch
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Schema: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
diff --git a/Sources/SparkConnect/SparseTensor_generated.swift b/Sources/SparkConnect/SparseTensor_generated.swift
index ded27f3..6a181e0 100644
--- a/Sources/SparkConnect/SparseTensor_generated.swift
+++ b/Sources/SparkConnect/SparseTensor_generated.swift
@@ -21,6 +21,7 @@
 
 import FlatBuffers
 
+/// @nodoc
 public enum org_apache_arrow_flatbuf_SparseMatrixCompressedAxis: Int16, Enum, Verifiable {
   public typealias T = Int16
   public static var byteSize: Int { return MemoryLayout<Int16>.size }
@@ -82,6 +83,7 @@ public enum org_apache_arrow_flatbuf_SparseTensorIndex: UInt8, UnionEnum {
 ///  When isCanonical is true, the indices is sorted in lexicographical order
 ///  (row-major order), and it does not have duplicated entries.  Otherwise,
 ///  the indices may not be sorted, or may have duplicated entries.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_SparseTensorIndexCOO: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -212,6 +214,7 @@ public struct org_apache_arrow_flatbuf_SparseTensorIndexCOO: FlatBufferObject, V
 }
 
 ///  Compressed Sparse format, that is matrix-specific.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_SparseMatrixIndexCSX: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -379,6 +382,7 @@ public struct org_apache_arrow_flatbuf_SparseMatrixIndexCSX: FlatBufferObject, V
 }
 
 ///  Compressed Sparse Fiber (CSF) sparse tensor index.
+/// @nodoc
 public struct org_apache_arrow_flatbuf_SparseTensorIndexCSF: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -613,6 +617,7 @@ public struct org_apache_arrow_flatbuf_SparseTensorIndexCSF: FlatBufferObject, V
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_SparseTensor: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
diff --git a/Sources/SparkConnect/Tensor_generated.swift b/Sources/SparkConnect/Tensor_generated.swift
index b5a5f84..fc75aae 100644
--- a/Sources/SparkConnect/Tensor_generated.swift
+++ b/Sources/SparkConnect/Tensor_generated.swift
@@ -24,6 +24,7 @@ import FlatBuffers
 ///  ----------------------------------------------------------------------
 ///  Data structures for dense tensors
 ///  Shape data for a single axis in a tensor
+/// @nodoc
 public struct org_apache_arrow_flatbuf_TensorDim: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }
@@ -92,6 +93,7 @@ public struct org_apache_arrow_flatbuf_TensorDim: FlatBufferObject, Verifiable {
   }
 }
 
+/// @nodoc
 public struct org_apache_arrow_flatbuf_Tensor: FlatBufferObject, Verifiable {
 
   static func validateVersion() { FlatBuffersVersion_23_1_4() }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

