diff --git a/Sources/SparkConnect/base.pb.swift b/Sources/SparkConnect/base.pb.swift
index c7c17df..dfdb27b 100644
--- a/Sources/SparkConnect/base.pb.swift
+++ b/Sources/SparkConnect/base.pb.swift
@@ -1296,6 +1296,24 @@ struct Spark_Connect_ExecutePlanResponse: Sendable {
     set {responseType = .mlCommandResult(newValue)}
   }

+  /// Response containing pipeline event that is streamed back to the client during a pipeline run
+  var pipelineEventResult: Spark_Connect_PipelineEventResult {
+    get {
+      if case .pipelineEventResult(let v)? = responseType {return v}
+      return Spark_Connect_PipelineEventResult()
+    }
+    set {responseType = .pipelineEventResult(newValue)}
+  }
+
+  /// Pipeline command response
+  var pipelineCommandResult: Spark_Connect_PipelineCommandResult {
+    get {
+      if case .pipelineCommandResult(let v)? = responseType {return v}
+      return Spark_Connect_PipelineCommandResult()
+    }
+    set {responseType = .pipelineCommandResult(newValue)}
+  }
+
   /// Support arbitrary result objects.
   var `extension`: SwiftProtobuf.Google_Protobuf_Any {
     get {
@@ -1356,6 +1374,10 @@ struct Spark_Connect_ExecutePlanResponse: Sendable {
     case checkpointCommandResult(Spark_Connect_CheckpointCommandResult)
     /// ML command response
     case mlCommandResult(Spark_Connect_MlCommandResult)
+    /// Response containing pipeline event that is streamed back to the client during a pipeline run
+    case pipelineEventResult(Spark_Connect_PipelineEventResult)
+    /// Pipeline command response
+    case pipelineCommandResult(Spark_Connect_PipelineCommandResult)
     /// Support arbitrary result objects.
     case `extension`(SwiftProtobuf.Google_Protobuf_Any)

@@ -4952,6 +4974,8 @@ extension Spark_Connect_ExecutePlanResponse: SwiftProtobuf.Message, SwiftProtobu
     18: .standard(proto: "execution_progress"),
     19: .standard(proto: "checkpoint_command_result"),
     20: .standard(proto: "ml_command_result"),
+    21: .standard(proto: "pipeline_event_result"),
+    22: .standard(proto: "pipeline_command_result"),
     999: .same(proto: "extension"),
     4: .same(proto: "metrics"),
     6: .standard(proto: "observed_metrics"),
@@ -5127,6 +5151,32 @@ extension Spark_Connect_ExecutePlanResponse: SwiftProtobuf.Message, SwiftProtobu
           self.responseType = .mlCommandResult(v)
         }
       }()
+      case 21: try {
+        var v: Spark_Connect_PipelineEventResult?
+        var hadOneofValue = false
+        if let current = self.responseType {
+          hadOneofValue = true
+          if case .pipelineEventResult(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.responseType = .pipelineEventResult(v)
+        }
+      }()
+      case 22: try {
+        var v: Spark_Connect_PipelineCommandResult?
+        var hadOneofValue = false
+        if let current = self.responseType {
+          hadOneofValue = true
+          if case .pipelineCommandResult(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.responseType = .pipelineCommandResult(v)
+        }
+      }()
      case 999: try {
        var v: SwiftProtobuf.Google_Protobuf_Any?
        var hadOneofValue = false
@@ -5220,6 +5270,14 @@ extension Spark_Connect_ExecutePlanResponse: SwiftProtobuf.Message, SwiftProtobu
       guard case .mlCommandResult(let v)? = self.responseType else { preconditionFailure() }
       try visitor.visitSingularMessageField(value: v, fieldNumber: 20)
     }()
+    case .pipelineEventResult?: try {
+      guard case .pipelineEventResult(let v)? = self.responseType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 21)
+    }()
+    case .pipelineCommandResult?: try {
+      guard case .pipelineCommandResult(let v)? = self.responseType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 22)
+    }()
     case .extension?: try {
       guard case .extension(let v)? = self.responseType else { preconditionFailure() }
       try visitor.visitSingularMessageField(value: v, fieldNumber: 999)
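Reviewer note: the two new `responseType` cases behave like every existing oneof member, so client code consumes them with ordinary pattern matching. A minimal sketch, assuming it compiles inside the SparkConnect module; the `handle` function and the prints are illustrative, not part of this diff:

```swift
func handle(_ response: Spark_Connect_ExecutePlanResponse) {
  switch response.responseType {
  case .pipelineEventResult(let result)?:
    // Field 21: progress event streamed back during a pipeline run.
    print("[pipeline event] \(result.event.message)")
  case .pipelineCommandResult(let result)?:
    // Field 22: terminal result of a pipeline command.
    if case .createDataflowGraphResult(let r)? = result.resultType {
      print("created graph \(r.dataflowGraphID)")
    }
  default:
    break  // all pre-existing cases are untouched by this diff
  }
}
```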
diff --git a/Sources/SparkConnect/commands.pb.swift b/Sources/SparkConnect/commands.pb.swift
index 83c90bc..4e8cf41 100644
--- a/Sources/SparkConnect/commands.pb.swift
+++ b/Sources/SparkConnect/commands.pb.swift
@@ -235,6 +235,14 @@ struct Spark_Connect_Command: Sendable {
     set {commandType = .executeExternalCommand(newValue)}
   }

+  var pipelineCommand: Spark_Connect_PipelineCommand {
+    get {
+      if case .pipelineCommand(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand()
+    }
+    set {commandType = .pipelineCommand(newValue)}
+  }
+
   /// This field is used to mark extensions to the protocol. When plugins generate arbitrary
   /// Commands they can add them here. During the planning the correct resolution is done.
   var `extension`: SwiftProtobuf.Google_Protobuf_Any {
@@ -266,6 +274,7 @@ struct Spark_Connect_Command: Sendable {
     case mergeIntoTableCommand(Spark_Connect_MergeIntoTableCommand)
     case mlCommand(Spark_Connect_MlCommand)
     case executeExternalCommand(Spark_Connect_ExecuteExternalCommand)
+    case pipelineCommand(Spark_Connect_PipelineCommand)
     /// This field is used to mark extensions to the protocol. When plugins generate arbitrary
     /// Commands they can add them here. During the planning the correct resolution is done.
     case `extension`(SwiftProtobuf.Google_Protobuf_Any)
@@ -1873,6 +1882,7 @@ extension Spark_Connect_Command: SwiftProtobuf.Message, SwiftProtobuf._MessageIm
     16: .standard(proto: "merge_into_table_command"),
     17: .standard(proto: "ml_command"),
     18: .standard(proto: "execute_external_command"),
+    19: .standard(proto: "pipeline_command"),
     999: .same(proto: "extension"),
   ]

@@ -2116,6 +2126,19 @@ extension Spark_Connect_Command: SwiftProtobuf.Message, SwiftProtobuf._MessageIm
          self.commandType = .executeExternalCommand(v)
        }
      }()
+      case 19: try {
+        var v: Spark_Connect_PipelineCommand?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .pipelineCommand(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .pipelineCommand(v)
+        }
+      }()
      case 999: try {
        var v: SwiftProtobuf.Google_Protobuf_Any?
        var hadOneofValue = false
@@ -2212,6 +2235,10 @@ extension Spark_Connect_Command: SwiftProtobuf.Message, SwiftProtobuf._MessageIm
      guard case .executeExternalCommand(let v)? = self.commandType else { preconditionFailure() }
      try visitor.visitSingularMessageField(value: v, fieldNumber: 18)
    }()
+    case .pipelineCommand?: try {
+      guard case .pipelineCommand(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 19)
+    }()
    case .extension?: try {
      guard case .extension(let v)? = self.commandType else { preconditionFailure() }
      try visitor.visitSingularMessageField(value: v, fieldNumber: 999)
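As with the other command variants, assigning through the new `pipelineCommand` accessor rewrites the whole `commandType` oneof. A hedged sketch of building a `StartRun` command; the graph ID is illustrative:

```swift
var startRun = Spark_Connect_PipelineCommand.StartRun()
startRun.dataflowGraphID = "graph-123"  // illustrative ID

var pipeline = Spark_Connect_PipelineCommand()
pipeline.commandType = .startRun(startRun)

var command = Spark_Connect_Command()
command.pipelineCommand = pipeline  // equivalent to commandType = .pipelineCommand(pipeline)
```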
diff --git a/Sources/SparkConnect/expressions.pb.swift b/Sources/SparkConnect/expressions.pb.swift
index 9dd10fe..73368f7 100644
--- a/Sources/SparkConnect/expressions.pb.swift
+++ b/Sources/SparkConnect/expressions.pb.swift
@@ -1774,6 +1774,9 @@ struct Spark_Connect_SubqueryExpression: Sendable {
   /// Clears the value of `tableArgOptions`. Subsequent reads from it will return its default value.
   mutating func clearTableArgOptions() {self._tableArgOptions = nil}

+  /// (Optional) IN subquery values.
+  var inSubqueryValues: [Spark_Connect_Expression] = []
+
   var unknownFields = SwiftProtobuf.UnknownStorage()

   enum SubqueryType: SwiftProtobuf.Enum, Swift.CaseIterable {
@@ -1782,6 +1785,7 @@ struct Spark_Connect_SubqueryExpression: Sendable {
     case scalar // = 1
     case exists // = 2
     case tableArg // = 3
+    case `in` // = 4
     case UNRECOGNIZED(Int)

     init() {
@@ -1794,6 +1798,7 @@ struct Spark_Connect_SubqueryExpression: Sendable {
       case 1: self = .scalar
       case 2: self = .exists
       case 3: self = .tableArg
+      case 4: self = .in
       default: self = .UNRECOGNIZED(rawValue)
       }
     }
@@ -1804,6 +1809,7 @@ struct Spark_Connect_SubqueryExpression: Sendable {
       case .scalar: return 1
      case .exists: return 2
      case .tableArg: return 3
+      case .in: return 4
      case .UNRECOGNIZED(let i): return i
      }
    }
@@ -1814,6 +1820,7 @@ struct Spark_Connect_SubqueryExpression: Sendable {
      .scalar,
      .exists,
      .tableArg,
+      .in,
    ]

  }
@@ -4774,6 +4781,7 @@ extension Spark_Connect_SubqueryExpression: SwiftProtobuf.Message, SwiftProtobuf
     1: .standard(proto: "plan_id"),
     2: .standard(proto: "subquery_type"),
     3: .standard(proto: "table_arg_options"),
+    4: .standard(proto: "in_subquery_values"),
   ]

   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
@@ -4785,6 +4793,7 @@ extension Spark_Connect_SubqueryExpression: SwiftProtobuf.Message, SwiftProtobuf
       case 1: try { try decoder.decodeSingularInt64Field(value: &self.planID) }()
       case 2: try { try decoder.decodeSingularEnumField(value: &self.subqueryType) }()
       case 3: try { try decoder.decodeSingularMessageField(value: &self._tableArgOptions) }()
+      case 4: try { try decoder.decodeRepeatedMessageField(value: &self.inSubqueryValues) }()
       default: break
       }
     }
@@ -4804,6 +4813,9 @@ extension Spark_Connect_SubqueryExpression: SwiftProtobuf.Message, SwiftProtobuf
     try { if let v = self._tableArgOptions {
       try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
     } }()
+    if !self.inSubqueryValues.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.inSubqueryValues, fieldNumber: 4)
+    }
     try unknownFields.traverse(visitor: &visitor)
   }

@@ -4811,6 +4823,7 @@ extension Spark_Connect_SubqueryExpression: SwiftProtobuf.Message, SwiftProtobuf
     if lhs.planID != rhs.planID {return false}
     if lhs.subqueryType != rhs.subqueryType {return false}
     if lhs._tableArgOptions != rhs._tableArgOptions {return false}
+    if lhs.inSubqueryValues != rhs.inSubqueryValues {return false}
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
   }
@@ -4822,6 +4835,7 @@ extension Spark_Connect_SubqueryExpression.SubqueryType: SwiftProtobuf._ProtoNam
     1: .same(proto: "SUBQUERY_TYPE_SCALAR"),
     2: .same(proto: "SUBQUERY_TYPE_EXISTS"),
     3: .same(proto: "SUBQUERY_TYPE_TABLE_ARG"),
+    4: .same(proto: "SUBQUERY_TYPE_IN"),
   ]
 }
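The new `SUBQUERY_TYPE_IN` pairs with `inSubqueryValues`, which carries the left-hand-side expressions of the IN predicate. A sketch of encoding `col IN (<subquery>)`; the column name and plan id are illustrative, and `unresolvedAttribute` comes from the pre-existing generated Expression API:

```swift
var lhs = Spark_Connect_Expression()
lhs.unresolvedAttribute.unparsedIdentifier = "col"

var subquery = Spark_Connect_SubqueryExpression()
subquery.planID = 42          // plan id of the subquery, illustrative
subquery.subqueryType = .in   // SUBQUERY_TYPE_IN = 4
subquery.inSubqueryValues = [lhs]
```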
diff --git a/Sources/SparkConnect/ml.pb.swift b/Sources/SparkConnect/ml.pb.swift
index 6049cab..cd743c6 100644
--- a/Sources/SparkConnect/ml.pb.swift
+++ b/Sources/SparkConnect/ml.pb.swift
@@ -92,6 +92,30 @@ struct Spark_Connect_MlCommand: Sendable {
     set {command = .evaluate(newValue)}
   }

+  var cleanCache: Spark_Connect_MlCommand.CleanCache {
+    get {
+      if case .cleanCache(let v)? = command {return v}
+      return Spark_Connect_MlCommand.CleanCache()
+    }
+    set {command = .cleanCache(newValue)}
+  }
+
+  var getCacheInfo: Spark_Connect_MlCommand.GetCacheInfo {
+    get {
+      if case .getCacheInfo(let v)? = command {return v}
+      return Spark_Connect_MlCommand.GetCacheInfo()
+    }
+    set {command = .getCacheInfo(newValue)}
+  }
+
+  var createSummary: Spark_Connect_MlCommand.CreateSummary {
+    get {
+      if case .createSummary(let v)? = command {return v}
+      return Spark_Connect_MlCommand.CreateSummary()
+    }
+    set {command = .createSummary(newValue)}
+  }
+
   var unknownFields = SwiftProtobuf.UnknownStorage()

   enum OneOf_Command: Equatable, Sendable {
@@ -101,6 +125,9 @@ struct Spark_Connect_MlCommand: Sendable {
     case write(Spark_Connect_MlCommand.Write)
     case read(Spark_Connect_MlCommand.Read)
     case evaluate(Spark_Connect_MlCommand.Evaluate)
+    case cleanCache(Spark_Connect_MlCommand.CleanCache)
+    case getCacheInfo(Spark_Connect_MlCommand.GetCacheInfo)
+    case createSummary(Spark_Connect_MlCommand.CreateSummary)

   }

@@ -158,6 +185,41 @@ struct Spark_Connect_MlCommand: Sendable {

     var objRefs: [Spark_Connect_ObjectRef] = []

+    /// if set `evict_only` to true, only evict the cached model from memory,
+    /// but keep the offloaded model in Spark driver local disk.
+    var evictOnly: Bool {
+      get {return _evictOnly ?? false}
+      set {_evictOnly = newValue}
+    }
+    /// Returns true if `evictOnly` has been explicitly set.
+    var hasEvictOnly: Bool {return self._evictOnly != nil}
+    /// Clears the value of `evictOnly`. Subsequent reads from it will return its default value.
+    mutating func clearEvictOnly() {self._evictOnly = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _evictOnly: Bool? = nil
+  }
+
+  /// Force to clean up all the ML cached objects
+  struct CleanCache: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+  }
+
+  /// Get the information of all the ML cached objects
+  struct GetCacheInfo: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
     var unknownFields = SwiftProtobuf.UnknownStorage()

     init() {}
@@ -304,6 +366,39 @@ struct Spark_Connect_MlCommand: Sendable {
     fileprivate var _dataset: Spark_Connect_Relation? = nil
   }

+  /// This is for re-creating the model summary when the model summary is lost
+  /// (model summary is lost when the model is offloaded and then loaded back)
+  struct CreateSummary: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    var modelRef: Spark_Connect_ObjectRef {
+      get {return _modelRef ?? Spark_Connect_ObjectRef()}
+      set {_modelRef = newValue}
+    }
+    /// Returns true if `modelRef` has been explicitly set.
+    var hasModelRef: Bool {return self._modelRef != nil}
+    /// Clears the value of `modelRef`. Subsequent reads from it will return its default value.
+    mutating func clearModelRef() {self._modelRef = nil}
+
+    var dataset: Spark_Connect_Relation {
+      get {return _dataset ?? Spark_Connect_Relation()}
+      set {_dataset = newValue}
+    }
+    /// Returns true if `dataset` has been explicitly set.
+    var hasDataset: Bool {return self._dataset != nil}
+    /// Clears the value of `dataset`. Subsequent reads from it will return its default value.
+    mutating func clearDataset() {self._dataset = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _modelRef: Spark_Connect_ObjectRef? = nil
+    fileprivate var _dataset: Spark_Connect_Relation? = nil
+  }
+
   init() {}
 }

@@ -401,6 +496,16 @@ struct Spark_Connect_MlCommandResult: Sendable {
     /// Clears the value of `params`. Subsequent reads from it will return its default value.
     mutating func clearParams() {self._params = nil}

+    /// (Optional) warning message generated during the ML command execution
+    var warningMessage: String {
+      get {return _warningMessage ?? String()}
+      set {_warningMessage = newValue}
+    }
+    /// Returns true if `warningMessage` has been explicitly set.
+    var hasWarningMessage: Bool {return self._warningMessage != nil}
+    /// Clears the value of `warningMessage`. Subsequent reads from it will return its default value.
+    mutating func clearWarningMessage() {self._warningMessage = nil}
+
     var unknownFields = SwiftProtobuf.UnknownStorage()

     enum OneOf_Type: Equatable, Sendable {
@@ -415,6 +520,7 @@ struct Spark_Connect_MlCommandResult: Sendable {

     fileprivate var _uid: String? = nil
     fileprivate var _params: Spark_Connect_MlParams? = nil
+    fileprivate var _warningMessage: String? = nil
   }

   init() {}
@@ -433,6 +539,9 @@ extension Spark_Connect_MlCommand: SwiftProtobuf.Message, SwiftProtobuf._Message
     4: .same(proto: "write"),
     5: .same(proto: "read"),
     6: .same(proto: "evaluate"),
+    7: .standard(proto: "clean_cache"),
+    8: .standard(proto: "get_cache_info"),
+    9: .standard(proto: "create_summary"),
   ]

   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
@@ -519,6 +628,45 @@ extension Spark_Connect_MlCommand: SwiftProtobuf.Message, SwiftProtobuf._Message
          self.command = .evaluate(v)
        }
      }()
+      case 7: try {
+        var v: Spark_Connect_MlCommand.CleanCache?
+        var hadOneofValue = false
+        if let current = self.command {
+          hadOneofValue = true
+          if case .cleanCache(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.command = .cleanCache(v)
+        }
+      }()
+      case 8: try {
+        var v: Spark_Connect_MlCommand.GetCacheInfo?
+        var hadOneofValue = false
+        if let current = self.command {
+          hadOneofValue = true
+          if case .getCacheInfo(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.command = .getCacheInfo(v)
+        }
+      }()
+      case 9: try {
+        var v: Spark_Connect_MlCommand.CreateSummary?
+        var hadOneofValue = false
+        if let current = self.command {
+          hadOneofValue = true
+          if case .createSummary(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.command = .createSummary(v)
+        }
+      }()
      default: break
      }
    }
@@ -554,6 +702,18 @@ extension Spark_Connect_MlCommand: SwiftProtobuf.Message, SwiftProtobuf._Message
       guard case .evaluate(let v)? = self.command else { preconditionFailure() }
       try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
     }()
+    case .cleanCache?: try {
+      guard case .cleanCache(let v)? = self.command else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 7)
+    }()
+    case .getCacheInfo?: try {
+      guard case .getCacheInfo(let v)? = self.command else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 8)
+    }()
+    case .createSummary?: try {
+      guard case .createSummary(let v)? = self.command else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 9)
+    }()
     case nil: break
     }
     try unknownFields.traverse(visitor: &visitor)
@@ -618,6 +778,7 @@ extension Spark_Connect_MlCommand.Delete: SwiftProtobuf.Message, SwiftProtobuf._
   static let protoMessageName: String = Spark_Connect_MlCommand.protoMessageName + ".Delete"
   static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
     1: .standard(proto: "obj_refs"),
+    2: .standard(proto: "evict_only"),
   ]

   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
@@ -627,20 +788,67 @@ extension Spark_Connect_MlCommand.Delete: SwiftProtobuf.Message, SwiftProtobuf._
       // enabled. https://github.com/apple/swift-protobuf/issues/1034
       switch fieldNumber {
       case 1: try { try decoder.decodeRepeatedMessageField(value: &self.objRefs) }()
+      case 2: try { try decoder.decodeSingularBoolField(value: &self._evictOnly) }()
       default: break
       }
     }
   }

   func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
     if !self.objRefs.isEmpty {
       try visitor.visitRepeatedMessageField(value: self.objRefs, fieldNumber: 1)
     }
+    try { if let v = self._evictOnly {
+      try visitor.visitSingularBoolField(value: v, fieldNumber: 2)
+    } }()
     try unknownFields.traverse(visitor: &visitor)
   }

   static func ==(lhs: Spark_Connect_MlCommand.Delete, rhs: Spark_Connect_MlCommand.Delete) -> Bool {
     if lhs.objRefs != rhs.objRefs {return false}
+    if lhs._evictOnly != rhs._evictOnly {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_MlCommand.CleanCache: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_MlCommand.protoMessageName + ".CleanCache"
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap()
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    // Load everything into unknown fields
+    while try decoder.nextFieldNumber() != nil {}
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_MlCommand.CleanCache, rhs: Spark_Connect_MlCommand.CleanCache) -> Bool {
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_MlCommand.GetCacheInfo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_MlCommand.protoMessageName + ".GetCacheInfo"
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap()
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    // Load everything into unknown fields
+    while try decoder.nextFieldNumber() != nil {}
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_MlCommand.GetCacheInfo, rhs: Spark_Connect_MlCommand.GetCacheInfo) -> Bool {
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
   }
 }
@@ -830,6 +1038,48 @@ extension Spark_Connect_MlCommand.Evaluate: SwiftProtobuf.Message, SwiftProtobuf
   }
 }

+extension Spark_Connect_MlCommand.CreateSummary: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_MlCommand.protoMessageName + ".CreateSummary"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "model_ref"),
+    2: .same(proto: "dataset"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularMessageField(value: &self._modelRef) }()
+      case 2: try { try decoder.decodeSingularMessageField(value: &self._dataset) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._modelRef {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+    } }()
+    try { if let v = self._dataset {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_MlCommand.CreateSummary, rhs: Spark_Connect_MlCommand.CreateSummary) -> Bool {
+    if lhs._modelRef != rhs._modelRef {return false}
+    if lhs._dataset != rhs._dataset {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
 extension Spark_Connect_MlCommandResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".MlCommandResult"
   static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
@@ -920,6 +1170,7 @@ extension Spark_Connect_MlCommandResult.MlOperatorInfo: SwiftProtobuf.Message, S
     2: .same(proto: "name"),
     3: .same(proto: "uid"),
     4: .same(proto: "params"),
+    5: .standard(proto: "warning_message"),
   ]

   mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
@@ -951,6 +1202,7 @@ extension Spark_Connect_MlCommandResult.MlOperatorInfo: SwiftProtobuf.Message, S
       }()
       case 3: try { try decoder.decodeSingularStringField(value: &self._uid) }()
       case 4: try { try decoder.decodeSingularMessageField(value: &self._params) }()
+      case 5: try { try decoder.decodeSingularStringField(value: &self._warningMessage) }()
       default: break
       }
     }
@@ -978,6 +1230,9 @@ extension Spark_Connect_MlCommandResult.MlOperatorInfo: SwiftProtobuf.Message, S
     try { if let v = self._params {
       try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
     } }()
+    try { if let v = self._warningMessage {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 5)
+    } }()
     try unknownFields.traverse(visitor: &visitor)
   }

@@ -985,6 +1240,7 @@ extension Spark_Connect_MlCommandResult.MlOperatorInfo: SwiftProtobuf.Message, S
     if lhs.type != rhs.type {return false}
     if lhs._uid != rhs._uid {return false}
     if lhs._params != rhs._params {return false}
+    if lhs._warningMessage != rhs._warningMessage {return false}
     if lhs.unknownFields != rhs.unknownFields {return false}
     return true
   }
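The ML additions are three new `MlCommand` variants plus an explicit-presence flag on `Delete`. A short sketch of the generated API surface; values are illustrative:

```swift
var clean = Spark_Connect_MlCommand()
clean.cleanCache = Spark_Connect_MlCommand.CleanCache()    // field 7, no payload

var info = Spark_Connect_MlCommand()
info.getCacheInfo = Spark_Connect_MlCommand.GetCacheInfo() // field 8, no payload

var delete = Spark_Connect_MlCommand.Delete()
delete.evictOnly = true       // evict from memory but keep the on-disk copy
assert(delete.hasEvictOnly)   // presence is tracked via the optional backing field
```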
diff --git a/Sources/SparkConnect/pipelines.pb.swift b/Sources/SparkConnect/pipelines.pb.swift
new file mode 100644
index 0000000..076e1c1
--- /dev/null
+++ b/Sources/SparkConnect/pipelines.pb.swift
@@ -0,0 +1,1234 @@
+// DO NOT EDIT.
+// swift-format-ignore-file
+// swiftlint:disable all
+//
+// Generated by the Swift generator plugin for the protocol buffer compiler.
+// Source: spark/connect/pipelines.proto
+//
+// For information on using the generated types, please see the documentation:
+// https://github.com/apple/swift-protobuf/
+
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import SwiftProtobuf
+
+// If the compiler emits an error on this type, it is because this file
+// was generated by a version of the `protoc` Swift plug-in that is
+// incompatible with the version of SwiftProtobuf to which you are linking.
+// Please ensure that you are building against the same version of the API
+// that was used to generate this file.
+fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck {
+  struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {}
+  typealias Version = _2
+}
+
+/// The type of dataset.
+enum Spark_Connect_DatasetType: SwiftProtobuf.Enum, Swift.CaseIterable {
+  typealias RawValue = Int
+
+  /// Safe default value. Should not be used.
+  case unspecified // = 0
+
+  /// A materialized view dataset which is published to the catalog
+  case materializedView // = 1
+
+  /// A table which is published to the catalog
+  case table // = 2
+
+  /// A view which is not published to the catalog
+  case temporaryView // = 3
+  case UNRECOGNIZED(Int)
+
+  init() {
+    self = .unspecified
+  }
+
+  init?(rawValue: Int) {
+    switch rawValue {
+    case 0: self = .unspecified
+    case 1: self = .materializedView
+    case 2: self = .table
+    case 3: self = .temporaryView
+    default: self = .UNRECOGNIZED(rawValue)
+    }
+  }
+
+  var rawValue: Int {
+    switch self {
+    case .unspecified: return 0
+    case .materializedView: return 1
+    case .table: return 2
+    case .temporaryView: return 3
+    case .UNRECOGNIZED(let i): return i
+    }
+  }
+
+  // The compiler won't synthesize support with the UNRECOGNIZED case.
+  static let allCases: [Spark_Connect_DatasetType] = [
+    .unspecified,
+    .materializedView,
+    .table,
+    .temporaryView,
+  ]
+
+}
+
+/// Dispatch object for pipelines commands. See each individual command for documentation.
+struct Spark_Connect_PipelineCommand: Sendable {
+  // SwiftProtobuf.Message conformance is added in an extension below. See the
+  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+  // methods supported on all messages.
+
+  var commandType: Spark_Connect_PipelineCommand.OneOf_CommandType? = nil
+
+  var createDataflowGraph: Spark_Connect_PipelineCommand.CreateDataflowGraph {
+    get {
+      if case .createDataflowGraph(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.CreateDataflowGraph()
+    }
+    set {commandType = .createDataflowGraph(newValue)}
+  }
+
+  var defineDataset: Spark_Connect_PipelineCommand.DefineDataset {
+    get {
+      if case .defineDataset(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.DefineDataset()
+    }
+    set {commandType = .defineDataset(newValue)}
+  }
+
+  var defineFlow: Spark_Connect_PipelineCommand.DefineFlow {
+    get {
+      if case .defineFlow(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.DefineFlow()
+    }
+    set {commandType = .defineFlow(newValue)}
+  }
+
+  var dropDataflowGraph: Spark_Connect_PipelineCommand.DropDataflowGraph {
+    get {
+      if case .dropDataflowGraph(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.DropDataflowGraph()
+    }
+    set {commandType = .dropDataflowGraph(newValue)}
+  }
+
+  var startRun: Spark_Connect_PipelineCommand.StartRun {
+    get {
+      if case .startRun(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.StartRun()
+    }
+    set {commandType = .startRun(newValue)}
+  }
+
+  var defineSqlGraphElements: Spark_Connect_PipelineCommand.DefineSqlGraphElements {
+    get {
+      if case .defineSqlGraphElements(let v)? = commandType {return v}
+      return Spark_Connect_PipelineCommand.DefineSqlGraphElements()
+    }
+    set {commandType = .defineSqlGraphElements(newValue)}
+  }
+
+  var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  enum OneOf_CommandType: Equatable, Sendable {
+    case createDataflowGraph(Spark_Connect_PipelineCommand.CreateDataflowGraph)
+    case defineDataset(Spark_Connect_PipelineCommand.DefineDataset)
+    case defineFlow(Spark_Connect_PipelineCommand.DefineFlow)
+    case dropDataflowGraph(Spark_Connect_PipelineCommand.DropDataflowGraph)
+    case startRun(Spark_Connect_PipelineCommand.StartRun)
+    case defineSqlGraphElements(Spark_Connect_PipelineCommand.DefineSqlGraphElements)
+
+  }
+
+  /// Request to create a new dataflow graph.
+  struct CreateDataflowGraph: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The default catalog.
+    var defaultCatalog: String {
+      get {return _defaultCatalog ?? String()}
+      set {_defaultCatalog = newValue}
+    }
+    /// Returns true if `defaultCatalog` has been explicitly set.
+    var hasDefaultCatalog: Bool {return self._defaultCatalog != nil}
+    /// Clears the value of `defaultCatalog`. Subsequent reads from it will return its default value.
+    mutating func clearDefaultCatalog() {self._defaultCatalog = nil}
+
+    /// The default database.
+    var defaultDatabase: String {
+      get {return _defaultDatabase ?? String()}
+      set {_defaultDatabase = newValue}
+    }
+    /// Returns true if `defaultDatabase` has been explicitly set.
+    var hasDefaultDatabase: Bool {return self._defaultDatabase != nil}
+    /// Clears the value of `defaultDatabase`. Subsequent reads from it will return its default value.
+    mutating func clearDefaultDatabase() {self._defaultDatabase = nil}
+
+    /// SQL configurations for all flows in this graph.
+    var sqlConf: Dictionary<String,String> = [:]
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    struct Response: Sendable {
+      // SwiftProtobuf.Message conformance is added in an extension below. See the
+      // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+      // methods supported on all messages.
+
+      /// The ID of the created graph.
+      var dataflowGraphID: String {
+        get {return _dataflowGraphID ?? String()}
+        set {_dataflowGraphID = newValue}
+      }
+      /// Returns true if `dataflowGraphID` has been explicitly set.
+      var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+      /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+      mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+      var unknownFields = SwiftProtobuf.UnknownStorage()
+
+      init() {}
+
+      fileprivate var _dataflowGraphID: String? = nil
+    }
+
+    init() {}
+
+    fileprivate var _defaultCatalog: String? = nil
+    fileprivate var _defaultDatabase: String? = nil
+  }
+
+  /// Drops the graph and stops any running attached flows.
+  struct DropDataflowGraph: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The graph to drop.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+  }
+
+  /// Request to define a dataset: a table, a materialized view, or a temporary view.
+  struct DefineDataset: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The graph to attach this dataset to.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    /// Name of the dataset. Can be partially or fully qualified.
+    var datasetName: String {
+      get {return _datasetName ?? String()}
+      set {_datasetName = newValue}
+    }
+    /// Returns true if `datasetName` has been explicitly set.
+    var hasDatasetName: Bool {return self._datasetName != nil}
+    /// Clears the value of `datasetName`. Subsequent reads from it will return its default value.
+    mutating func clearDatasetName() {self._datasetName = nil}
+
+    /// The type of the dataset.
+    var datasetType: Spark_Connect_DatasetType {
+      get {return _datasetType ?? .unspecified}
+      set {_datasetType = newValue}
+    }
+    /// Returns true if `datasetType` has been explicitly set.
+    var hasDatasetType: Bool {return self._datasetType != nil}
+    /// Clears the value of `datasetType`. Subsequent reads from it will return its default value.
+    mutating func clearDatasetType() {self._datasetType = nil}
+
+    /// Optional comment for the dataset.
+    var comment: String {
+      get {return _comment ?? String()}
+      set {_comment = newValue}
+    }
+    /// Returns true if `comment` has been explicitly set.
+    var hasComment: Bool {return self._comment != nil}
+    /// Clears the value of `comment`. Subsequent reads from it will return its default value.
+    mutating func clearComment() {self._comment = nil}
+
+    /// Optional table properties. Only applies to dataset_type == TABLE and dataset_type == MATERIALIZED_VIEW.
+    var tableProperties: Dictionary<String,String> = [:]
+
+    /// Optional partition columns for the dataset. Only applies to dataset_type == TABLE and
+    /// dataset_type == MATERIALIZED_VIEW.
+    var partitionCols: [String] = []
+
+    /// Schema for the dataset. If unset, this will be inferred from incoming flows.
+    var schema: Spark_Connect_DataType {
+      get {return _schema ?? Spark_Connect_DataType()}
+      set {_schema = newValue}
+    }
+    /// Returns true if `schema` has been explicitly set.
+    var hasSchema: Bool {return self._schema != nil}
+    /// Clears the value of `schema`. Subsequent reads from it will return its default value.
+    mutating func clearSchema() {self._schema = nil}
+
+    /// The output table format of the dataset. Only applies to dataset_type == TABLE and
+    /// dataset_type == MATERIALIZED_VIEW.
+    var format: String {
+      get {return _format ?? String()}
+      set {_format = newValue}
+    }
+    /// Returns true if `format` has been explicitly set.
+    var hasFormat: Bool {return self._format != nil}
+    /// Clears the value of `format`. Subsequent reads from it will return its default value.
+    mutating func clearFormat() {self._format = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+    fileprivate var _datasetName: String? = nil
+    fileprivate var _datasetType: Spark_Connect_DatasetType? = nil
+    fileprivate var _comment: String? = nil
+    fileprivate var _schema: Spark_Connect_DataType? = nil
+    fileprivate var _format: String? = nil
+  }
+
+  /// Request to define a flow targeting a dataset.
+  struct DefineFlow: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The graph to attach this flow to.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    /// Name of the flow. For standalone flows, this must be a single-part name.
+    var flowName: String {
+      get {return _flowName ?? String()}
+      set {_flowName = newValue}
+    }
+    /// Returns true if `flowName` has been explicitly set.
+    var hasFlowName: Bool {return self._flowName != nil}
+    /// Clears the value of `flowName`. Subsequent reads from it will return its default value.
+    mutating func clearFlowName() {self._flowName = nil}
+
+    /// Name of the dataset this flow writes to. Can be partially or fully qualified.
+    var targetDatasetName: String {
+      get {return _targetDatasetName ?? String()}
+      set {_targetDatasetName = newValue}
+    }
+    /// Returns true if `targetDatasetName` has been explicitly set.
+    var hasTargetDatasetName: Bool {return self._targetDatasetName != nil}
+    /// Clears the value of `targetDatasetName`. Subsequent reads from it will return its default value.
+    mutating func clearTargetDatasetName() {self._targetDatasetName = nil}
+
+    /// An unresolved relation that defines the dataset's flow.
+    var plan: Spark_Connect_Relation {
+      get {return _plan ?? Spark_Connect_Relation()}
+      set {_plan = newValue}
+    }
+    /// Returns true if `plan` has been explicitly set.
+    var hasPlan: Bool {return self._plan != nil}
+    /// Clears the value of `plan`. Subsequent reads from it will return its default value.
+    mutating func clearPlan() {self._plan = nil}
+
+    /// SQL configurations set when running this flow.
+    var sqlConf: Dictionary<String,String> = [:]
+
+    /// If true, this flow will only be run once per full refresh.
+    var once: Bool {
+      get {return _once ?? false}
+      set {_once = newValue}
+    }
+    /// Returns true if `once` has been explicitly set.
+    var hasOnce: Bool {return self._once != nil}
+    /// Clears the value of `once`. Subsequent reads from it will return its default value.
+    mutating func clearOnce() {self._once = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+    fileprivate var _flowName: String? = nil
+    fileprivate var _targetDatasetName: String? = nil
+    fileprivate var _plan: Spark_Connect_Relation? = nil
+    fileprivate var _once: Bool? = nil
+  }
+
+  /// Resolves all datasets and flows and start a pipeline update. Should be called after all
+  /// graph elements are registered.
+  struct StartRun: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The graph to start.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+  }
+
+  /// Parses the SQL file and registers all datasets and flows.
+  struct DefineSqlGraphElements: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The graph to attach this dataset to.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    /// The full path to the SQL file. Can be relative or absolute.
+    var sqlFilePath: String {
+      get {return _sqlFilePath ?? String()}
+      set {_sqlFilePath = newValue}
+    }
+    /// Returns true if `sqlFilePath` has been explicitly set.
+    var hasSqlFilePath: Bool {return self._sqlFilePath != nil}
+    /// Clears the value of `sqlFilePath`. Subsequent reads from it will return its default value.
+    mutating func clearSqlFilePath() {self._sqlFilePath = nil}
+
+    /// The contents of the SQL file.
+    var sqlText: String {
+      get {return _sqlText ?? String()}
+      set {_sqlText = newValue}
+    }
+    /// Returns true if `sqlText` has been explicitly set.
+    var hasSqlText: Bool {return self._sqlText != nil}
+    /// Clears the value of `sqlText`. Subsequent reads from it will return its default value.
+    mutating func clearSqlText() {self._sqlText = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+    fileprivate var _sqlFilePath: String? = nil
+    fileprivate var _sqlText: String? = nil
+  }
+
+  init() {}
+}
+
+/// Dispatch object for pipelines command results.
+struct Spark_Connect_PipelineCommandResult: Sendable {
+  // SwiftProtobuf.Message conformance is added in an extension below. See the
+  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+  // methods supported on all messages.
+
+  var resultType: Spark_Connect_PipelineCommandResult.OneOf_ResultType? = nil
+
+  var createDataflowGraphResult: Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult {
+    get {
+      if case .createDataflowGraphResult(let v)? = resultType {return v}
+      return Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult()
+    }
+    set {resultType = .createDataflowGraphResult(newValue)}
+  }
+
+  var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  enum OneOf_ResultType: Equatable, Sendable {
+    case createDataflowGraphResult(Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult)
+
+  }
+
+  struct CreateDataflowGraphResult: Sendable {
+    // SwiftProtobuf.Message conformance is added in an extension below. See the
+    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+    // methods supported on all messages.
+
+    /// The ID of the created graph.
+    var dataflowGraphID: String {
+      get {return _dataflowGraphID ?? String()}
+      set {_dataflowGraphID = newValue}
+    }
+    /// Returns true if `dataflowGraphID` has been explicitly set.
+    var hasDataflowGraphID: Bool {return self._dataflowGraphID != nil}
+    /// Clears the value of `dataflowGraphID`. Subsequent reads from it will return its default value.
+    mutating func clearDataflowGraphID() {self._dataflowGraphID = nil}
+
+    var unknownFields = SwiftProtobuf.UnknownStorage()
+
+    init() {}
+
+    fileprivate var _dataflowGraphID: String? = nil
+  }
+
+  init() {}
+}
+
+/// A response containing an event emitted during the run of a pipeline.
+struct Spark_Connect_PipelineEventResult: Sendable {
+  // SwiftProtobuf.Message conformance is added in an extension below. See the
+  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+  // methods supported on all messages.
+
+  var event: Spark_Connect_PipelineEvent {
+    get {return _event ?? Spark_Connect_PipelineEvent()}
+    set {_event = newValue}
+  }
+  /// Returns true if `event` has been explicitly set.
+  var hasEvent: Bool {return self._event != nil}
+  /// Clears the value of `event`. Subsequent reads from it will return its default value.
+  mutating func clearEvent() {self._event = nil}
+
+  var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  init() {}
+
+  fileprivate var _event: Spark_Connect_PipelineEvent? = nil
+}
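Stepping outside the generated file for a moment: the expected client sequence for these messages is to define datasets and flows against a graph ID, then `StartRun`. A sketch under the assumption that the graph ID came back from a `CreateDataflowGraph` round trip; all names are illustrative:

```swift
func buildPipelineRun(graphID: String) -> [Spark_Connect_PipelineCommand] {
  var defineDataset = Spark_Connect_PipelineCommand()
  defineDataset.defineDataset.dataflowGraphID = graphID
  defineDataset.defineDataset.datasetName = "sales_mv"
  defineDataset.defineDataset.datasetType = .materializedView

  var defineFlow = Spark_Connect_PipelineCommand()
  defineFlow.defineFlow.dataflowGraphID = graphID
  defineFlow.defineFlow.flowName = "sales_mv_flow"
  defineFlow.defineFlow.targetDatasetName = "sales_mv"

  var startRun = Spark_Connect_PipelineCommand()
  startRun.startRun.dataflowGraphID = graphID

  // StartRun resolves every registered dataset/flow, so it goes last.
  return [defineDataset, defineFlow, startRun]
}
```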
+
+struct Spark_Connect_PipelineEvent: Sendable {
+  // SwiftProtobuf.Message conformance is added in an extension below. See the
+  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+  // methods supported on all messages.
+
+  /// The timestamp corresponding to when the event occurred.
+  var timestamp: SwiftProtobuf.Google_Protobuf_Timestamp {
+    get {return _timestamp ?? SwiftProtobuf.Google_Protobuf_Timestamp()}
+    set {_timestamp = newValue}
+  }
+  /// Returns true if `timestamp` has been explicitly set.
+  var hasTimestamp: Bool {return self._timestamp != nil}
+  /// Clears the value of `timestamp`. Subsequent reads from it will return its default value.
+  mutating func clearTimestamp() {self._timestamp = nil}
+
+  /// The message that should be displayed to users.
+  var message: String {
+    get {return _message ?? String()}
+    set {_message = newValue}
+  }
+  /// Returns true if `message` has been explicitly set.
+  var hasMessage: Bool {return self._message != nil}
+  /// Clears the value of `message`. Subsequent reads from it will return its default value.
+  mutating func clearMessage() {self._message = nil}
+
+  var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  init() {}
+
+  fileprivate var _timestamp: SwiftProtobuf.Google_Protobuf_Timestamp? = nil
+  fileprivate var _message: String? = nil
+}
+
+// MARK: - Code below here is support for the SwiftProtobuf runtime.
+
+fileprivate let _protobuf_package = "spark.connect"
+
+extension Spark_Connect_DatasetType: SwiftProtobuf._ProtoNameProviding {
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    0: .same(proto: "DATASET_TYPE_UNSPECIFIED"),
+    1: .same(proto: "MATERIALIZED_VIEW"),
+    2: .same(proto: "TABLE"),
+    3: .same(proto: "TEMPORARY_VIEW"),
+  ]
+}
+
+extension Spark_Connect_PipelineCommand: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = _protobuf_package + ".PipelineCommand"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "create_dataflow_graph"),
+    2: .standard(proto: "define_dataset"),
+    3: .standard(proto: "define_flow"),
+    4: .standard(proto: "drop_dataflow_graph"),
+    5: .standard(proto: "start_run"),
+    6: .standard(proto: "define_sql_graph_elements"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try {
+        var v: Spark_Connect_PipelineCommand.CreateDataflowGraph?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .createDataflowGraph(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .createDataflowGraph(v)
+        }
+      }()
+      case 2: try {
+        var v: Spark_Connect_PipelineCommand.DefineDataset?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .defineDataset(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .defineDataset(v)
+        }
+      }()
+      case 3: try {
+        var v: Spark_Connect_PipelineCommand.DefineFlow?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .defineFlow(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .defineFlow(v)
+        }
+      }()
+      case 4: try {
+        var v: Spark_Connect_PipelineCommand.DropDataflowGraph?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .dropDataflowGraph(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .dropDataflowGraph(v)
+        }
+      }()
+      case 5: try {
+        var v: Spark_Connect_PipelineCommand.StartRun?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .startRun(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .startRun(v)
+        }
+      }()
+      case 6: try {
+        var v: Spark_Connect_PipelineCommand.DefineSqlGraphElements?
+        var hadOneofValue = false
+        if let current = self.commandType {
+          hadOneofValue = true
+          if case .defineSqlGraphElements(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.commandType = .defineSqlGraphElements(v)
+        }
+      }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    switch self.commandType {
+    case .createDataflowGraph?: try {
+      guard case .createDataflowGraph(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+    }()
+    case .defineDataset?: try {
+      guard case .defineDataset(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
+    }()
+    case .defineFlow?: try {
+      guard case .defineFlow(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+    }()
+    case .dropDataflowGraph?: try {
+      guard case .dropDataflowGraph(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
+    }()
+    case .startRun?: try {
+      guard case .startRun(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 5)
+    }()
+    case .defineSqlGraphElements?: try {
+      guard case .defineSqlGraphElements(let v)? = self.commandType else { preconditionFailure() }
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
+    }()
+    case nil: break
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand, rhs: Spark_Connect_PipelineCommand) -> Bool {
+    if lhs.commandType != rhs.commandType {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineCommand.CreateDataflowGraph: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".CreateDataflowGraph"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "default_catalog"),
+    2: .standard(proto: "default_database"),
+    5: .standard(proto: "sql_conf"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._defaultCatalog) }()
+      case 2: try { try decoder.decodeSingularStringField(value: &self._defaultDatabase) }()
+      case 5: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.sqlConf) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._defaultCatalog {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try { if let v = self._defaultDatabase {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 2)
+    } }()
+    if !self.sqlConf.isEmpty {
+      try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.sqlConf, fieldNumber: 5)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.CreateDataflowGraph, rhs: Spark_Connect_PipelineCommand.CreateDataflowGraph) -> Bool {
+    if lhs._defaultCatalog != rhs._defaultCatalog {return false}
+    if lhs._defaultDatabase != rhs._defaultDatabase {return false}
+    if lhs.sqlConf != rhs.sqlConf {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
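A binary round trip is a cheap sanity check for the codec paths above (decode cases 1 and 5, traverse field numbers 1 and 5). A script-style sketch using the standard SwiftProtobuf 1.x API; the catalog name and conf key are illustrative:

```swift
import Foundation
import SwiftProtobuf

var graph = Spark_Connect_PipelineCommand()
graph.createDataflowGraph.defaultCatalog = "spark_catalog"
graph.createDataflowGraph.sqlConf["spark.sql.shuffle.partitions"] = "4"

let bytes: Data = try graph.serializedData()
let decoded = try Spark_Connect_PipelineCommand(serializedData: bytes)
assert(decoded == graph)  // uses the generated oneof + map equality
```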
+
+extension Spark_Connect_PipelineCommand.CreateDataflowGraph.Response: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.CreateDataflowGraph.protoMessageName + ".Response"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.CreateDataflowGraph.Response, rhs: Spark_Connect_PipelineCommand.CreateDataflowGraph.Response) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineCommand.DropDataflowGraph: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DropDataflowGraph"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.DropDataflowGraph, rhs: Spark_Connect_PipelineCommand.DropDataflowGraph) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineCommand.DefineDataset: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DefineDataset"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+    2: .standard(proto: "dataset_name"),
+    3: .standard(proto: "dataset_type"),
+    4: .same(proto: "comment"),
+    5: .standard(proto: "table_properties"),
+    6: .standard(proto: "partition_cols"),
+    7: .same(proto: "schema"),
+    8: .same(proto: "format"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
https://github.com/apple/swift-protobuf/issues/1034 + switch fieldNumber { + case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }() + case 2: try { try decoder.decodeSingularStringField(value: &self._datasetName) }() + case 3: try { try decoder.decodeSingularEnumField(value: &self._datasetType) }() + case 4: try { try decoder.decodeSingularStringField(value: &self._comment) }() + case 5: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap.self, value: &self.tableProperties) }() + case 6: try { try decoder.decodeRepeatedStringField(value: &self.partitionCols) }() + case 7: try { try decoder.decodeSingularMessageField(value: &self._schema) }() + case 8: try { try decoder.decodeSingularStringField(value: &self._format) }() + default: break + } + } + } + + func traverse(visitor: inout V) throws { + // The use of inline closures is to circumvent an issue where the compiler + // allocates stack space for every if/case branch local when no optimizations + // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and + // https://github.com/apple/swift-protobuf/issues/1182 + try { if let v = self._dataflowGraphID { + try visitor.visitSingularStringField(value: v, fieldNumber: 1) + } }() + try { if let v = self._datasetName { + try visitor.visitSingularStringField(value: v, fieldNumber: 2) + } }() + try { if let v = self._datasetType { + try visitor.visitSingularEnumField(value: v, fieldNumber: 3) + } }() + try { if let v = self._comment { + try visitor.visitSingularStringField(value: v, fieldNumber: 4) + } }() + if !self.tableProperties.isEmpty { + try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap.self, value: self.tableProperties, fieldNumber: 5) + } + if !self.partitionCols.isEmpty { + try visitor.visitRepeatedStringField(value: self.partitionCols, fieldNumber: 6) + } + try { if let v = self._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } }() + try { if let v = self._format { + try visitor.visitSingularStringField(value: v, fieldNumber: 8) + } }() + try unknownFields.traverse(visitor: &visitor) + } + + static func ==(lhs: Spark_Connect_PipelineCommand.DefineDataset, rhs: Spark_Connect_PipelineCommand.DefineDataset) -> Bool { + if lhs._dataflowGraphID != rhs._dataflowGraphID {return false} + if lhs._datasetName != rhs._datasetName {return false} + if lhs._datasetType != rhs._datasetType {return false} + if lhs._comment != rhs._comment {return false} + if lhs.tableProperties != rhs.tableProperties {return false} + if lhs.partitionCols != rhs.partitionCols {return false} + if lhs._schema != rhs._schema {return false} + if lhs._format != rhs._format {return false} + if lhs.unknownFields != rhs.unknownFields {return false} + return true + } +} + +extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DefineFlow" + static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "dataflow_graph_id"), + 2: .standard(proto: "flow_name"), + 3: .standard(proto: "target_dataset_name"), + 4: .same(proto: "plan"), + 5: .standard(proto: "sql_conf"), + 6: .same(proto: "once"), + ] + + mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + // The use of inline closures is to circumvent an issue where the compiler + // allocates stack space for 
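A sketch of registering a table on an existing graph. The `graphID` would come from a CreateDataflowGraph response; the `.table` enum case and the `defineDataset` oneof accessor are assumptions based on spark/connect/pipelines.proto, and the names are placeholders.

// Sketch: define a table-type dataset with properties and partitioning.
func defineBronzeTable(graphID: String) -> Spark_Connect_PipelineCommand {
  var dataset = Spark_Connect_PipelineCommand.DefineDataset()
  dataset.dataflowGraphID = graphID
  dataset.datasetName = "bronze_events"                   // optional string, field 2
  dataset.datasetType = .table                            // enum field 3 (assumed case)
  dataset.comment = "Raw ingested events"                 // optional string, field 4
  dataset.tableProperties = ["delta.appendOnly": "true"]  // map field 5
  dataset.partitionCols = ["event_date"]                  // repeated field 6

  var pipeline = Spark_Connect_PipelineCommand()
  pipeline.defineDataset = dataset
  return pipeline
}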
+
+extension Spark_Connect_PipelineCommand.DefineFlow: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DefineFlow"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+    2: .standard(proto: "flow_name"),
+    3: .standard(proto: "target_dataset_name"),
+    4: .same(proto: "plan"),
+    5: .standard(proto: "sql_conf"),
+    6: .same(proto: "once"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      case 2: try { try decoder.decodeSingularStringField(value: &self._flowName) }()
+      case 3: try { try decoder.decodeSingularStringField(value: &self._targetDatasetName) }()
+      case 4: try { try decoder.decodeSingularMessageField(value: &self._plan) }()
+      case 5: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.sqlConf) }()
+      case 6: try { try decoder.decodeSingularBoolField(value: &self._once) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try { if let v = self._flowName {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 2)
+    } }()
+    try { if let v = self._targetDatasetName {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 3)
+    } }()
+    try { if let v = self._plan {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
+    } }()
+    if !self.sqlConf.isEmpty {
+      try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.sqlConf, fieldNumber: 5)
+    }
+    try { if let v = self._once {
+      try visitor.visitSingularBoolField(value: v, fieldNumber: 6)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.DefineFlow, rhs: Spark_Connect_PipelineCommand.DefineFlow) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs._flowName != rhs._flowName {return false}
+    if lhs._targetDatasetName != rhs._targetDatasetName {return false}
+    if lhs._plan != rhs._plan {return false}
+    if lhs.sqlConf != rhs.sqlConf {return false}
+    if lhs._once != rhs._once {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineCommand.StartRun: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".StartRun"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.StartRun, rhs: Spark_Connect_PipelineCommand.StartRun) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
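A sketch of wiring a flow into the graph and starting the run. The `sql` accessor on Spark_Connect_Relation, the `defineFlow`/`startRun` oneof cases, and the query text are assumptions for illustration.

// Sketch: attach a flow that populates a target dataset, then start the run.
func defineFlowAndStart(graphID: String) -> [Spark_Connect_PipelineCommand] {
  var plan = Spark_Connect_Relation()
  plan.sql.query = "SELECT * FROM raw_events"

  var flow = Spark_Connect_PipelineCommand.DefineFlow()
  flow.dataflowGraphID = graphID
  flow.flowName = "bronze_events_flow"
  flow.targetDatasetName = "bronze_events"  // should match a DefineDataset name
  flow.plan = plan                          // message field 4
  flow.once = false                         // optional bool, field 6: one-shot vs. continuous

  var defineCmd = Spark_Connect_PipelineCommand()
  defineCmd.defineFlow = flow

  var startCmd = Spark_Connect_PipelineCommand()
  startCmd.startRun.dataflowGraphID = graphID  // StartRun carries only the graph id

  return [defineCmd, startCmd]
}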
+
+extension Spark_Connect_PipelineCommand.DefineSqlGraphElements: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommand.protoMessageName + ".DefineSqlGraphElements"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+    2: .standard(proto: "sql_file_path"),
+    3: .standard(proto: "sql_text"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      case 2: try { try decoder.decodeSingularStringField(value: &self._sqlFilePath) }()
+      case 3: try { try decoder.decodeSingularStringField(value: &self._sqlText) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try { if let v = self._sqlFilePath {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 2)
+    } }()
+    try { if let v = self._sqlText {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 3)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommand.DefineSqlGraphElements, rhs: Spark_Connect_PipelineCommand.DefineSqlGraphElements) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs._sqlFilePath != rhs._sqlFilePath {return false}
+    if lhs._sqlText != rhs._sqlText {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
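A sketch of registering SQL-defined graph elements. `sqlText` carries the definitions while `sqlFilePath` likely identifies their source for error attribution; the `defineSqlGraphElements` oneof accessor is assumed.

// Sketch: submit SQL pipeline definitions for a graph.
func defineSqlElements(graphID: String, path: String, sql: String) -> Spark_Connect_PipelineCommand {
  var elements = Spark_Connect_PipelineCommand.DefineSqlGraphElements()
  elements.dataflowGraphID = graphID
  elements.sqlFilePath = path  // optional string, field 2
  elements.sqlText = sql       // optional string, field 3

  var pipeline = Spark_Connect_PipelineCommand()
  pipeline.defineSqlGraphElements = elements
  return pipeline
}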
+
+extension Spark_Connect_PipelineCommandResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = _protobuf_package + ".PipelineCommandResult"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "create_dataflow_graph_result"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try {
+        var v: Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult?
+        var hadOneofValue = false
+        if let current = self.resultType {
+          hadOneofValue = true
+          if case .createDataflowGraphResult(let m) = current {v = m}
+        }
+        try decoder.decodeSingularMessageField(value: &v)
+        if let v = v {
+          if hadOneofValue {try decoder.handleConflictingOneOf()}
+          self.resultType = .createDataflowGraphResult(v)
+        }
+      }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if case .createDataflowGraphResult(let v)? = self.resultType {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommandResult, rhs: Spark_Connect_PipelineCommandResult) -> Bool {
+    if lhs.resultType != rhs.resultType {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = Spark_Connect_PipelineCommandResult.protoMessageName + ".CreateDataflowGraphResult"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "dataflow_graph_id"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self._dataflowGraphID) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._dataflowGraphID {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult, rhs: Spark_Connect_PipelineCommandResult.CreateDataflowGraphResult) -> Bool {
+    if lhs._dataflowGraphID != rhs._dataflowGraphID {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
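A sketch of unpacking the result oneof on the client. The switch stays exhaustive if more result cases are added later; `hasDataflowGraphID` is the presence accessor SwiftProtobuf generates for the optional id field.

// Sketch: extract the graph id from a pipeline command result, if present.
func graphID(from result: Spark_Connect_PipelineCommandResult) -> String? {
  switch result.resultType {
  case .createDataflowGraphResult(let r)?:
    return r.hasDataflowGraphID ? r.dataflowGraphID : nil
  case nil:
    return nil
  }
}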
+
+extension Spark_Connect_PipelineEventResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = _protobuf_package + ".PipelineEventResult"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "event"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularMessageField(value: &self._event) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._event {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineEventResult, rhs: Spark_Connect_PipelineEventResult) -> Bool {
+    if lhs._event != rhs._event {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
+extension Spark_Connect_PipelineEvent: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = _protobuf_package + ".PipelineEvent"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "timestamp"),
+    2: .same(proto: "message"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularMessageField(value: &self._timestamp) }()
+      case 2: try { try decoder.decodeSingularStringField(value: &self._message) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    try { if let v = self._timestamp {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+    } }()
+    try { if let v = self._message {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 2)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_PipelineEvent, rhs: Spark_Connect_PipelineEvent) -> Bool {
+    if lhs._timestamp != rhs._timestamp {return false}
+    if lhs._message != rhs._message {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
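A sketch of rendering one event from the stream. It assumes `timestamp` is the well-known google.protobuf.Timestamp, whose SwiftProtobuf extension provides `timeIntervalSince1970`.

import Foundation

// Sketch: format a streamed pipeline event for logging.
func describe(_ result: Spark_Connect_PipelineEventResult) -> String {
  let event = result.event
  guard event.hasTimestamp else { return event.message }
  let when = Date(timeIntervalSince1970: event.timestamp.timeIntervalSince1970)
  return "[\(when)] \(event.message)"
}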
diff --git a/Sources/SparkConnect/relations.pb.swift b/Sources/SparkConnect/relations.pb.swift
index 24cad82..d5b2438 100644
--- a/Sources/SparkConnect/relations.pb.swift
+++ b/Sources/SparkConnect/relations.pb.swift
@@ -632,6 +632,16 @@ struct Spark_Connect_MlRelation: @unchecked Sendable {
     set {_uniqueStorage()._mlType = .fetch(newValue)}
   }
 
+  /// (Optional) the dataset for restoring the model summary
+  var modelSummaryDataset: Spark_Connect_Relation {
+    get {return _storage._modelSummaryDataset ?? Spark_Connect_Relation()}
+    set {_uniqueStorage()._modelSummaryDataset = newValue}
+  }
+  /// Returns true if `modelSummaryDataset` has been explicitly set.
+  var hasModelSummaryDataset: Bool {return _storage._modelSummaryDataset != nil}
+  /// Clears the value of `modelSummaryDataset`. Subsequent reads from it will return its default value.
+  mutating func clearModelSummaryDataset() {_uniqueStorage()._modelSummaryDataset = nil}
+
   var unknownFields = SwiftProtobuf.UnknownStorage()
 
   enum OneOf_MlType: Equatable, Sendable {
@@ -3111,6 +3121,17 @@ struct Spark_Connect_GroupMap: @unchecked Sendable {
   /// Clears the value of `stateSchema`. Subsequent reads from it will return its default value.
   mutating func clearStateSchema() {_uniqueStorage()._stateSchema = nil}
 
+  /// Below fields are used by TransformWithState and TransformWithStateInPandas
+  /// (Optional) TransformWithState related parameters.
+  var transformWithStateInfo: Spark_Connect_TransformWithStateInfo {
+    get {return _storage._transformWithStateInfo ?? Spark_Connect_TransformWithStateInfo()}
+    set {_uniqueStorage()._transformWithStateInfo = newValue}
+  }
+  /// Returns true if `transformWithStateInfo` has been explicitly set.
+  var hasTransformWithStateInfo: Bool {return _storage._transformWithStateInfo != nil}
+  /// Clears the value of `transformWithStateInfo`. Subsequent reads from it will return its default value.
+  mutating func clearTransformWithStateInfo() {_uniqueStorage()._transformWithStateInfo = nil}
+
   var unknownFields = SwiftProtobuf.UnknownStorage()
 
   init() {}
@@ -3118,6 +3139,44 @@ struct Spark_Connect_GroupMap: @unchecked Sendable {
   fileprivate var _storage = _StorageClass.defaultInstance
 }
 
+/// Additional input parameters used for TransformWithState operator.
+struct Spark_Connect_TransformWithStateInfo: Sendable {
+  // SwiftProtobuf.Message conformance is added in an extension below. See the
+  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
+  // methods supported on all messages.
+
+  /// (Required) Time mode string for transformWithState.
+  var timeMode: String = String()
+
+  /// (Optional) Event time column name.
+  var eventTimeColumnName: String {
+    get {return _eventTimeColumnName ?? String()}
+    set {_eventTimeColumnName = newValue}
+  }
+  /// Returns true if `eventTimeColumnName` has been explicitly set.
+  var hasEventTimeColumnName: Bool {return self._eventTimeColumnName != nil}
+  /// Clears the value of `eventTimeColumnName`. Subsequent reads from it will return its default value.
+  mutating func clearEventTimeColumnName() {self._eventTimeColumnName = nil}
+
+  /// (Optional) Schema for the output DataFrame.
+  /// Only required for TransformWithStateInPandas.
+  var outputSchema: Spark_Connect_DataType {
+    get {return _outputSchema ?? Spark_Connect_DataType()}
+    set {_outputSchema = newValue}
+  }
+  /// Returns true if `outputSchema` has been explicitly set.
+  var hasOutputSchema: Bool {return self._outputSchema != nil}
+  /// Clears the value of `outputSchema`. Subsequent reads from it will return its default value.
+  mutating func clearOutputSchema() {self._outputSchema = nil}
+
+  var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  init() {}
+
+  fileprivate var _eventTimeColumnName: String? = nil
+  fileprivate var _outputSchema: Spark_Connect_DataType? = nil
+}
+
 struct Spark_Connect_CoGroupMap: @unchecked Sendable {
   // SwiftProtobuf.Message conformance is added in an extension below. See the
   // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
   // methods supported on all messages.
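A sketch of the explicit-presence accessors generated for the new struct above. `timeMode` is a plain proto3 string (serialized only when non-empty), while the two optional fields track presence through has*/clear* helpers. The "EventTime" value is illustrative, not taken from this diff.

// Sketch: presence semantics of Spark_Connect_TransformWithStateInfo.
func transformWithStateInfoPresence() {
  var info = Spark_Connect_TransformWithStateInfo()
  info.timeMode = "EventTime"              // plain field, no presence tracking
  info.eventTimeColumnName = "event_time"  // setting records presence
  assert(info.hasEventTimeColumnName)
  info.clearEventTimeColumnName()          // back to "not set"
  assert(!info.hasEventTimeColumnName && !info.hasOutputSchema)
}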
@@ -4763,10 +4822,12 @@ extension Spark_Connect_MlRelation: SwiftProtobuf.Message, SwiftProtobuf._Messag
   static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
     1: .same(proto: "transform"),
     2: .same(proto: "fetch"),
+    3: .standard(proto: "model_summary_dataset"),
   ]
 
   fileprivate class _StorageClass {
     var _mlType: Spark_Connect_MlRelation.OneOf_MlType?
+    var _modelSummaryDataset: Spark_Connect_Relation? = nil
 
     // This property is used as the initial default value for new instances of the type.
     // The type itself is protecting the reference to its storage via CoW semantics.
@@ -4778,6 +4839,7 @@ extension Spark_Connect_MlRelation: SwiftProtobuf.Message, SwiftProtobuf._Messag
 
     init(copying source: _StorageClass) {
       _mlType = source._mlType
+      _modelSummaryDataset = source._modelSummaryDataset
     }
   }
 
@@ -4822,6 +4884,7 @@ extension Spark_Connect_MlRelation: SwiftProtobuf.Message, SwiftProtobuf._Messag
           _storage._mlType = .fetch(v)
         }
       }()
+      case 3: try { try decoder.decodeSingularMessageField(value: &_storage._modelSummaryDataset) }()
       default: break
       }
     }
@@ -4845,6 +4908,9 @@ extension Spark_Connect_MlRelation: SwiftProtobuf.Message, SwiftProtobuf._Messag
       }()
      case nil: break
      }
+      try { if let v = _storage._modelSummaryDataset {
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+      } }()
     }
     try unknownFields.traverse(visitor: &visitor)
   }
@@ -4855,6 +4921,7 @@ extension Spark_Connect_MlRelation: SwiftProtobuf.Message, SwiftProtobuf._Messag
         let _storage = _args.0
         let rhs_storage = _args.1
         if _storage._mlType != rhs_storage._mlType {return false}
+        if _storage._modelSummaryDataset != rhs_storage._modelSummaryDataset {return false}
         return true
       }
      if !storagesAreEqual {return false}
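The same explicit-presence pattern applies to the storage-backed `MlRelation` field added above; the getter alone cannot distinguish "unset" from an explicitly stored default `Relation()`.

// Sketch: presence-aware read of the new modelSummaryDataset field.
func summaryDataset(of ml: Spark_Connect_MlRelation) -> Spark_Connect_Relation? {
  ml.hasModelSummaryDataset ? ml.modelSummaryDataset : nil
}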
@@ -9354,6 +9421,7 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
     8: .standard(proto: "output_mode"),
     9: .standard(proto: "timeout_conf"),
     10: .standard(proto: "state_schema"),
+    11: .standard(proto: "transform_with_state_info"),
   ]
 
   fileprivate class _StorageClass {
@@ -9367,6 +9435,7 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
     var _outputMode: String? = nil
     var _timeoutConf: String? = nil
     var _stateSchema: Spark_Connect_DataType? = nil
+    var _transformWithStateInfo: Spark_Connect_TransformWithStateInfo? = nil
 
     // This property is used as the initial default value for new instances of the type.
     // The type itself is protecting the reference to its storage via CoW semantics.
@@ -9387,6 +9456,7 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
       _outputMode = source._outputMode
       _timeoutConf = source._timeoutConf
       _stateSchema = source._stateSchema
+      _transformWithStateInfo = source._transformWithStateInfo
     }
   }
 
@@ -9415,6 +9485,7 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
       case 8: try { try decoder.decodeSingularStringField(value: &_storage._outputMode) }()
       case 9: try { try decoder.decodeSingularStringField(value: &_storage._timeoutConf) }()
       case 10: try { try decoder.decodeSingularMessageField(value: &_storage._stateSchema) }()
+      case 11: try { try decoder.decodeSingularMessageField(value: &_storage._transformWithStateInfo) }()
      default: break
      }
     }
@@ -9457,6 +9528,9 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
       try { if let v = _storage._stateSchema {
         try visitor.visitSingularMessageField(value: v, fieldNumber: 10)
       } }()
+      try { if let v = _storage._transformWithStateInfo {
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 11)
+      } }()
     }
     try unknownFields.traverse(visitor: &visitor)
   }
@@ -9476,6 +9550,7 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
         if _storage._outputMode != rhs_storage._outputMode {return false}
         if _storage._timeoutConf != rhs_storage._timeoutConf {return false}
         if _storage._stateSchema != rhs_storage._stateSchema {return false}
+        if _storage._transformWithStateInfo != rhs_storage._transformWithStateInfo {return false}
         return true
       }
      if !storagesAreEqual {return false}
@@ -9485,6 +9560,54 @@ extension Spark_Connect_GroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageI
   }
 }
 
+extension Spark_Connect_TransformWithStateInfo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  static let protoMessageName: String = _protobuf_package + ".TransformWithStateInfo"
+  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "time_mode"),
+    2: .standard(proto: "event_time_column_name"),
+    3: .standard(proto: "output_schema"),
+  ]
+
+  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      // The use of inline closures is to circumvent an issue where the compiler
+      // allocates stack space for every case branch when no optimizations are
+      // enabled. https://github.com/apple/swift-protobuf/issues/1034
+      switch fieldNumber {
+      case 1: try { try decoder.decodeSingularStringField(value: &self.timeMode) }()
+      case 2: try { try decoder.decodeSingularStringField(value: &self._eventTimeColumnName) }()
+      case 3: try { try decoder.decodeSingularMessageField(value: &self._outputSchema) }()
+      default: break
+      }
+    }
+  }
+
+  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    // The use of inline closures is to circumvent an issue where the compiler
+    // allocates stack space for every if/case branch local when no optimizations
+    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
+    // https://github.com/apple/swift-protobuf/issues/1182
+    if !self.timeMode.isEmpty {
+      try visitor.visitSingularStringField(value: self.timeMode, fieldNumber: 1)
+    }
+    try { if let v = self._eventTimeColumnName {
+      try visitor.visitSingularStringField(value: v, fieldNumber: 2)
+    } }()
+    try { if let v = self._outputSchema {
+      try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+    } }()
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  static func ==(lhs: Spark_Connect_TransformWithStateInfo, rhs: Spark_Connect_TransformWithStateInfo) -> Bool {
+    if lhs.timeMode != rhs.timeMode {return false}
+    if lhs._eventTimeColumnName != rhs._eventTimeColumnName {return false}
+    if lhs._outputSchema != rhs._outputSchema {return false}
+    if lhs.unknownFields != rhs.unknownFields {return false}
+    return true
+  }
+}
+
 extension Spark_Connect_CoGroupMap: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".CoGroupMap"
   static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
diff --git a/Sources/SparkConnect/types.pb.swift b/Sources/SparkConnect/types.pb.swift
index 4c0b0fa..01a29f2 100644
--- a/Sources/SparkConnect/types.pb.swift
+++ b/Sources/SparkConnect/types.pb.swift
@@ -804,33 +804,36 @@ fileprivate let _protobuf_package = "spark.connect"
 
 extension Spark_Connect_DataType: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
   static let protoMessageName: String = _protobuf_package + ".DataType"
-  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
-    1: .same(proto: "null"),
-    2: .same(proto: "binary"),
-    3: .same(proto: "boolean"),
-    4: .same(proto: "byte"),
-    5: .same(proto: "short"),
-    6: .same(proto: "integer"),
-    7: .same(proto: "long"),
-    8: .same(proto: "float"),
-    9: .same(proto: "double"),
-    10: .same(proto: "decimal"),
-    11: .same(proto: "string"),
-    12: .same(proto: "char"),
-    13: .standard(proto: "var_char"),
-    14: .same(proto: "date"),
-    15: .same(proto: "timestamp"),
-    16: .standard(proto: "timestamp_ntz"),
-    17: .standard(proto: "calendar_interval"),
-    18: .standard(proto: "year_month_interval"),
-    19: .standard(proto: "day_time_interval"),
-    20: .same(proto: "array"),
-    21: .same(proto: "struct"),
-    22: .same(proto: "map"),
-    25: .same(proto: "variant"),
-    23: .same(proto: "udt"),
-    24: .same(proto: "unparsed"),
-  ]
+  static let _protobuf_nameMap = SwiftProtobuf._NameMap(
+    reservedNames: [],
+    reservedRanges: [26..<28],
+    numberNameMappings: [
+      1: .same(proto: "null"),
+      2: .same(proto: "binary"),
+      3: .same(proto: "boolean"),
+      4: .same(proto: "byte"),
+      5: .same(proto: "short"),
+      6: .same(proto: "integer"),
+      7: .same(proto: "long"),
+      8: .same(proto: "float"),
+      9: .same(proto: "double"),
+      10: .same(proto: "decimal"),
+      11: .same(proto: "string"),
+      12: .same(proto: "char"),
+      13: .standard(proto: "var_char"),
+      14: .same(proto: "date"),
+      15: .same(proto: "timestamp"),
+      16: .standard(proto: "timestamp_ntz"),
+      17: .standard(proto: "calendar_interval"),
+      18: .standard(proto: "year_month_interval"),
+      19: .standard(proto: "day_time_interval"),
+      20: .same(proto: "array"),
+      21: .same(proto: "struct"),
+      22: .same(proto: "map"),
+      25: .same(proto: "variant"),
+      23: .same(proto: "udt"),
+      24: .same(proto: "unparsed"),
+    ])
 
   fileprivate class _StorageClass {
     var _kind: Spark_Connect_DataType.OneOf_Kind?