@@ -110,16 +110,24 @@ public actor DataFrame: Sendable {
     }
   }
 
-  private func analyzePlanIfNeeded() async throws {
-    if self._schema != nil {
-      return
-    }
+  private func withGPRC<Result: Sendable>(
+    _ f: (GRPCClient<GRPCNIOTransportHTTP2.HTTP2ClientTransport.Posix>) async throws -> Result
+  ) async throws -> Result {
     try await withGRPCClient(
       transport: .http2NIOPosix(
         target: .dns(host: spark.client.host, port: spark.client.port),
         transportSecurity: .plaintext
       )
     ) { client in
+      return try await f(client)
+    }
+  }
+
+  private func analyzePlanIfNeeded() async throws {
+    if self._schema != nil {
+      return
+    }
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(
         spark.client.getAnalyzePlanRequest(spark.sessionID, plan))
@@ -132,12 +140,7 @@ public actor DataFrame: Sendable {
   public func count() async throws -> Int64 {
     let counter = Atomic(Int64(0))
 
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -151,12 +154,7 @@ public actor DataFrame: Sendable {
 
   /// Execute the plan and try to fill `schema` and `batches`.
   private func execute() async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -394,12 +392,7 @@ public actor DataFrame: Sendable {
   /// (without any Spark executors).
   /// - Returns: True if the plan is local.
   public func isLocal() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsLocal(spark.sessionID, plan))
       return response.isLocal.isLocal
@@ -410,12 +403,7 @@ public actor DataFrame: Sendable {
   /// arrives.
   /// - Returns: True if a plan is streaming.
   public func isStreaming() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsStreaming(spark.sessionID, plan))
       return response.isStreaming.isStreaming
@@ -439,12 +427,7 @@ public actor DataFrame: Sendable {
   public func persist(storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK) async throws
     -> DataFrame
   {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(
         spark.client.getPersist(spark.sessionID, plan, storageLevel))
@@ -458,12 +441,7 @@ public actor DataFrame: Sendable {
   /// - Parameter blocking: Whether to block until all blocks are deleted.
   /// - Returns: A `DataFrame`
   public func unpersist(blocking: Bool = false) async throws -> DataFrame {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(spark.client.getUnpersist(spark.sessionID, plan, blocking))
     }
@@ -473,12 +451,7 @@ public actor DataFrame: Sendable {
 
   public var storageLevel: StorageLevel {
     get async throws {
-      try await withGRPCClient(
-        transport: .http2NIOPosix(
-          target: .dns(host: spark.client.host, port: spark.client.port),
-          transportSecurity: .plaintext
-        )
-      ) { client in
+      try await withGPRC { client in
         let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
         return try await service
           .analyzePlan(spark.client.getStorageLevel(spark.sessionID, plan)).getStorageLevel.storageLevel.toStorageLevel
@@ -505,12 +478,7 @@ public actor DataFrame: Sendable {
   /// - Parameter mode: the expected output format of plans;
   ///   `simple`, `extended`, `codegen`, `cost`, `formatted`.
   public func explain(_ mode: String) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getExplain(spark.sessionID, plan, mode))
       print(response.explain.explainString)
@@ -522,12 +490,7 @@ public actor DataFrame: Sendable {
   /// results. Depending on the source relations, this may not find all input files. Duplicates are removed.
   /// - Returns: An array of file path strings.
   public func inputFiles() async throws -> [String] {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getInputFiles(spark.sessionID, plan))
       return response.inputFiles.files
@@ -542,12 +505,7 @@ public actor DataFrame: Sendable {
   /// Prints the schema up to the given level to the console in a nice tree format.
   /// - Parameter level: A level to be printed.
   public func printSchema(_ level: Int32) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getTreeString(spark.sessionID, plan, level))
       print(response.treeString.treeString)
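The net effect of this change is that every RPC in DataFrame now funnels through one private helper, withGPRC, which owns the transport configuration (DNS target, plaintext HTTP/2) and hands each call site a ready GRPCClient. The sketch below shows the same factoring in a standalone form. It is a minimal illustration, not code from this repository: it reuses only the grpc-swift-2 APIs already visible in the diff (withGRPCClient, .http2NIOPosix, GRPCClient, HTTP2ClientTransport.Posix), while ConnectionInfo, RPCRunner, withClient, and ping are hypothetical names introduced here for illustration.

import GRPCCore
import GRPCNIOTransportHTTP2

// Hypothetical connection descriptor standing in for `spark.client` above.
struct ConnectionInfo: Sendable {
  let host: String
  let port: Int
}

actor RPCRunner {
  private let conn: ConnectionInfo

  init(conn: ConnectionInfo) {
    self.conn = conn
  }

  // Shared wrapper mirroring `withGPRC`: builds the plaintext HTTP/2
  // transport, opens a client, runs `body`, and returns its result.
  private func withClient<Result: Sendable>(
    _ body: (GRPCClient<HTTP2ClientTransport.Posix>) async throws -> Result
  ) async throws -> Result {
    try await withGRPCClient(
      transport: .http2NIOPosix(
        target: .dns(host: conn.host, port: conn.port),
        transportSecurity: .plaintext
      )
    ) { client in
      try await body(client)
    }
  }

  // Example call site: only the closure body differs between RPCs.
  func ping() async throws {
    try await withClient { client in
      // A generated service client would be wrapped here, e.g.
      //   let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
      //   _ = try await service.analyzePlan(...)
      _ = client  // placeholder so the sketch stays self-contained
    }
  }
}

The design choice is to centralize the transport decision: switching from .plaintext to TLS, or changing the target, later means editing the helper once, while every call site keeps the same `{ client in ... }` shape.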