@@ -110,16 +110,24 @@ public actor DataFrame: Sendable {
     }
   }
 
-  private func analyzePlanIfNeeded() async throws {
-    if self._schema != nil {
-      return
-    }
+  private func withGPRC<Result: Sendable>(
+    _ f: (GRPCClient<GRPCNIOTransportHTTP2.HTTP2ClientTransport.Posix>) async throws -> Result
+  ) async throws -> Result {
     try await withGRPCClient(
       transport: .http2NIOPosix(
         target: .dns(host: spark.client.host, port: spark.client.port),
         transportSecurity: .plaintext
       )
     ) { client in
+      return try await f(client)
+    }
+  }
+
+  private func analyzePlanIfNeeded() async throws {
+    if self._schema != nil {
+      return
+    }
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(
         spark.client.getAnalyzePlanRequest(spark.sessionID, plan))
@@ -132,12 +140,7 @@ public actor DataFrame: Sendable {
   public func count() async throws -> Int64 {
     let counter = Atomic(Int64(0))
 
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -154,12 +157,7 @@ public actor DataFrame: Sendable {
     // Clear all existing batches.
     self.batches.removeAll()
 
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -397,12 +395,7 @@ public actor DataFrame: Sendable {
   /// (without any Spark executors).
   /// - Returns: True if the plan is local.
   public func isLocal() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsLocal(spark.sessionID, plan))
       return response.isLocal.isLocal
@@ -413,12 +406,7 @@ public actor DataFrame: Sendable {
   /// arrives.
   /// - Returns: True if a plan is streaming.
   public func isStreaming() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsStreaming(spark.sessionID, plan))
       return response.isStreaming.isStreaming
@@ -442,12 +430,7 @@ public actor DataFrame: Sendable {
   public func persist(storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK) async throws
     -> DataFrame
   {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(
         spark.client.getPersist(spark.sessionID, plan, storageLevel))
@@ -461,12 +444,7 @@ public actor DataFrame: Sendable {
   /// - Parameter blocking: Whether to block until all blocks are deleted.
   /// - Returns: A `DataFrame`
   public func unpersist(blocking: Bool = false) async throws -> DataFrame {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(spark.client.getUnpersist(spark.sessionID, plan, blocking))
     }
@@ -476,12 +454,7 @@ public actor DataFrame: Sendable {
 
   public var storageLevel: StorageLevel {
     get async throws {
-      try await withGRPCClient(
-        transport: .http2NIOPosix(
-          target: .dns(host: spark.client.host, port: spark.client.port),
-          transportSecurity: .plaintext
-        )
-      ) { client in
+      try await withGPRC { client in
         let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
         return try await service
           .analyzePlan(spark.client.getStorageLevel(spark.sessionID, plan)).getStorageLevel.storageLevel.toStorageLevel
@@ -508,12 +481,7 @@ public actor DataFrame: Sendable {
   /// - Parameter mode: the expected output format of plans;
   /// `simple`, `extended`, `codegen`, `cost`, `formatted`.
   public func explain(_ mode: String) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getExplain(spark.sessionID, plan, mode))
       print(response.explain.explainString)
@@ -525,12 +493,7 @@ public actor DataFrame: Sendable {
   /// results. Depending on the source relations, this may not find all input files. Duplicates are removed.
   /// - Returns: An array of file path strings.
   public func inputFiles() async throws -> [String] {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getInputFiles(spark.sessionID, plan))
       return response.inputFiles.files
@@ -545,12 +508,7 @@ public actor DataFrame: Sendable {
   /// Prints the schema up to the given level to the console in a nice tree format.
   /// - Parameter level: A level to be printed.
   public func printSchema(_ level: Int32) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getTreeString(spark.sessionID, plan, level))
       print(response.treeString.treeString)
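For reference, below is a minimal standalone sketch of the helper pattern this diff extracts. It only reuses the grpc-swift calls already visible in the hunks above (withGRPCClient, .http2NIOPosix, .dns, .plaintext); the import module names, the explicit host/port parameters, and the usage values are assumptions for illustration, not part of this change.

import GRPCCore
import GRPCNIOTransportHTTP2

// Sketch of the extracted helper: open a plaintext HTTP/2 connection,
// hand the client to the caller's closure, and return its result.
// Unlike the DataFrame actor above, which reads the target from
// spark.client, this version takes host and port as plain parameters.
func withGPRC<Result: Sendable>(
  host: String,
  port: Int,
  _ f: (GRPCClient<GRPCNIOTransportHTTP2.HTTP2ClientTransport.Posix>) async throws -> Result
) async throws -> Result {
  try await withGRPCClient(
    transport: .http2NIOPosix(
      target: .dns(host: host, port: port),
      transportSecurity: .plaintext
    )
  ) { client in
    // Forward the connected client to the caller and propagate the result.
    try await f(client)
  }
}

// Hypothetical usage (host and port are placeholder values): each call
// site shrinks from the six-line withGRPCClient block to one closure.
// let files = try await withGPRC(host: "localhost", port: 15002) { client in
//   // ... build a service wrapper and issue the RPC here ...
// }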