@@ -110,16 +110,24 @@ public actor DataFrame: Sendable {
     }
   }
 
-  private func analyzePlanIfNeeded() async throws {
-    if self._schema != nil {
-      return
-    }
+  private func withGPRC<Result: Sendable>(
+    _ f: (GRPCClient<GRPCNIOTransportHTTP2.HTTP2ClientTransport.Posix>) async throws -> Result
+  ) async throws -> Result {
     try await withGRPCClient(
       transport: .http2NIOPosix(
         target: .dns(host: spark.client.host, port: spark.client.port),
         transportSecurity: .plaintext
       )
     ) { client in
+      return try await f(client)
+    }
+  }
+
+  private func analyzePlanIfNeeded() async throws {
+    if self._schema != nil {
+      return
+    }
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(
         spark.client.getAnalyzePlanRequest(spark.sessionID, plan))
@@ -132,12 +140,7 @@ public actor DataFrame: Sendable {
   public func count() async throws -> Int64 {
     let counter = Atomic(Int64(0))
 
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -151,12 +154,7 @@ public actor DataFrame: Sendable {
 
   /// Execute the plan and try to fill `schema` and `batches`.
   private func execute() async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       try await service.executePlan(spark.client.getExecutePlanRequest(plan)) {
         response in
@@ -394,12 +392,7 @@ public actor DataFrame: Sendable {
   /// (without any Spark executors).
   /// - Returns: True if the plan is local.
   public func isLocal() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsLocal(spark.sessionID, plan))
       return response.isLocal.isLocal
@@ -410,12 +403,7 @@ public actor DataFrame: Sendable {
   /// arrives.
   /// - Returns: True if a plan is streaming.
   public func isStreaming() async throws -> Bool {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getIsStreaming(spark.sessionID, plan))
       return response.isStreaming.isStreaming
@@ -439,12 +427,7 @@ public actor DataFrame: Sendable {
   public func persist(storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK) async throws
     -> DataFrame
   {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(
         spark.client.getPersist(spark.sessionID, plan, storageLevel))
@@ -458,12 +441,7 @@ public actor DataFrame: Sendable {
   /// - Parameter blocking: Whether to block until all blocks are deleted.
   /// - Returns: A `DataFrame`
   public func unpersist(blocking: Bool = false) async throws -> DataFrame {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       _ = try await service.analyzePlan(spark.client.getUnpersist(spark.sessionID, plan, blocking))
     }
@@ -473,12 +451,7 @@ public actor DataFrame: Sendable {
 
   public var storageLevel: StorageLevel {
     get async throws {
-      try await withGRPCClient(
-        transport: .http2NIOPosix(
-          target: .dns(host: spark.client.host, port: spark.client.port),
-          transportSecurity: .plaintext
-        )
-      ) { client in
+      try await withGPRC { client in
         let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
         return try await service
           .analyzePlan(spark.client.getStorageLevel(spark.sessionID, plan)).getStorageLevel.storageLevel.toStorageLevel
@@ -505,12 +478,7 @@ public actor DataFrame: Sendable {
   /// - Parameter mode: the expected output format of plans;
   /// `simple`, `extended`, `codegen`, `cost`, `formatted`.
   public func explain(_ mode: String) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getExplain(spark.sessionID, plan, mode))
       print(response.explain.explainString)
@@ -522,12 +490,7 @@ public actor DataFrame: Sendable {
   /// results. Depending on the source relations, this may not find all input files. Duplicates are removed.
   /// - Returns: An array of file path strings.
   public func inputFiles() async throws -> [String] {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getInputFiles(spark.sessionID, plan))
       return response.inputFiles.files
@@ -542,12 +505,7 @@ public actor DataFrame: Sendable {
   /// Prints the schema up to the given level to the console in a nice tree format.
   /// - Parameter level: A level to be printed.
   public func printSchema(_ level: Int32) async throws {
-    try await withGRPCClient(
-      transport: .http2NIOPosix(
-        target: .dns(host: spark.client.host, port: spark.client.port),
-        transportSecurity: .plaintext
-      )
-    ) { client in
+    try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
       let response = try await service.analyzePlan(spark.client.getTreeString(spark.sessionID, plan, level))
       print(response.treeString.treeString)
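The change above is a straightforward extract-method refactoring: every call site used to open its own client via withGRPCClient with an identical http2NIOPosix transport, and the new private helper withGPRC now owns that setup while each operation passes only the closure to run against the open client. The sketch below illustrates the same "with-resource" pattern in isolation. It is a minimal, self-contained illustration rather than code from this commit; ConnectionConfig, FakeClient, and withClient are hypothetical stand-ins for the real GRPCNIOTransportHTTP2 types.

// Minimal sketch of the with-resource helper pattern that withGPRC applies.
// ConnectionConfig and FakeClient are hypothetical stand-ins, not grpc-swift types.
struct ConnectionConfig {
  let host: String
  let port: Int
}

struct FakeClient {
  let config: ConnectionConfig

  func call(_ request: String) -> String {
    "response(\(request)) from \(config.host):\(config.port)"
  }
}

/// Opens a client, runs `body` against it, and tears the client down afterwards,
/// so call sites never repeat the connection setup.
func withClient<Result>(
  _ config: ConnectionConfig,
  _ body: (FakeClient) async throws -> Result
) async rethrows -> Result {
  let client = FakeClient(config: config)  // stands in for opening the transport
  defer { print("closing connection to \(config.host):\(config.port)") }  // teardown runs on success and throw
  return try await body(client)
}

@main
struct Demo {
  static func main() async throws {
    // 15002 is only used here as an illustrative port value.
    let config = ConnectionConfig(host: "localhost", port: 15002)
    // Each operation shrinks to a short closure instead of repeating
    // the transport configuration at every call site.
    let answer = try await withClient(config) { client in
      client.call("analyzePlan")
    }
    print(answer)
  }
}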